From 207df9dd309b37bb43964ccaf84fce599fc663e5 Mon Sep 17 00:00:00 2001 From: OpenHands Date: Tue, 12 Nov 2024 17:23:11 -0500 Subject: [PATCH] Fix issue #4912: [Bug]: BedrockException: "The number of toolResult blocks at messages.2.content exceeds the number of toolUse blocks of previous turn.". (#4937) Co-authored-by: Xingyao Wang Co-authored-by: Graham Neubig Co-authored-by: mamoodi --- docs/modules/usage/llms/litellm-proxy.md | 20 ++++++++++++++++++++ docs/modules/usage/llms/llms.md | 1 + docs/sidebars.ts | 5 +++++ 3 files changed, 26 insertions(+) create mode 100644 docs/modules/usage/llms/litellm-proxy.md diff --git a/docs/modules/usage/llms/litellm-proxy.md b/docs/modules/usage/llms/litellm-proxy.md new file mode 100644 index 000000000000..9178bc5c33ea --- /dev/null +++ b/docs/modules/usage/llms/litellm-proxy.md @@ -0,0 +1,20 @@ +# LiteLLM Proxy + +OpenHands supports using the [LiteLLM proxy](https://docs.litellm.ai/docs/proxy/quick_start) to access various LLM providers. + +## Configuration + +To use LiteLLM proxy with OpenHands, you need to: + +1. Set up a LiteLLM proxy server (see [LiteLLM documentation](https://docs.litellm.ai/docs/proxy/quick_start)) +2. When running OpenHands, you'll need to set the following in the OpenHands UI through the Settings: + * Enable `Advanced Options` + * Set `Custom Model` to the prefix `litellm_proxy/` + the model you will be using (e.g. `litellm_proxy/anthropic.claude-3-5-sonnet-20241022-v2:0`) + * Set `Base URL` to your LiteLLM proxy URL (e.g. `https://your-litellm-proxy.com`) + * Set `API Key` to your LiteLLM proxy API key + +## Supported Models + +The supported models depend on your LiteLLM proxy configuration. OpenHands supports any model that your LiteLLM proxy is configured to handle. + +Refer to your LiteLLM proxy configuration for the list of available models and their names. 
diff --git a/docs/modules/usage/llms/llms.md b/docs/modules/usage/llms/llms.md index 3ce773fcc15e..d9254b2070a4 100644 --- a/docs/modules/usage/llms/llms.md +++ b/docs/modules/usage/llms/llms.md @@ -63,6 +63,7 @@ We have a few guides for running OpenHands with specific model providers: - [Azure](llms/azure-llms) - [Google](llms/google-llms) - [Groq](llms/groq) +- [LiteLLM Proxy](llms/litellm-proxy) - [OpenAI](llms/openai-llms) - [OpenRouter](llms/openrouter) diff --git a/docs/sidebars.ts b/docs/sidebars.ts index 19356116f28c..7ce0a1f210c2 100644 --- a/docs/sidebars.ts +++ b/docs/sidebars.ts @@ -76,6 +76,11 @@ const sidebars: SidebarsConfig = { label: 'Groq', id: 'usage/llms/groq', }, + { + type: 'doc', + label: 'LiteLLM Proxy', + id: 'usage/llms/litellm-proxy', + }, { type: 'doc', label: 'OpenAI',