diff --git a/docs.json b/docs.json
index dd4215a0..5c265f4e 100644
--- a/docs.json
+++ b/docs.json
@@ -73,6 +73,7 @@
"group": "Providers",
"pages": [
"openhands/usage/llms/openhands-llms",
+ "openhands/usage/llms/aws-bedrock",
"openhands/usage/llms/azure-llms",
"openhands/usage/llms/google-llms",
"openhands/usage/llms/groq",
diff --git a/openapi/agent-sdk.json b/openapi/agent-sdk.json
index 62aa00b8..cbb419e4 100644
--- a/openapi/agent-sdk.json
+++ b/openapi/agent-sdk.json
@@ -1489,14 +1489,14 @@
}
}
},
- "/api/bash/execute_bash_command": {
+ "/api/bash/start_bash_command": {
"post": {
"tags": [
"Bash"
],
"summary": "Start Bash Command",
- "description": "Execute a bash command",
- "operationId": "start_bash_command_api_bash_execute_bash_command_post",
+ "description": "Execute a bash command in the background",
+ "operationId": "start_bash_command_api_bash_start_bash_command_post",
"requestBody": {
"content": {
"application/json": {
@@ -1531,6 +1531,48 @@
}
}
},
+ "/api/bash/execute_bash_command": {
+ "post": {
+ "tags": [
+ "Bash"
+ ],
+ "summary": "Execute Bash Command",
+ "description": "Execute a bash command and wait for a result",
+ "operationId": "execute_bash_command_api_bash_execute_bash_command_post",
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ExecuteBashRequest"
+ }
+ }
+ },
+ "required": true
+ },
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/BashOutput"
+ }
+ }
+ }
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/HTTPValidationError"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
"/api/bash/bash_events": {
"delete": {
"tags": [
diff --git a/openhands/usage/llms/aws-bedrock.mdx b/openhands/usage/llms/aws-bedrock.mdx
new file mode 100644
index 00000000..7fe544ef
--- /dev/null
+++ b/openhands/usage/llms/aws-bedrock.mdx
@@ -0,0 +1,25 @@
+---
+title: AWS Bedrock
+description: OpenHands can work with AWS Bedrock models using an OpenAI-compatible API endpoint.
+---
+
+## Configuration
+
+AWS Bedrock provides OpenAI-compatible API endpoints for its models. You can find the AWS documentation on using the OpenAI Chat Completions API [here](https://docs.aws.amazon.com/bedrock/latest/userguide/inference-chat-completions.html).
+
+To configure OpenHands to use AWS Bedrock:
+
+1. In the OpenHands UI, navigate to Settings under the `LLM` tab
+2. Enable `Advanced` options
+3. Set the following:
+ - `Custom Model` to `openai/{model_name}` (e.g. `openai/anthropic.claude-3-5-sonnet-20240620-v1:0`)
+ - `Base URL` to `https://bedrock-runtime.{region}.amazonaws.com/openai/v1` (e.g. `https://bedrock-runtime.us-west-2.amazonaws.com/openai/v1`)
+ - `API Key` to your AWS Bedrock API key
+
+
+The `openai/` prefix in the Custom Model field indicates that OpenHands should use an OpenAI-compatible API format. It does not mean you are directly calling the OpenAI provider.
+
+
+
+Replace `{region}` with your desired AWS region (e.g., `us-west-2`, `us-east-1`). Ensure the region you specify matches where your Bedrock models are available.
+