Skip to content

Commit 3204b3a

Browse files
authored
docs: Bedrock example. (#410)
* Adds Bedrock example. * Adds README for bedrock example, and checks that bearer token is defined. * removes bare except. * Adds an openai example and moves boto3 to [litellm], removing [aws] * feat: add bedrock_name to model identifiers and bedrock examples * Adds pytest skip reasons. * reverts change to .pre-commit-config. * Revert "reverts change to .pre-commit-config." This reverts commit 649743a. * bedrock work. * Fixes model ids. * saving wokr on openai bedrock example. * fixes pre-commit hooks. * Some cleanup.
1 parent 840a02d commit 3204b3a

File tree

8 files changed

+320
-2
lines changed

8 files changed

+320
-2
lines changed

docs/examples/bedrock/README.md

Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,74 @@
1+
# Using Mellea with Bedrock
2+
3+
Mellea can be used with Bedrock models via Mellea's LiteLLM or OpenAI backend support.
4+
5+
## Pre-requisites
6+
7+
To get started:
8+
9+
1. Set the `AWS_BEARER_TOKEN_BEDROCK` environment variable.
10+
11+
```bash
12+
export AWS_BEARER_TOKEN_BEDROCK=<your API key goes here>
13+
```
14+
15+
2. If you want to use litellm, you need to install the optional `litellm` dependencies:
16+
17+
```bash
18+
uv pip install mellea[litellm]
19+
```
20+
21+
3. You can now use Bedrock. We've included some built-in model ids for convenience:
22+
23+
24+
```python
25+
from mellea import MelleaSession
26+
from mellea.backends.bedrock import create_bedrock_mantle_backend
27+
from mellea.backends.model_ids import OPENAI_GPT_OSS_120B
28+
from mellea.stdlib.context import ChatContext
29+
30+
bedrock_oai_backend = create_bedrock_mantle_backend(model_id=OPENAI_GPT_OSS_120B, region="us-east-1")
31+
32+
m = MelleaSession(backend=bedrock_oai_backend, ctx=ChatContext())
33+
34+
print(m.chat("Tell me 3 facts about Amazon.").content)
35+
```
36+
37+
You can also use your own model IDs as strings, as long as they're accessible using the [mantle endpoints](https://docs.aws.amazon.com/bedrock/latest/userguide/bedrock-mantle.html):
38+
39+
```python
40+
from mellea import MelleaSession
41+
from mellea.backends.bedrock import create_bedrock_mantle_backend
42+
from mellea.stdlib.context import ChatContext
43+
44+
bedrock_oai_backend = create_bedrock_mantle_backend(
45+
model_id="qwen.qwen3-coder-480b-a35b-instruct",
46+
region="us-east-1"
47+
)
48+
49+
m = MelleaSession(backend=bedrock_oai_backend, ctx=ChatContext())
50+
51+
print(m.chat("Tell me 3 facts about Amazon.").content)
52+
```
53+
54+
You can get a list of all models that are available at the mantle endpoint for a region by running this utility script:
55+
56+
```python
57+
from mellea.backends.bedrock import stringify_mantle_model_ids
58+
REGION = "us-east-1" # change this to see other region availability.
59+
print(f"Available Models in {REGION}:\n{stringify_mantle_model_ids(region=REGION)}")
60+
```
61+
62+
## Other Examples
63+
64+
Using LiteLLM with Bedrock (based on [litellm's docs](https://docs.litellm.ai/docs/providers/bedrock)):
65+
66+
```
67+
uv run bedrock_litellm_example.py
68+
```
69+
70+
Stand-alone example using the mantle OpenAI backends:
71+
72+
```bash
73+
uv run bedrock_openai_example.py
74+
```
Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
# pytest: skip
# SKIP REASON: Requires an AWS bearer token for Bedrock.
#!/usr/bin/env -S uv run --script
# /// script
# requires-python = ">=3.10"
# dependencies = [
#     "mellea[litellm]",
#     "boto3" # including so that this example works before the next release.
# ]
# ///
"""Example: calling a Bedrock model through Mellea's LiteLLM backend."""
import os

import mellea

try:
    import boto3
except ImportError as err:
    # Narrowed from `except Exception` and chained so the original
    # import failure stays visible in the traceback.
    raise Exception(
        "Using Bedrock requires separately installing boto3. "
        "Run `uv pip install mellea[litellm]`"
    ) from err

if "AWS_BEARER_TOKEN_BEDROCK" not in os.environ:
    # A real exception (not `assert`) so the check survives `python -O`.
    raise RuntimeError(
        "Using AWS Bedrock requires setting a AWS_BEARER_TOKEN_BEDROCK environment variable. "
        "Generate a key from the AWS console at: https://us-east-1.console.aws.amazon.com/bedrock/home?region=us-east-1#/api-keys?tab=long-term "
        "Then run `export AWS_BEARER_TOKEN_BEDROCK=<insert your key here>`"
    )

# LiteLLM Bedrock route: <provider>/<api>/<bedrock model id>.
MODEL_ID = "bedrock/converse/us.amazon.nova-pro-v1:0"

m = mellea.start_session(backend_name="litellm", model_id=MODEL_ID)

result = m.chat("Give me three facts about Amazon.")

print(result.content)
Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
# pytest: skip
# SKIP REASON: Requires an AWS bearer token for Bedrock.
#!/usr/bin/env -S uv run --script
# /// script
# requires-python = ">=3.10"
# dependencies = [
#     "mellea[openai]"
# ]
# ///
"""Example: calling a Bedrock mantle model through Mellea's OpenAI backend."""
import os

from mellea import MelleaSession
from mellea.backends import model_ids
from mellea.backends.bedrock import create_bedrock_mantle_backend
from mellea.backends.openai import OpenAIBackend
from mellea.stdlib.context import ChatContext

if "AWS_BEARER_TOKEN_BEDROCK" not in os.environ:
    # A real exception (not `assert`) so the check survives `python -O`;
    # the closing backtick in step 2 was previously missing.
    raise RuntimeError(
        "Using AWS Bedrock requires setting a AWS_BEARER_TOKEN_BEDROCK environment variable.\n\nTo proceed:\n"
        "\n\t1. Generate a key from the AWS console at: https://us-east-1.console.aws.amazon.com/bedrock/home?region=us-east-1#/api-keys?tab=long-term "
        "\n\t2. Run `export AWS_BEARER_TOKEN_BEDROCK=<insert your key here>`"
    )

# Build an OpenAI-compatible backend that talks to the Bedrock mantle endpoint.
m = MelleaSession(
    backend=create_bedrock_mantle_backend(model_id=model_ids.OPENAI_GPT_OSS_120B),
    ctx=ChatContext(),
)

result = m.chat("Give me three facts about Amazon.")

print(result.content)

mellea/backends/bedrock.py

Lines changed: 78 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,78 @@
1+
"""Helpers for creating bedrock backends from openai/litellm."""
2+
3+
import os
4+
5+
from openai import OpenAI
6+
from openai.pagination import SyncPage
7+
8+
from mellea.backends.model_ids import ModelIdentifier
9+
from mellea.backends.openai import OpenAIBackend
10+
11+
12+
def _make_region_for_uri(region: str | None):
13+
if region is None:
14+
region = "us-east-1"
15+
return region
16+
17+
18+
def _make_mantle_uri(region: str | None = None):
19+
region_for_uri = _make_region_for_uri(region)
20+
uri = f"https://bedrock-mantle.{region_for_uri}.api.aws/v1"
21+
return uri
22+
23+
24+
def list_mantle_models(region: str | None = None) -> list:
    """Return all models available at the bedrock-mantle endpoint for *region*.

    Requires the AWS_BEARER_TOKEN_BEDROCK environment variable to be set.
    """
    client = OpenAI(
        base_url=_make_mantle_uri(region),
        api_key=os.environ["AWS_BEARER_TOKEN_BEDROCK"],
    )
    page = client.models.list()
    models = [model for model in page]
    # We collect in one pass; verify there was no additional page left behind.
    assert page.next_page_info() is None
    return models
32+
33+
34+
def stringify_mantle_model_ids(region: str | None = None) -> str:
    """Return a human-readable, bulleted list of models at the mantle endpoint.

    Args:
        region: AWS region to query; defaults to "us-east-1" when None.

    Returns:
        One " * <model id>" entry per line.
    """
    # Bug fix: the region argument was previously dropped, so this always
    # listed models for the default region regardless of the caller's choice.
    models = list_mantle_models(region)
    model_names = "\n * ".join([str(m.id) for m in models])
    return f" * {model_names}"
39+
40+
41+
def create_bedrock_mantle_backend(
42+
model_id: ModelIdentifier | str, region: str | None = None
43+
) -> OpenAIBackend:
44+
"""Returns an OpenAI backend that points to Bedrock mantle for model `model_id`."""
45+
model_name = ""
46+
match model_id:
47+
case ModelIdentifier() if model_id.bedrock_name is None:
48+
raise Exception(
49+
f"We do not have a known bedrock model identifier for {model_id}. If Bedrock supports this model, please pass the model_id string directly and open an issue to add the model id: https://github.com/generative-computing/mellea/issues/new"
50+
)
51+
case ModelIdentifier() if model_id.bedrock_name is not None:
52+
assert model_id.bedrock_name is not None # for type checker help.
53+
model_name = model_id.bedrock_name
54+
case str():
55+
model_name = model_id
56+
assert model_name != ""
57+
58+
assert "AWS_BEARER_TOKEN_BEDROCK" in os.environ.keys(), (
59+
"Using AWS Bedrock requires setting a AWS_BEARER_TOKEN_BEDROCK environment variable.\n\nTo proceed:\n"
60+
"\n\t1. Generate a key from the AWS console at: https://us-east-1.console.aws.amazon.com/bedrock/home?region=us-east-1#/api-keys?tab=long-term "
61+
"\n\t2. Run `export AWS_BEARER_TOKEN_BEDROCK=<insert your key here>\n"
62+
"If you need to use normal AWS credentials instead of a bedrock-specific bearer token, please open an issue: https://github.com/generative-computing/mellea/issues/new"
63+
)
64+
65+
uri = _make_mantle_uri(region=region)
66+
67+
models = list_mantle_models(region)
68+
if model_name not in [m.id for m in models]:
69+
raise Exception(
70+
f"Model {model_name} is not supported in region {_make_region_for_uri(region=region)}.\nSupported models are:\n{stringify_mantle_model_ids(region)}\n\nPerhaps change regions or check that model access for {model_name} is not gated on Bedrock?"
71+
)
72+
73+
backend = OpenAIBackend(
74+
model_id=model_name, # sic: do not pass the model_id itself!
75+
base_url=uri,
76+
api_key=os.environ["AWS_BEARER_TOKEN_BEDROCK"],
77+
)
78+
return backend

mellea/backends/model_ids.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ class ModelIdentifier:
1717
watsonx_name: str | None = None
1818
mlx_name: str | None = None
1919
openai_name: str | None = None
20+
bedrock_name: str | None = None
2021

2122
hf_tokenizer_name: str | None = None # if None, is the same as hf_model_name
2223

@@ -154,10 +155,12 @@ class ModelIdentifier:
154155
OPENAI_GPT_OSS_20B = ModelIdentifier(
155156
hf_model_name="openai/gpt-oss-20b", # OpenAI GPT-OSS 20B
156157
ollama_name="gpt-oss:20b", # Ollama
158+
bedrock_name="openai.gpt-oss-20b",
157159
)
158160
OPENAI_GPT_OSS_120B = ModelIdentifier(
159161
hf_model_name="openai/gpt-oss-120b", # OpenAI GPT-OSS 120B
160162
ollama_name="gpt-oss:120b", # Ollama
163+
bedrock_name="openai.gpt-oss-120b",
161164
)
162165

163166
###########################

pyproject.toml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,8 @@ vllm = [
8989
]
9090

9191
litellm = [
92-
"litellm>=1.76"
92+
"litellm>=1.76",
93+
"boto3", # for Bedrock support.
9394
]
9495

9596
watsonx = [

test/backends/test_bedrock.py

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
"""Smoke tests for the Bedrock mantle backend helpers."""
import os

import openai
import pytest

import mellea.backends.model_ids
import mellea.backends.model_ids as model_ids
from mellea import MelleaSession
from mellea.backends.bedrock import create_bedrock_mantle_backend
from mellea.backends.openai import OpenAIBackend
from mellea.stdlib.context import ChatContext

# These tests hit live Bedrock endpoints, so skip them entirely when no
# bearer token is available (e.g. in CI).
pytestmark = [
    pytest.mark.llm,
    pytest.mark.skipif(
        "AWS_BEARER_TOKEN_BEDROCK" not in os.environ,
        reason="Skipping Bedrock backend tests if $AWS_BEARER_TOKEN_BEDROCK is not set.",
    ),
]


def _is_bedrock_model(obj) -> bool:
    """True when *obj* is a ModelIdentifier that declares a Bedrock model name."""
    # isinstance is more robust than sniffing `"bedrock_name" in dir(obj)`,
    # which matched any object merely exposing that attribute name.
    return isinstance(obj, model_ids.ModelIdentifier) and obj.bedrock_name is not None


def test_model_ids_exist():
    """Exercise every built-in ModelIdentifier with a bedrock_name against Bedrock."""
    bedrock_models = [
        obj for obj in vars(mellea.backends.model_ids).values() if _is_bedrock_model(obj)
    ]

    print(f"Found {len(bedrock_models)} bedrock-supported models.")
    for model in bedrock_models:
        print(f"Checking {model.bedrock_name}")
        m = MelleaSession(
            backend=create_bedrock_mantle_backend(model_id=model), ctx=ChatContext()
        )
        print(m.chat("What is 1+1?").content)


if __name__ == "__main__":
    test_model_ids_exist()

uv.lock

Lines changed: 44 additions & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)