Commit dd83b19

Merge pull request #781 from koid/feature/support-bedrock-claude3
added support for bedrock/claude3
2 parents eff6468 + 3bae515 · commit dd83b19

File tree: 5 files changed, +20 -4 lines changed


docs/docs/usage-guide/additional_configurations.md (+9 -2)

@@ -162,15 +162,22 @@ To use Amazon Bedrock and its foundational models, add the below configuration:
 
 ```
 [config] # in configuration.toml
-model = "anthropic.claude-v2"
-fallback_models="anthropic.claude-instant-v1"
+model="bedrock/anthropic.claude-3-sonnet-20240229-v1:0"
+model_turbo="bedrock/anthropic.claude-3-sonnet-20240229-v1:0"
+fallback_models=["bedrock/anthropic.claude-v2:1"]
 
 [aws] # in .secrets.toml
 bedrock_region = "us-east-1"
 ```
 
 Note that you have to add access to foundational models before using them. Please refer to [this document](https://docs.aws.amazon.com/bedrock/latest/userguide/setting-up.html) for more details.
 
+If you are using the claude-3 model, please configure the following settings as there are parameters incompatible with claude-3.
+```
+[litellm]
+drop_params = true
+```
+
 AWS session is automatically authenticated from your environment, but you can also explicitly set `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` environment variables.
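For reference, a minimal Python sketch (not part of this commit) of what the `drop_params` switch does on the litellm side: when enabled, litellm drops request parameters the target model does not accept instead of failing the call. The prompt is illustrative, the model id is taken from the docs above, and AWS credentials are assumed to be available in the environment.

```python
import litellm

# drop_params=True tells litellm to drop request parameters the target model
# does not accept (some defaults are incompatible with claude-3 on Bedrock),
# instead of raising an error for the call.
litellm.drop_params = True

response = litellm.completion(
    model="bedrock/anthropic.claude-3-sonnet-20240229-v1:0",  # model id from the docs above
    messages=[{"role": "user", "content": "Say hello."}],     # illustrative prompt
    max_tokens=100,
)
print(response.choices[0].message.content)
```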

176183

pr_agent/algo/__init__.py (+5)

@@ -23,4 +23,9 @@
     'anthropic.claude-v1': 100000,
     'anthropic.claude-v2': 100000,
     'anthropic/claude-3-opus-20240229': 100000,
+    'bedrock/anthropic.claude-instant-v1': 100000,
+    'bedrock/anthropic.claude-v2': 100000,
+    'bedrock/anthropic.claude-v2:1': 100000,
+    'bedrock/anthropic.claude-3-sonnet-20240229-v1:0': 100000,
+    'bedrock/anthropic.claude-3-haiku-20240307-v1:0': 100000,
 }
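As a rough sketch of how such a token-limit table is typically consumed (the helper below is an assumption for illustration, not code from this commit), the configured model name is looked up to size prompts against the model's context window:

```python
# Illustrative only: mirrors the shape of the MAX_TOKENS table extended above.
MAX_TOKENS = {
    'bedrock/anthropic.claude-v2:1': 100000,
    'bedrock/anthropic.claude-3-sonnet-20240229-v1:0': 100000,
    'bedrock/anthropic.claude-3-haiku-20240307-v1:0': 100000,
}

def get_token_limit(model: str, default: int = 4096) -> int:
    # Fall back to a conservative default for models that are not registered.
    return MAX_TOKENS.get(model, default)

print(get_token_limit('bedrock/anthropic.claude-3-haiku-20240307-v1:0'))  # 100000
```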

pr_agent/algo/ai_handlers/litellm_ai_handler.py (+3)

@@ -36,6 +36,8 @@ def __init__(self):
         assert litellm_token, "LITELLM_TOKEN is required"
         os.environ["LITELLM_TOKEN"] = litellm_token
         litellm.use_client = True
+        if get_settings().get("LITELLM.DROP_PARAMS", None):
+            litellm.drop_params = get_settings().litellm.drop_params
         if get_settings().get("OPENAI.ORG", None):
             litellm.organization = get_settings().openai.org
         if get_settings().get("OPENAI.API_TYPE", None):
@@ -68,6 +70,7 @@ def __init__(self):
             )
         if get_settings().get("AWS.BEDROCK_REGION", None):
             litellm.AmazonAnthropicConfig.max_tokens_to_sample = 2000
+            litellm.AmazonAnthropicClaude3Config.max_tokens = 2000
             self.aws_bedrock_client = boto3.client(
                 service_name="bedrock-runtime",
                 region_name=get_settings().aws.bedrock_region,
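Putting the handler changes together, a hedged end-to-end sketch (not the exact pr-agent call path; the `aws_bedrock_client` keyword is assumed to be supported by this litellm version) of how the pre-built bedrock-runtime client, the claude-3 max_tokens cap, and drop_params come into play:

```python
import boto3
import litellm

# Assumption: this litellm release accepts a pre-built bedrock-runtime client
# via the aws_bedrock_client keyword, mirroring how the handler wires it in.
bedrock_client = boto3.client(
    service_name="bedrock-runtime",
    region_name="us-east-1",  # corresponds to [aws].bedrock_region
)

litellm.drop_params = True                              # per the docs change above
litellm.AmazonAnthropicClaude3Config.max_tokens = 2000  # cap set by the handler above

response = litellm.completion(
    model="bedrock/anthropic.claude-3-sonnet-20240229-v1:0",
    messages=[{"role": "user", "content": "Review this pull request."}],  # illustrative prompt
    aws_bedrock_client=bedrock_client,
)
print(response.choices[0].message.content)
```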

pr_agent/settings/configuration.toml (+2 -1)

@@ -192,7 +192,8 @@ pr_commands = [
 url = ""
 
 [litellm]
-#use_client = false
+# use_client = false
+# drop_params = false
 
 [pr_similar_issue]
 skip_comments = false

requirements.txt (+1 -1)

@@ -9,7 +9,7 @@ GitPython==3.1.32
 google-cloud-aiplatform==1.35.0
 google-cloud-storage==2.10.0
 Jinja2==3.1.2
-litellm==1.29.1
+litellm==1.31.10
 loguru==0.7.2
 msrest==0.7.1
 openai==1.13.3
