Skip to content

Commit

Permalink
Issue 697 - Show API error when calling .complete on Anthropic client (#698)
Browse files Browse the repository at this point in the history
  • Loading branch information
Jbrito6492 authored Jul 7, 2024
1 parent f82fa45 commit 2438126
Show file tree
Hide file tree
Showing 4 changed files with 40 additions and 1 deletion.
14 changes: 13 additions & 1 deletion lib/langchain/llm/anthropic.rb
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,10 @@ def complete(
parameters[:metadata] = metadata if metadata
parameters[:stream] = stream if stream

response = client.complete(parameters: parameters)
response = with_api_error_handling do
client.complete(parameters: parameters)
end

Langchain::LLM::AnthropicResponse.new(response)
end

Expand Down Expand Up @@ -114,6 +117,15 @@ def chat(params = {})
Langchain::LLM::AnthropicResponse.new(response)
end

# Runs the given block (an Anthropic API call) and inspects the parsed
# response hash for an error payload before handing it back.
#
# @yieldreturn [Hash] the parsed Anthropic API response
# @return [Hash, nil] the response, or nil when the response is empty
# @raise [Langchain::LLM::ApiError] when the response contains an "error" key
def with_api_error_handling
  response = yield
  return if response.empty?

  # `response` is guaranteed non-nil here (`empty?` above would have raised
  # NoMethodError on nil), so safe navigation is unnecessary. Use the
  # idiomatic `raise Class, message` form rather than `raise Class.new msg`.
  raise Langchain::LLM::ApiError, "Anthropic API error: #{response.dig("error", "message")}" if response.dig("error")

  response
end

private

def set_extra_headers!
Expand Down
1 change: 1 addition & 0 deletions lib/langchain/llm/base.rb
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ class ApiError < StandardError; end
# Langchain.rb provides a common interface to interact with all supported LLMs:
#
# - {Langchain::LLM::AI21}
# - {Langchain::LLM::Anthropic}
# - {Langchain::LLM::Azure}
# - {Langchain::LLM::Cohere}
# - {Langchain::LLM::GooglePalm}
Expand Down
7 changes: 7 additions & 0 deletions spec/fixtures/llm/anthropic/error.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
{
"type": "error",
"error": {
"type": "invalid_request_error",
"message": "The request is invalid. Please check the request and try again."
}
}
19 changes: 19 additions & 0 deletions spec/langchain/llm/anthropic_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,25 @@
expect(subject.complete(prompt: completion).model).to eq("claude-2.1")
end
end

# Verifies that #complete surfaces Anthropic API errors instead of silently
# wrapping an error payload in an AnthropicResponse (issue #697).
context "with failed API call" do
  # Canned error payload matching Anthropic's error response shape.
  let(:fixture) { File.read("spec/fixtures/llm/anthropic/error.json") }

  before do
    # Stub the underlying client so #complete receives the parsed error hash
    # (the stub's parameter hash mirrors what #complete builds from DEFAULTS).
    allow(subject.client).to receive(:complete)
      .with(parameters: {
        model: described_class::DEFAULTS[:completion_model_name],
        prompt: completion,
        temperature: described_class::DEFAULTS[:temperature],
        max_tokens_to_sample: described_class::DEFAULTS[:max_tokens_to_sample]
      })
      .and_return(JSON.parse(fixture))
  end

  it "raises an error" do
    # Message must include the "error.message" field from the fixture.
    expect { subject.complete(prompt: completion) }.to raise_error(Langchain::LLM::ApiError, "Anthropic API error: The request is invalid. Please check the request and try again.")
  end
end
end

describe "#chat" do
Expand Down

0 comments on commit 2438126

Please sign in to comment.