17 changes: 10 additions & 7 deletions lib/chat_models/chat_vertex_ai.ex
@@ -102,9 +102,9 @@ defmodule LangChain.ChatModels.ChatVertexAI do
# lengthy response, a longer time limit may be required. However, when it
# goes on too long by itself, it tends to hallucinate more.
field :receive_timeout, :integer, default: @receive_timeout

field :stream, :boolean, default: false
field :json_response, :boolean, default: false
field :json_schema, :map, default: nil
field :stream, :boolean, default: false

# A list of maps for callback handlers (treated as internal)
field :callbacks, {:array, :map}, default: []
@@ -124,8 +124,9 @@ defmodule LangChain.ChatModels.ChatVertexAI do
:top_k,
:thinking_config,
:receive_timeout,
:stream,
:json_response
:json_response,
:json_schema,
:stream
]
@required_fields [
:endpoint,
@@ -177,13 +178,13 @@ defmodule LangChain.ChatModels.ChatVertexAI do
|> List.flatten()
|> List.wrap()

response_mime_type =
{response_mime_type, response_schema} =
case vertex_ai.json_response do
true ->
"application/json"
{"application/json", vertex_ai.json_schema}

false ->
nil
{nil, nil}
end

generation_config_params =
@@ -194,6 +195,7 @@ defmodule LangChain.ChatModels.ChatVertexAI do
}
|> Utils.conditionally_add_to_map("thinkingConfig", vertex_ai.thinking_config)
|> Utils.conditionally_add_to_map("response_mime_type", response_mime_type)
|> Utils.conditionally_add_to_map("response_schema", response_schema)

req =
%{
@@ -792,6 +794,7 @@ defmodule LangChain.ChatModels.ChatVertexAI do
:thinking_config,
:receive_timeout,
:json_response,
:json_schema,
:stream
],
@current_config_version
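Net effect of the changes above: `ChatVertexAI` now accepts a `:json_schema` map alongside `:json_response`, and `for_api/3` forwards it as `response_schema` in the generation config. A minimal usage sketch, assuming only the field names and `for_api/3` shape shown in this diff (model and endpoint values are borrowed from the tests):

```elixir
# Sketch only (not part of this PR): configuring ChatVertexAI with the new :json_schema field.
alias LangChain.ChatModels.ChatVertexAI

json_schema = %{
  "type" => "object",
  "properties" => %{
    "name" => %{"type" => "string"},
    "age" => %{"type" => "integer"}
  }
}

{:ok, vertex_ai} =
  ChatVertexAI.new(%{
    "model" => "gemini-2.5-flash",
    "endpoint" => "http://localhost:1234/",
    "json_response" => true,
    "json_schema" => json_schema
  })

# for_api/3 should now place both keys into the generation config:
data = ChatVertexAI.for_api(vertex_ai, [], [])
data["generationConfig"]["response_mime_type"]
#=> "application/json"
data["generationConfig"]["response_schema"]
#=> %{"type" => "object", "properties" => %{...}}
```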
9 changes: 4 additions & 5 deletions test/chat_models/chat_google_ai_test.exs
@@ -125,10 +125,9 @@ defmodule ChatModels.ChatGoogleAITest do

test "generated a map containing response_mime_type and response_schema", %{params: params} do
google_ai =
ChatGoogleAI.new!(
params
|> Map.merge(%{"json_response" => true, "json_schema" => %{"type" => "object"}})
)
params
|> Map.merge(%{"json_response" => true, "json_schema" => %{"type" => "object"}})
|> ChatGoogleAI.new!()

data = ChatGoogleAI.for_api(google_ai, [], [])

@@ -793,7 +792,7 @@ defmodule ChatModels.ChatGoogleAITest do

describe "serialize_config/2" do
test "does not include the API key or callbacks" do
model = ChatGoogleAI.new!(%{model: "gpt-4o"})
model = ChatGoogleAI.new!(%{model: @test_model})
result = ChatGoogleAI.serialize_config(model)
assert result["version"] == 1
refute Map.has_key?(result, "api_key")
90 changes: 70 additions & 20 deletions test/chat_models/chat_vertex_ai_test.exs
@@ -13,6 +13,8 @@ defmodule ChatModels.ChatVertexAITest do
alias LangChain.LangChainError
alias LangChain.TokenUsage

@test_model "gemini-2.5-flash"

setup do
{:ok, hello_world} =
Function.new(%{
@@ -23,7 +25,7 @@ defmodule ChatModels.ChatVertexAITest do

model =
ChatVertexAI.new!(%{
"model" => "gemini-pro",
"model" => @test_model,
"endpoint" => "http://localhost:1234/"
})

@@ -33,33 +35,64 @@ defmodule ChatModels.ChatVertexAITest do
describe "new/1" do
test "works with minimal attr" do
assert {:ok, %ChatVertexAI{} = vertex_ai} =
ChatVertexAI.new(%{
"model" => "gemini-pro",
"endpoint" => "http://localhost:1234/"
})
ChatVertexAI.new(%{"model" => @test_model, "endpoint" => "http://localhost:1234/"})

assert vertex_ai.model == "gemini-pro"
assert vertex_ai.model == @test_model
end

test "returns error when invalid" do
assert {:error, changeset} = ChatVertexAI.new(%{"model" => nil})
assert {:error, changeset} = ChatVertexAI.new(%{"model" => nil, "endpoint" => nil})
refute changeset.valid?
assert {"can't be blank", _} = changeset.errors[:model]
end
end

describe "for_api/3" do
setup do
test "supports overriding the API endpoint" do
override_url = "http://localhost:1234/"

model =
ChatVertexAI.new!(%{
model: @test_model,
endpoint: override_url
})

assert model.endpoint == override_url
end

test "supports setting json_response and json_schema" do
json_schema = %{
"type" => "object",
"properties" => %{
"name" => %{"type" => "string"},
"age" => %{"type" => "integer"}
}
}

{:ok, vertex_ai} =
ChatVertexAI.new(%{
"model" => "gemini-pro",
"model" => @test_model,
"endpoint" => "http://localhost:1234/",
"temperature" => 1.0,
"top_p" => 1.0,
"top_k" => 1.0
"json_response" => true,
"json_schema" => json_schema
})

%{vertex_ai: vertex_ai}
assert vertex_ai.json_response == true
assert vertex_ai.json_schema == json_schema
end
end

describe "for_api/3" do
setup do
params = %{
"model" => @test_model,
"endpoint" => "http://localhost:1234/",
"temperature" => 1.0,
"top_p" => 1.0,
"top_k" => 1.0
}

{:ok, vertex_ai} = ChatVertexAI.new(params)

%{vertex_ai: vertex_ai, params: params}
end

test "generates a map for an API call", %{vertex_ai: vertex_ai} do
@@ -168,6 +201,22 @@ defmodule ChatModels.ChatVertexAITest do
assert %{"role" => :model, "parts" => [%{"text" => ^assistant_message}]} = msg2
end

test "generated a map containing response_mime_type and response_schema", %{params: params} do
vertex_ai =
params
|> Map.merge(%{"json_response" => true, "json_schema" => %{"type" => "object"}})
|> ChatVertexAI.new!()

data = ChatVertexAI.for_api(vertex_ai, [], [])

assert %{
"generationConfig" => %{
"response_mime_type" => "application/json",
"response_schema" => %{"type" => "object"}
}
} = data
end

test "generates a map containing function and function call messages", %{vertex_ai: vertex_ai} do
message = "Can you do an action for me?"
arguments = %{"args" => "data"}
@@ -464,7 +513,7 @@ defmodule ChatModels.ChatVertexAITest do

describe "serialize_config/2" do
test "does not include the API key or callbacks" do
model = ChatVertexAI.new!(%{model: "gemini-pro", endpoint: "http://localhost:1234/"})
model = ChatVertexAI.new!(%{model: @test_model, endpoint: "http://localhost:1234/"})
result = ChatVertexAI.serialize_config(model)
assert result["version"] == 1
refute Map.has_key?(result, "api_key")
@@ -474,15 +523,15 @@ defmodule ChatModels.ChatVertexAITest do
test "creates expected map" do
model =
ChatVertexAI.new!(%{
model: "gemini-pro",
model: @test_model,
endpoint: "http://localhost:1234/"
})

result = ChatVertexAI.serialize_config(model)

assert result == %{
"endpoint" => "http://localhost:1234/",
"model" => "gemini-pro",
"model" => @test_model,
"module" => "Elixir.LangChain.ChatModels.ChatVertexAI",
"receive_timeout" => 60000,
"thinking_config" => nil,
Expand All @@ -491,14 +540,15 @@ defmodule ChatModels.ChatVertexAITest do
"top_k" => 1.0,
"top_p" => 1.0,
"version" => 1,
"json_response" => false
"json_response" => false,
"json_schema" => nil
}
end
end

describe "inspect" do
test "redacts the API key" do
chain = ChatVertexAI.new!(%{"model" => "gemini-pro", "endpoint" => "http://localhost:1000"})
chain = ChatVertexAI.new!(%{"model" => @test_model, "endpoint" => "http://localhost:1000"})

changeset = Ecto.Changeset.cast(chain, %{api_key: "1234567890"}, [:api_key])

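With `json_response` enabled and a schema set, the request constrains the model to schema-conforming JSON, so callers can decode the reply directly. A hedged caller-side sketch — the `Jason` dependency and the assumption that the assistant content arrives as a plain JSON string are not part of this diff:

```elixir
# Hypothetical handling of a schema-constrained reply (assumes string content and Jason).
{:ok, decoded} = Jason.decode(assistant_message.content)
# decoded is a map shaped by json_schema, e.g. %{"name" => "Ada", "age" => 36}
```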