// Copyright (c) Microsoft. All rights reserved.

+using System.ComponentModel;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.Google;
@@ -10,7 +11,7 @@ namespace FunctionCalling;
/// <summary>
/// These examples demonstrate two ways functions called by the Gemini LLM can be invoked using the SK streaming and non-streaming AI API:
///
-/// 1. Automatic Invocation by SK:
+/// 1. Automatic Invocation by SK (with and without nullable properties):
/// Functions called by the LLM are invoked automatically by SK. The results of these function invocations
/// are automatically added to the chat history and returned to the LLM. The LLM reasons about the chat history
/// and generates the final response.
@@ -86,6 +87,92 @@ public async Task VertexAIChatCompletionWithFunctionCalling()
        await this.RunSampleAsync(kernel);
    }

+    [RetryFact]
+    public async Task GoogleAIFunctionCallingNullable()
+    {
+        Console.WriteLine("============= Google AI - Gemini Chat Completion with function calling (nullable properties) =============");
+
+        Assert.NotNull(TestConfiguration.GoogleAI.ApiKey);
+
+        var kernelBuilder = Kernel.CreateBuilder()
+            .AddGoogleAIGeminiChatCompletion(
+                modelId: TestConfiguration.GoogleAI.Gemini.ModelId,
+                apiKey: TestConfiguration.GoogleAI.ApiKey);
+
+        kernelBuilder.Plugins.AddFromType<MyWeatherPlugin>();
+
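+        // FunctionChoiceBehavior.Auto() lets the model decide whether (and which) plugin functions to call;
+        // SK then invokes them automatically and returns the results to the model, as described in the class summary above.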
+        var promptExecutionSettings = new GeminiPromptExecutionSettings()
+        {
+            FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(),
+        };
+
+        var kernel = kernelBuilder.Build();
+
+        var response = await kernel.InvokePromptAsync("Hi, what's the weather in New York?", new(promptExecutionSettings));
+
+        Console.WriteLine(response.ToString());
+    }
+
+    private sealed class MyWeatherPlugin
+    {
+        [KernelFunction]
+        [Description("Get the weather for a given location.")]
+        private string GetWeather(WeatherRequest request)
+        {
+            return $"The weather in {request?.Location} is sunny.";
+        }
+    }
+
+    [RetryFact]
+    public async Task VertexAIFunctionCallingNullable()
+    {
+        Console.WriteLine("============= Vertex AI - Gemini Chat Completion with function calling (nullable properties) =============");
+
+        Assert.NotNull(TestConfiguration.VertexAI.BearerKey);
+        Assert.NotNull(TestConfiguration.VertexAI.Location);
+        Assert.NotNull(TestConfiguration.VertexAI.ProjectId);
+
+        var kernelBuilder = Kernel.CreateBuilder()
+            .AddVertexAIGeminiChatCompletion(
+                modelId: TestConfiguration.VertexAI.Gemini.ModelId,
+                bearerKey: TestConfiguration.VertexAI.BearerKey,
+                location: TestConfiguration.VertexAI.Location,
+                projectId: TestConfiguration.VertexAI.ProjectId);
+
+        // To generate a bearer key, you need the Google Cloud SDK installed (or you can use the Google Cloud web console) and run:
+        //
+        // gcloud auth print-access-token
+        //
+        // The code above passes the bearer key as a plain string, which is not recommended in production code,
+        // especially if the IChatCompletionService will be long-lived, because tokens generated by the Google Cloud SDK expire after 1 hour.
+        // You should instead use a bearer key provider, which generates the token on demand:
+        //
+        // Example:
+        //
+        // Kernel kernel = Kernel.CreateBuilder()
+        //     .AddVertexAIGeminiChatCompletion(
+        //         modelId: TestConfiguration.VertexAI.Gemini.ModelId,
+        //         bearerKeyProvider: () =>
+        //         {
+        //             // This is just an example; in production we recommend using the Google Cloud SDK to generate your bearer key token.
+        //             // This delegate will be called on every request, so consider using a caching strategy and refreshing the token
+        //             // when it is expired or close to expiration (a rough caching sketch follows this example).
+        //             return GetBearerKey();
+        //         },
+        //         location: TestConfiguration.VertexAI.Location,
+        //         projectId: TestConfiguration.VertexAI.ProjectId);
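+        //
+        // A minimal caching sketch for such a provider (illustrative only; GetBearerKey() stands in for your own
+        // Google Cloud SDK call, and the one-hour token lifetime is an assumption to verify for your environment).
+        // The bearerKeyProvider delegate above would then simply return GetCachedBearerKey():
+        //
+        // private static string? s_cachedBearerKey;
+        // private static DateTimeOffset s_bearerKeyExpiresAt;
+        //
+        // private static string GetCachedBearerKey()
+        // {
+        //     // Refresh a few minutes before the assumed one-hour expiration.
+        //     if (s_cachedBearerKey is null || DateTimeOffset.UtcNow >= s_bearerKeyExpiresAt - TimeSpan.FromMinutes(5))
+        //     {
+        //         s_cachedBearerKey = GetBearerKey();
+        //         s_bearerKeyExpiresAt = DateTimeOffset.UtcNow.AddHours(1);
+        //     }
+        //
+        //     return s_cachedBearerKey;
+        // }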
+
+        kernelBuilder.Plugins.AddFromType<MyWeatherPlugin>();
+
+        var promptExecutionSettings = new GeminiPromptExecutionSettings()
+        {
+            FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(),
+        };
+        var kernel = kernelBuilder.Build();
+        var response = await kernel.InvokePromptAsync("Hi, what's the weather in New York?", new(promptExecutionSettings));
+        Console.WriteLine(response.ToString());
+    }
+
    private async Task RunSampleAsync(Kernel kernel)
    {
        // Add a plugin with some helper functions we want to allow the model to utilize.
@@ -214,4 +301,9 @@ private async Task RunSampleAsync(Kernel kernel)
        }
        */
    }
+
+    private sealed class WeatherRequest
+    {
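+        // Nullable property: this is what the "nullable properties" samples above exercise when the
+        // GetWeather function declaration is generated and sent to Gemini.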
+        public string? Location { get; set; }
+    }
}