Skip to content

Commit

Permalink
Add comments to init templates (#521)
Browse files Browse the repository at this point in the history
  • Loading branch information
kevinthecheung authored Jul 2, 2024
1 parent 1afa05d commit bccbb6d
Show file tree
Hide file tree
Showing 6 changed files with 161 additions and 45 deletions.
35 changes: 26 additions & 9 deletions genkit-tools/cli/config/firebase.index.ts.template
Original file line number Diff line number Diff line change
@@ -1,37 +1,51 @@
import * as z from "zod";

// Import the Genkit core libraries and plugins.
import {generate} from "@genkit-ai/ai";
import {configureGenkit} from "@genkit-ai/core";
import {firebaseAuth} from "@genkit-ai/firebase/auth";
import {onFlow} from "@genkit-ai/firebase/functions";
$GENKIT_MODEL_IMPORT
import * as z from "zod";
$GENKIT_CONFIG_IMPORTS
$GENKIT_MODEL_IMPORT

// From the Firebase plugin, import the functions needed to deploy flows using
// Cloud Functions.
import {firebaseAuth} from "@genkit-ai/firebase/auth";
import {onFlow} from "@genkit-ai/firebase/functions";

configureGenkit({
plugins: [
$GENKIT_CONFIG_PLUGINS
],
// Log debug output to the console.
logLevel: "debug",
// Perform OpenTelemetry instrumentation and enable trace collection.
enableTracingAndMetrics: true,
});

// Define a simple flow that prompts an LLM to generate menu suggestions.
export const menuSuggestionFlow = onFlow(
{
name: "menuSuggestionFlow",
inputSchema: z.string(),
outputSchema: z.string(),
authPolicy: firebaseAuth((user) => {
// Firebase Auth is required to call this flow using the Firebase Functions SDK.
// TODO: Write additional logic tailored to the needs of your app.
// For example:
// By default, the firebaseAuth policy requires that all requests have an
// `Authorization: Bearer` header containing the user's Firebase
// Authentication ID token. All other requests are rejected with error
// 403. If your app client uses the Firebase Cloud Functions callable
// functions feature, the library automatically attaches this header to
// requests.

// You should also set additional policy requirements as appropriate for
// your app. For example:
// if (!user.email_verified) {
// throw new Error("Verified email required to run flow");
// }
}),
},
async (subject) => {
// Construct a request and send it to the model API.
const prompt =
`Suggest an item for the menu of a ${subject} themed restaurant`;

const llmResponse = await generate({
model: $GENKIT_MODEL,
prompt: prompt,
Expand All @@ -40,7 +54,10 @@ export const menuSuggestionFlow = onFlow(
},
});

// Handle the response from the model API. In this sample, we just
// convert it to a string, but more complicated flows might coerce the
// response into structured output or chain the response into another
// LLM call, etc.
return llmResponse.text();
}
);

23 changes: 18 additions & 5 deletions genkit-tools/cli/config/googleCloud.index.ts.template
Original file line number Diff line number Diff line change
@@ -1,25 +1,31 @@
import { generate } from '@genkit-ai/ai';
import { configureGenkit } from '@genkit-ai/core';
import { defineFlow, startFlowsServer } from '@genkit-ai/flow';
$GENKIT_MODEL_IMPORT
import * as z from 'zod';

// Import the Genkit core libraries and plugins.
import {generate} from '@genkit-ai/ai';
import {configureGenkit} from '@genkit-ai/core';
import {defineFlow, startFlowsServer} from '@genkit-ai/flow';
$GENKIT_CONFIG_IMPORTS
$GENKIT_MODEL_IMPORT

configureGenkit({
plugins: [
$GENKIT_CONFIG_PLUGINS
],
logLevel: 'debug',
// Log debug output to the console.
logLevel: "debug",
// Perform OpenTelemetry instrumentation and enable trace collection.
enableTracingAndMetrics: true,
});

// Define a simple flow that prompts an LLM to generate menu suggestions.
export const menuSuggestionFlow = defineFlow(
{
name: 'menuSuggestionFlow',
inputSchema: z.string(),
outputSchema: z.string(),
},
async (subject) => {
// Construct a request and send it to the model API.
const llmResponse = await generate({
prompt: `Suggest an item for the menu of a ${subject} themed restaurant`,
model: $GENKIT_MODEL,
Expand All @@ -28,8 +34,15 @@ export const menuSuggestionFlow = defineFlow(
},
});

// Handle the response from the model API. In this sample, we just convert
// it to a string, but more complicated flows might coerce the response into
// structured output or chain the response into another LLM call, etc.
return llmResponse.text();
}
);

// Start a flow server, which exposes your flows as HTTP endpoints. This call
// must come last, after all of your plug-in configuration and flow definitions.
// You can optionally specify a subset of flows to serve, and configure some
// HTTP server options, but by default, the flow server serves all defined flows.
startFlowsServer();
20 changes: 16 additions & 4 deletions genkit-tools/cli/config/main.go.template
Original file line number Diff line number Diff line change
Expand Up @@ -6,22 +6,25 @@ import (
"fmt"
"log"

// Import the Genkit core libraries.
"github.com/firebase/genkit/go/ai"
"github.com/firebase/genkit/go/genkit"
$GENKIT_MODEL_IMPORT
)

func $GENKIT_FUNC_NAME() {
ctx := context.Background()
if err := $GENKIT_MODEL_INIT; err != nil {
log.Fatal(err)
}

$GENKIT_MODEL_INIT

// Define a simple flow that prompts an LLM to generate menu suggestions.
genkit.DefineFlow("menuSuggestionFlow", func(ctx context.Context, input string) (string, error) {
m := $GENKIT_MODEL_LOOKUP
$GENKIT_MODEL_LOOKUP
if m == nil {
return "", errors.New("menuSuggestionFlow: failed to find model")
}

// Construct a request and send it to the model API.
resp, err := m.Generate(ctx,
ai.NewGenerateRequest(
&ai.GenerationCommonConfig{Temperature: 1},
Expand All @@ -30,13 +33,22 @@ func $GENKIT_FUNC_NAME() {
if err != nil {
return "", err
}

// Handle the response from the model API. In this sample, we just
// convert it to a string, but more complicated flows might coerce the
// response into structured output or chain the response into another
// LLM call, etc.
text, err := resp.Text()
if err != nil {
return "", fmt.Errorf("menuSuggestionFlow: %v", err)
}
return text, nil
})

// Initialize Genkit and start a flow server. This call must come last,
// after all of your plug-in configuration and flow definitions. When you
// pass a nil configuration to Init, Genkit starts a local flow server,
// which you can interact with using the developer UI.
if err := genkit.Init(ctx, nil); err != nil {
log.Fatal(err)
}
Expand Down
18 changes: 15 additions & 3 deletions genkit-tools/cli/config/nextjs.genkit.ts.template
Original file line number Diff line number Diff line change
@@ -1,27 +1,33 @@
"use server"

import * as z from 'zod';

// Import the Genkit core libraries and plugins.
import { generate } from '@genkit-ai/ai';
import { configureGenkit } from '@genkit-ai/core';
import { defineFlow, runFlow } from '@genkit-ai/flow';
$GENKIT_MODEL_IMPORT
import * as z from 'zod';
$GENKIT_CONFIG_IMPORTS
$GENKIT_MODEL_IMPORT

configureGenkit({
plugins: [
$GENKIT_CONFIG_PLUGINS
],
logLevel: 'debug',
// Log debug output to the console.
logLevel: "debug",
// Perform OpenTelemetry instrumentation and enable trace collection.
enableTracingAndMetrics: true,
});

// Define a simple flow that prompts an LLM to generate menu suggestions.
const menuSuggestionFlow = defineFlow(
{
name: 'menuSuggestionFlow',
inputSchema: z.string(),
outputSchema: z.string(),
},
async (subject) => {
// Construct a request and send it to the model API.
const llmResponse = await generate({
prompt: `Suggest an item for the menu of a ${subject} themed restaurant`,
model: $GENKIT_MODEL,
Expand All @@ -30,11 +36,17 @@ const menuSuggestionFlow = defineFlow(
},
});

// Handle the response from the model API. In this sample, we just
// convert it to a string, but more complicated flows might coerce the
// response into structured output or chain the response into another
// LLM call, etc.
return llmResponse.text();
}
);

export async function callMenuSuggestionFlow() {
// Invoke the flow. The value you pass as the second parameter must conform to
// your flow's input schema.
const flowResponse = await runFlow(menuSuggestionFlow, 'banana');
console.log(flowResponse);
}
54 changes: 41 additions & 13 deletions genkit-tools/cli/src/commands/init/init-go.ts
Original file line number Diff line number Diff line change
Expand Up @@ -36,31 +36,57 @@ interface ModelOption {
/** Path to Genkit sample template. */
const templatePath = '../../../config/main.go.template';

/** Ollama init call template. Keep this indentation for the expected output. */
const ollamaInit = `ollama.Init(ctx, ollama.Config{
ServerAddress: "http://127.0.0.1:11434",
Models: []ollama.ModelDefinition{{Name: "gemma"}},
})`;

/** Model to plugin name. */
const modelOptions: Record<ModelProvider, ModelOption> = {
googleai: {
label: 'Google AI',
package: 'github.com/firebase/genkit/go/plugins/googleai',
init: 'googleai.Init(ctx, "")',
lookup: 'googleai.Model("gemini-1.5-flash")',
init: `// Initialize the Google AI plugin. When you pass an empty string for the
\t// apiKey parameter, the Google AI plugin will use the value from the
\t// GOOGLE_GENAI_API_KEY environment variable, which is the recommended
\t// practice.
\tif err := googleai.Init(ctx, ""); err != nil {
\t\tlog.Fatal(err)
\t}`,
lookup: `// The Google AI API provides access to several generative models. Here,
\t\t// we specify gemini-1.5-flash.
\t\tm := googleai.Model("gemini-1.5-flash")`,
},
vertexai: {
label: 'Google Cloud Vertex AI',
package: 'github.com/firebase/genkit/go/plugins/vertexai',
init: 'vertexai.Init(ctx, "", "")',
lookup: 'vertexai.Model("gemini-1.5-flash")',
init: `// Initialize the Vertex AI plugin. When you pass an empty string for the
\t// projectID parameter, the Vertex AI plugin will use the value from the
\t// GCLOUD_PROJECT environment variable. When you pass an empty string for
\t// the location parameter, the plugin uses the default value, us-central1.
\tif err := vertexai.Init(ctx, "", ""); err != nil {
\t\tlog.Fatal(err)
\t}`,
lookup: `// The Vertex AI API provides access to several generative models. Here,
\t\t// we specify gemini-1.5-flash.
\t\tm := vertexai.Model("gemini-1.5-flash")`,
},
ollama: {
label: 'Ollama (e.g. Gemma)',
package: 'github.com/firebase/genkit/go/plugins/ollama',
init: ollamaInit,
lookup: 'ollama.Model("gemma")',
init: `// Initialize the Ollama plugin.
\terr := ollama.Init(ctx, ollama.Config{
\t\t// The address of your Ollama API server. This is often a different host
\t\t// from your app backend (which runs Genkit), in order to run Ollama on
\t\t// a GPU-accelerated machine.
\t\tServerAddress: "http://127.0.0.1:11434",
\t\t// The models you want to use. These must already be downloaded and
\t\t// available to the Ollama server.
\t\tModels: []ollama.ModelDefinition{{Name: "gemma"}},
\t})
\tif err != nil {
\t\tlog.Fatal(err)
\t}`,
lookup: `// Ollama provides an interface to many open generative models. Here,
\t\t// we specify Google's Gemma model, which we configured the Ollama
\t\t// plugin to provide, above.
\t\tm := ollama.Model("gemma")`,
},
none: {
label: 'None',
Expand Down Expand Up @@ -206,7 +232,9 @@ async function generateSampleFile(model: ModelProvider) {
)
.replace(
'$GENKIT_MODEL_IMPORT',
modelOption.package ? `"${modelOption.package}"` : ''
modelOption.package
? `\n\t// Import the ${modelOption.label} plugin.\n\t"${modelOption.package}"`
: ''
)
.replace('$GENKIT_MODEL_INIT', modelOption.init)
.replace('$GENKIT_MODEL_LOOKUP', modelOption.lookup);
Expand Down
Loading

0 comments on commit bccbb6d

Please sign in to comment.