diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 9d204fea5..2abd8d21f 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -76,7 +76,7 @@ jobs:
         run: |
           echo "Cache key: ${{ needs.setup.outputs.cache-key }}"
           make helm-install
-          kubectl wait --for=condition=Accepted agents.kagent.dev -n kagent --all --timeout=60s
+          kubectl wait --for=condition=Ready agents.kagent.dev -n kagent --all --timeout=60s

       - name: Run e2e tests
         working-directory: go
diff --git a/.github/workflows/tag.yaml b/.github/workflows/tag.yaml
index b1cc2efdb..b109dd804 100644
--- a/.github/workflows/tag.yaml
+++ b/.github/workflows/tag.yaml
@@ -83,12 +83,34 @@ jobs:
             export VERSION=$(echo "$GITHUB_REF" | cut -c12-)
           fi
           make helm-publish
+  release-python-packages:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
+    steps:
+      - name: 'Checkout GitHub Action'
+        uses: actions/checkout@main
+      - name: 'Release Python Packages'
+        working-directory: python
+        run: |
+          # if workflow_dispatch is used, use the version input
+          if [ -n "${{ github.event.inputs.version }}" ]; then
+            export VERSION=${{ github.event.inputs.version }}
+          else
+            export VERSION=$(echo "$GITHUB_REF" | cut -c12-)
+          fi
+          uv version $VERSION --package kagent-adk
+          uv build --package kagent-adk
+          uv publish --token ${{ secrets.PYPI_TOKEN }}
+
   release:
     # Only run release after images and helm chart are pushed
     # In the future we can take the chart from the helm action,
     # and build the CLI beforehand.
     needs:
       - push-helm-chart
+      - release-python-packages
     runs-on: ubuntu-latest
     permissions:
       contents: write
diff --git a/Makefile b/Makefile
index 8f071dda1..ef9138583 100644
--- a/Makefile
+++ b/Makefile
@@ -7,7 +7,7 @@ HELM_DIST_FOLDER ?= dist
 BUILD_DATE := $(shell date -u '+%Y-%m-%d')
 GIT_COMMIT := $(shell git rev-parse --short HEAD || echo "unknown")
-VERSION ?= $(shell git describe --tags --always --dirty 2>/dev/null | sed 's/-dirty//' | grep v || echo "v0.0.0+$(GIT_COMMIT)")
+VERSION ?= $(shell git describe --tags --always 2>/dev/null | grep v || echo "v0.0.0+$(GIT_COMMIT)")

 # Local architecture detection to build for the current platform
 LOCALARCH ?= $(shell uname -m | sed 's/x86_64/amd64/' | sed 's/aarch64/arm64/')

@@ -52,7 +52,7 @@ LDFLAGS := "-X github.com/kagent-dev/kagent/go/internal/version.Version=$(VERSIO
 TOOLS_UV_VERSION ?= 0.7.2
 TOOLS_BUN_VERSION ?= 1.2.16
 TOOLS_NODE_VERSION ?= 22.16.0
-TOOLS_PYTHON_VERSION ?= 3.12
+TOOLS_PYTHON_VERSION ?= 3.13
 TOOLS_KIND_IMAGE_VERSION ?= 1.33.1

 # build args
diff --git a/go/cli/cmd/kagent/main.go b/go/cli/cmd/kagent/main.go
index 4e60d3c92..e3d257e0a 100644
--- a/go/cli/cmd/kagent/main.go
+++ b/go/cli/cmd/kagent/main.go
@@ -70,6 +70,8 @@ func main() {
     invokeCmd.Flags().StringVarP(&invokeCfg.Agent, "agent", "a", "", "Agent")
     invokeCmd.Flags().BoolVarP(&invokeCfg.Stream, "stream", "S", false, "Stream the response")
     invokeCmd.Flags().StringVarP(&invokeCfg.File, "file", "f", "", "File to read the task from")
+    invokeCmd.Flags().StringVarP(&invokeCfg.URLOverride, "url-override", "u", "", "URL override")
+    invokeCmd.Flags().MarkHidden("url-override")

     bugReportCmd := &cobra.Command{
         Use: "bug-report",
diff --git a/go/cli/internal/cli/chat.go b/go/cli/internal/cli/chat.go
index 1fdd32659..c34f330ec 100644
--- a/go/cli/internal/cli/chat.go
+++ b/go/cli/internal/cli/chat.go
@@ -63,11 +63,8 @@ func ChatCmd(c *ishell.Context) {
     }

     agentNames := make([]string, len(agentListResp.Data))
-    for i, team := range agentListResp.Data {
-        if team.Component.Label == "" {
-            continue
-        }
-        agentNames[i] = team.Component.Label
+    for i, agent := range agentListResp.Data {
+        agentNames[i] = utils.ConvertToKubernetesIdentifier(agent.ID)
+    }

     selectedTeamIdx := c.MultiChoice(agentNames, "Select an agent:")
@@ -119,8 +116,10 @@ func ChatCmd(c *ishell.Context) {
         session = existingSessions[selectedSessionIdx-1]
     }

+    agentRef := utils.ConvertToKubernetesIdentifier(agentResp.ID)
+
     // Setup A2A client
-    a2aURL := fmt.Sprintf("%s/a2a/%s/%s", cfg.APIURL, cfg.Namespace, agentResp.Component.Label)
+    a2aURL := fmt.Sprintf("%s/a2a/%s", cfg.APIURL, agentRef)
     a2aClient, err := a2aclient.NewA2AClient(a2aURL)
     if err != nil {
         c.Printf("Failed to create A2A client: %v\n", err)
@@ -131,7 +130,7 @@ func ChatCmd(c *ishell.Context) {
     cancel := startPortForward(context.Background())
     defer cancel()

-    promptStr := config.BoldGreen(fmt.Sprintf("%s--%s> ", agentResp.Component.Label, session.ID))
+    promptStr := config.BoldGreen(fmt.Sprintf("%s--%s> ", agentRef, session.ID))
     c.SetPrompt(promptStr)
     c.ShowPrompt(true)
diff --git a/go/cli/internal/cli/const.go b/go/cli/internal/cli/const.go
index 7310c7df2..a27fa95c0 100644
--- a/go/cli/internal/cli/const.go
+++ b/go/cli/internal/cli/const.go
@@ -1,14 +1,15 @@
 package cli

 import (
-    "github.com/kagent-dev/kagent/go/controller/api/v1alpha1"
     "os"
     "strings"
+
+    "github.com/kagent-dev/kagent/go/controller/api/v1alpha1"
 )

 const (
     // Version is the current version of the kagent CLI
-    DefaultModelProvider = v1alpha1.OpenAI
+    DefaultModelProvider = v1alpha1.ModelProviderOpenAI
     DefaultHelmOciRegistry = "oci://ghcr.io/kagent-dev/kagent/helm/"

     //Provider specific env variables
@@ -31,16 +32,16 @@ func GetModelProvider() v1alpha1.ModelProvider {
         return DefaultModelProvider
     }
     switch modelProvider {
-    case GetModelProviderHelmValuesKey(v1alpha1.OpenAI):
-        return v1alpha1.OpenAI
-    case GetModelProviderHelmValuesKey(v1alpha1.Ollama):
-        return v1alpha1.Ollama
-    case GetModelProviderHelmValuesKey(v1alpha1.Anthropic):
-        return v1alpha1.Anthropic
-    case GetModelProviderHelmValuesKey(v1alpha1.AzureOpenAI):
-        return v1alpha1.AzureOpenAI
+    case GetModelProviderHelmValuesKey(v1alpha1.ModelProviderOpenAI):
+        return v1alpha1.ModelProviderOpenAI
+    case GetModelProviderHelmValuesKey(v1alpha1.ModelProviderOllama):
+        return v1alpha1.ModelProviderOllama
+    case GetModelProviderHelmValuesKey(v1alpha1.ModelProviderAnthropic):
+        return v1alpha1.ModelProviderAnthropic
+    case GetModelProviderHelmValuesKey(v1alpha1.ModelProviderAzureOpenAI):
+        return v1alpha1.ModelProviderAzureOpenAI
     default:
-        return v1alpha1.OpenAI
+        return v1alpha1.ModelProviderOpenAI
     }
 }

@@ -56,11 +57,11 @@ func GetModelProviderHelmValuesKey(provider v1alpha1.ModelProvider) string {
 // GetProviderAPIKey returns API_KEY env var name from provider type
 func GetProviderAPIKey(provider v1alpha1.ModelProvider) string {
     switch provider {
-    case v1alpha1.OpenAI:
+    case v1alpha1.ModelProviderOpenAI:
         return OPENAI_API_KEY
-    case v1alpha1.Anthropic:
+    case v1alpha1.ModelProviderAnthropic:
         return ANTHROPIC_API_KEY
-    case v1alpha1.AzureOpenAI:
+    case v1alpha1.ModelProviderAzureOpenAI:
         return AZUREOPENAI_API_KEY
     default:
         return ""
diff --git a/go/cli/internal/cli/const_test.go b/go/cli/internal/cli/const_test.go
index 30c2d4f23..4a19b69f4 100644
--- a/go/cli/internal/cli/const_test.go
+++ b/go/cli/internal/cli/const_test.go
@@ -1,9 +1,10 @@
 package cli

 import (
-    "github.com/kagent-dev/kagent/go/controller/api/v1alpha1"
     "os"
     "testing"
+
+    "github.com/kagent-dev/kagent/go/controller/api/v1alpha1"
 )

 func TestGetModelProvider(t *testing.T) {
@@ -23,29 +24,29 @@ func TestGetModelProvider(t *testing.T) {
         },
         {
             name: "OpenAI provider",
-            envVarValue: string(v1alpha1.OpenAI),
-            expectedResult: v1alpha1.OpenAI,
+            envVarValue: string(v1alpha1.ModelProviderOpenAI),
+            expectedResult: v1alpha1.ModelProviderOpenAI,
             expectedAPIKey: OPENAI_API_KEY,
             expectedHelmKey: "openAI",
         },
         {
             name: "AzureOpenAI provider",
-            envVarValue: string(v1alpha1.AzureOpenAI),
-            expectedResult: v1alpha1.AzureOpenAI,
+            envVarValue: string(v1alpha1.ModelProviderAzureOpenAI),
+            expectedResult: v1alpha1.ModelProviderAzureOpenAI,
             expectedAPIKey: AZUREOPENAI_API_KEY,
             expectedHelmKey: "azureOpenAI",
         },
         {
             name: "Anthropic provider",
-            envVarValue: string(v1alpha1.Anthropic),
-            expectedResult: v1alpha1.Anthropic,
+            envVarValue: string(v1alpha1.ModelProviderAnthropic),
+            expectedResult: v1alpha1.ModelProviderAnthropic,
             expectedAPIKey: "ANTHROPIC_API_KEY",
             expectedHelmKey: "anthropic",
         },
         {
             name: "Ollama provider",
-            envVarValue: string(v1alpha1.Ollama),
-            expectedResult: v1alpha1.Ollama,
+            envVarValue: string(v1alpha1.ModelProviderOllama),
+            expectedResult: v1alpha1.ModelProviderOllama,
             expectedAPIKey: "",
             expectedHelmKey: "ollama",
         },
diff --git a/go/cli/internal/cli/get.go b/go/cli/internal/cli/get.go
index e82ccc176..47683c4c3 100644
--- a/go/cli/internal/cli/get.go
+++ b/go/cli/internal/cli/get.go
@@ -88,15 +88,15 @@ func GetToolCmd(cfg *config.Config) {
 }

 func printTools(tools []database.Tool) error {
-    headers := []string{"#", "ID", "PROVIDER", "LABEL", "CREATED"}
+    headers := []string{"#", "NAME", "SERVER_NAME", "DESCRIPTION", "CREATED"}
     rows := make([][]string, len(tools))
     for i, tool := range tools {
         rows[i] = []string{
             strconv.Itoa(i + 1),
-            strconv.Itoa(int(tool.Model.ID)),
-            tool.Component.Provider,
-            tool.Component.Label,
-            tool.Model.CreatedAt.String(),
+            tool.ID,
+            tool.ServerName,
+            tool.Description,
+            tool.CreatedAt.Format(time.RFC3339),
         }
     }

@@ -122,14 +122,14 @@ func printSessions(sessions []*database.Session) error {
     headers := []string{"#", "NAME", "AGENT", "CREATED"}
     rows := make([][]string, len(sessions))
     for i, session := range sessions {
-        agentID := -1
+        agentID := ""
         if session.AgentID != nil {
-            agentID = int(*session.AgentID)
+            agentID = *session.AgentID
         }
         rows[i] = []string{
             strconv.Itoa(i + 1),
             session.ID,
-            strconv.Itoa(agentID),
+            agentID,
             session.CreatedAt.Format(time.RFC3339),
         }
     }
diff --git a/go/cli/internal/cli/invoke.go b/go/cli/internal/cli/invoke.go
index 0eb6d991f..fec8a2e8b 100644
--- a/go/cli/internal/cli/invoke.go
+++ b/go/cli/internal/cli/invoke.go
@@ -14,12 +14,13 @@ import (
 )

 type InvokeCfg struct {
-    Config  *config.Config
-    Task    string
-    File    string
-    Session string
-    Agent   string
-    Stream  bool
+    Config      *config.Config
+    Task        string
+    File        string
+    Session     string
+    Agent       string
+    Stream      bool
+    URLOverride string
 }

 func InvokeCmd(ctx context.Context, cfg *InvokeCfg) {
@@ -60,123 +61,75 @@ func InvokeCmd(ctx context.Context, cfg *InvokeCfg) {
         return
     }

-    // Start port forwarding for A2A
-    cancel := startPortForward(ctx)
-    defer cancel()
-
-    // If session is set invoke within a session.
-    if cfg.Session != "" {
+    var a2aClient *a2aclient.A2AClient
+    var err error
+    if cfg.URLOverride != "" {
+        a2aClient, err = a2aclient.NewA2AClient(cfg.URLOverride)
+        if err != nil {
+            fmt.Fprintf(os.Stderr, "Error creating A2A client: %v\n", err)
+            return
+        }
+    } else {
         if cfg.Agent == "" {
             fmt.Fprintln(os.Stderr, "Agent is required")
             return
         }

-        // Setup A2A client
         a2aURL := fmt.Sprintf("%s/a2a/%s/%s", cfg.Config.APIURL, cfg.Config.Namespace, cfg.Agent)
-        a2aClient, err := a2aclient.NewA2AClient(a2aURL)
+        a2aClient, err = a2aclient.NewA2AClient(a2aURL)
         if err != nil {
             fmt.Fprintf(os.Stderr, "Error creating A2A client: %v\n", err)
             return
         }
+    }

-        // Use A2A client to send message
-        if cfg.Stream {
-            ctx, cancel := context.WithTimeout(ctx, 300*time.Second)
-            defer cancel()
-
-            result, err := a2aClient.StreamMessage(ctx, protocol.SendMessageParams{
-                Message: protocol.Message{
-                    Role: protocol.MessageRoleUser,
-                    ContextID: &cfg.Session,
-                    Parts: []protocol.Part{protocol.NewTextPart(task)},
-                },
-            })
-            if err != nil {
-                fmt.Fprintf(os.Stderr, "Error invoking session: %v\n", err)
-                return
-            }
-            StreamA2AEvents(result, cfg.Config.Verbose)
-        } else {
-            ctx, cancel := context.WithTimeout(ctx, 300*time.Second)
-            defer cancel()
-
-            result, err := a2aClient.SendMessage(ctx, protocol.SendMessageParams{
-                Message: protocol.Message{
-                    Role: protocol.MessageRoleUser,
-                    ContextID: &cfg.Session,
-                    Parts: []protocol.Part{protocol.NewTextPart(task)},
-                },
-            })
-            if err != nil {
-                fmt.Fprintf(os.Stderr, "Error invoking session: %v\n", err)
-                return
-            }
-
-            jsn, err := result.MarshalJSON()
-            if err != nil {
-                fmt.Fprintf(os.Stderr, "Error marshaling result: %v\n", err)
-                return
-            }
+    var sessionID *string
+    if cfg.Session != "" {
+        sessionID = &cfg.Session
+    }

-            fmt.Fprintf(os.Stdout, "%+v\n", string(jsn))
+    // Use A2A client to send message
+    if cfg.Stream {
+        ctx, cancel := context.WithTimeout(ctx, 300*time.Second)
+        defer cancel()
+
+        result, err := a2aClient.StreamMessage(ctx, protocol.SendMessageParams{
+            Message: protocol.Message{
+                Kind: protocol.KindMessage,
+                Role: protocol.MessageRoleUser,
+                ContextID: sessionID,
+                Parts: []protocol.Part{protocol.NewTextPart(task)},
+            },
+        })
+        if err != nil {
+            fmt.Fprintf(os.Stderr, "Error invoking session: %v\n", err)
+            return
         }
-
+        StreamA2AEvents(result, cfg.Config.Verbose)
     } else {
-
-        if cfg.Agent == "" {
-            fmt.Fprintln(os.Stderr, "Agent is required")
+        ctx, cancel := context.WithTimeout(ctx, 300*time.Second)
+        defer cancel()
+
+        result, err := a2aClient.SendMessage(ctx, protocol.SendMessageParams{
+            Message: protocol.Message{
+                Kind: protocol.KindMessage,
+                Role: protocol.MessageRoleUser,
+                ContextID: sessionID,
+                Parts: []protocol.Part{protocol.NewTextPart(task)},
+            },
+        })
+        if err != nil {
+            fmt.Fprintf(os.Stderr, "Error invoking session: %v\n", err)
             return
         }

-        // Setup A2A client
-        a2aURL := fmt.Sprintf("%s/a2a/%s/%s", cfg.Config.APIURL, cfg.Config.Namespace, cfg.Agent)
-        a2aClient, err := a2aclient.NewA2AClient(a2aURL)
+        jsn, err := result.MarshalJSON()
         if err != nil {
-            fmt.Fprintf(os.Stderr, "Error creating A2A client: %v\n", err)
+            fmt.Fprintf(os.Stderr, "Error marshaling result: %v\n", err)
             return
         }

-        // Use A2A client to send message (no session)
-        if cfg.Stream {
-            ctx, cancel := context.WithTimeout(ctx, 300*time.Second)
-            defer cancel()
-
-            result, err := a2aClient.StreamMessage(ctx, protocol.SendMessageParams{
-                Message: protocol.Message{
-                    Role: protocol.MessageRoleUser,
-                    ContextID: nil, // No session
-                    Parts: []protocol.Part{protocol.NewTextPart(task)},
-                },
-            })
-            if err != nil {
-                fmt.Fprintf(os.Stderr, "Error invoking task: %v\n", err)
-                return
-            }
-            StreamA2AEvents(result, cfg.Config.Verbose)
-        } else {
-            ctx, cancel := context.WithTimeout(ctx, 300*time.Second)
-            defer cancel()
-
-            result, err := a2aClient.SendMessage(ctx, protocol.SendMessageParams{
-                Message: protocol.Message{
-                    Role: protocol.MessageRoleUser,
-                    ContextID: nil, // No session
-                    Parts: []protocol.Part{protocol.NewTextPart(task)},
-                },
-            })
-            if err != nil {
-                fmt.Fprintf(os.Stderr, "Error invoking task: %v\n", err)
-                return
-            }
-
-            jsn, err := result.MarshalJSON()
-            if err != nil {
-                fmt.Fprintf(os.Stderr, "Error marshaling result: %v\n", err)
-                return
-            }
-
-            fmt.Fprintf(os.Stdout, "%+v\n", string(jsn))
-        }
+        fmt.Fprintf(os.Stdout, "%+v\n", string(jsn))
     }
 }
diff --git a/go/cli/internal/cli/utils.go b/go/cli/internal/cli/utils.go
index c8c9113e0..901556129 100644
--- a/go/cli/internal/cli/utils.go
+++ b/go/cli/internal/cli/utils.go
@@ -2,18 +2,14 @@ package cli

 import (
     "context"
-    "encoding/json"
     "fmt"
     "os"
     "os/exec"
     "strings"
     "time"

-    "github.com/jedib0t/go-pretty/v6/table"
     "github.com/kagent-dev/kagent/go/cli/internal/config"
-    autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client"
     "github.com/kagent-dev/kagent/go/pkg/client"
-    "github.com/kagent-dev/kagent/go/pkg/sse"
     "trpc.group/trpc-go/trpc-a2a-go/protocol"
 )

@@ -72,101 +68,6 @@ func (p *portForward) Stop() {
     }
 }

-func StreamEvents(ch <-chan *sse.Event, usage *autogen_client.ModelsUsage, verbose bool) {
-    // Tool call requests and executions are sent as separate messages, but we should print them together
-    // so if we receive a tool call request, we buffer it until we receive the corresponding tool call execution
-    // We only need to buffer one request and one execution at a time
-    var bufferedToolCallRequest *autogen_client.ToolCallRequestEvent
-    // This is a map of agent source to whether we are currently streaming from that agent
-    // If we are then we don't want to print the whole TextMessage, but only the content of the ModelStreamingEvent
-    streaming := map[string]bool{}
-    for event := range ch {
-        ev, err := autogen_client.ParseEvent(event.Data)
-        if err != nil {
-            // TODO: verbose logging
-            continue
-        }
-        switch typed := ev.(type) {
-        case *autogen_client.TextMessage:
-            // c.Println(typed.Content)
-            usage.Add(typed.ModelsUsage)
-            // If we are streaming from this agent, don't print the whole TextMessage, but only the content of the ModelStreamingEvent
-            if streaming[typed.Source] {
-                fmt.Fprintln(os.Stdout)
-                continue
-            }
-            // Do not re-print the user's input, or system message asking for input
-            if typed.Source == "user" || typed.Source == "system" {
-                continue
-            }
-            if verbose {
-                enc := json.NewEncoder(os.Stdout)
-                enc.SetIndent("", " ")
-                if err := enc.Encode(typed); err != nil {
-                    fmt.Fprintf(os.Stderr, "Error encoding event: %v\n", err)
-                    continue
-                }
-            } else {
-                fmt.Fprintf(os.Stdout, "%s: %s\n", config.BoldYellow("Event Type"), "TextMessage")
-                fmt.Fprintf(os.Stdout, "%s: %s\n", config.BoldGreen("Source"), typed.Source)
-                fmt.Fprintln(os.Stdout)
-                fmt.Fprintln(os.Stdout, typed.Content)
-                fmt.Fprintln(os.Stdout, "----------------------------------")
-                fmt.Fprintln(os.Stdout)
-            }
-        case *autogen_client.ModelClientStreamingChunkEvent:
-            usage.Add(typed.ModelsUsage)
-            streaming[typed.Source] = true
-            if verbose {
-                enc := json.NewEncoder(os.Stdout)
-                enc.SetIndent("", " ")
-                if err := enc.Encode(typed); err != nil {
fmt.Fprintf(os.Stderr, "Error encoding event: %v\n", err) - continue - } - } else { - fmt.Fprintf(os.Stdout, "%s", typed.Content) - } - case *autogen_client.ToolCallRequestEvent: - bufferedToolCallRequest = typed - case *autogen_client.ToolCallExecutionEvent: - if bufferedToolCallRequest == nil { - fmt.Fprintf(os.Stderr, "Received tool call execution before request: %v\n", typed) - continue - } - usage.Add(typed.ModelsUsage) - if verbose { - enc := json.NewEncoder(os.Stdout) - out := map[string]interface{}{ - "request": bufferedToolCallRequest, - "execution": typed, - } - enc.SetIndent("", " ") - if err := enc.Encode(out); err != nil { - fmt.Fprintf(os.Stderr, "Error encoding event: %v\n", err) - continue - } - } else { - fmt.Fprintf(os.Stdout, "%s: %s\n", config.BoldYellow("Event Type"), "ToolCall(s)") - fmt.Fprintf(os.Stdout, "%s: %s\n", config.BoldGreen("Source"), typed.Source) - tw := table.NewWriter() - tw.AppendHeader(table.Row{"#", "Name", "Arguments"}) - for idx, functionRequest := range bufferedToolCallRequest.Content { - tw.AppendRow(table.Row{idx, functionRequest.Name, functionRequest.Arguments}) - } - fmt.Fprintln(os.Stdout, tw.Render()) - } - - if !verbose { - fmt.Fprintln(os.Stdout, "----------------------------------") - fmt.Fprintln(os.Stdout) - } - - bufferedToolCallRequest = nil - } - } -} - func StreamA2AEvents(ch <-chan protocol.StreamingMessageEvent, verbose bool) { for event := range ch { if verbose { diff --git a/go/config/crd/bases/kagent.dev_agents.yaml b/go/config/crd/bases/kagent.dev_agents.yaml index 52a035ac9..0eb5cd7f1 100644 --- a/go/config/crd/bases/kagent.dev_agents.yaml +++ b/go/config/crd/bases/kagent.dev_agents.yaml @@ -15,14 +15,18 @@ spec: scope: Namespaced versions: - additionalPrinterColumns: - - description: Whether or not the agent has been accepted by the system. - jsonPath: .status.conditions[0].status - name: Accepted - type: string - description: The ModelConfig resource referenced by this agent. jsonPath: .spec.modelConfig name: ModelConfig type: string + - description: Whether or not the agent is ready to serve requests. + jsonPath: .status.conditions[?(@.type=='Ready')].status + name: Ready + type: string + - description: Whether or not the agent has been accepted by the system. + jsonPath: .status.conditions[?(@.type=='Accepted')].status + name: Accepted + type: string name: v1alpha1 schema: openAPIV3Schema: @@ -100,6 +104,1949 @@ spec: minItems: 1 type: array type: object + deployment: + properties: + annotations: + additionalProperties: + type: string + type: object + env: + items: + description: EnvVar represents an environment variable present + in a Container. + properties: + name: + description: Name of the environment variable. Must be a + C_IDENTIFIER. + type: string + value: + description: |- + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any service environment variables. If a variable cannot be resolved, + the reference in the input string will be unchanged. Double $$ are reduced + to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. + "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". + Escaped references will never be expanded, regardless of whether the variable + exists or not. + Defaults to "". + type: string + valueFrom: + description: Source for the environment variable's value. + Cannot be used if value is not empty. + properties: + configMapKeyRef: + description: Selects a key of a ConfigMap. 
+ properties: + key: + description: The key to select. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + optional: + description: Specify whether the ConfigMap or its + key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + fieldRef: + description: |- + Selects a field of the pod: supports metadata.name, metadata.namespace, `metadata.labels['']`, `metadata.annotations['']`, + spec.nodeName, spec.serviceAccountName, status.hostIP, status.podIP, status.podIPs. + properties: + apiVersion: + description: Version of the schema the FieldPath + is written in terms of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to select in the + specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits.ephemeral-storage, requests.cpu, requests.memory and requests.ephemeral-storage) are currently supported. + properties: + containerName: + description: 'Container name: required for volumes, + optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format of the + exposed resources, defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret in the pod's + namespace + properties: + key: + description: The key of the secret to select from. Must + be a valid secret key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + optional: + description: Specify whether the Secret or its key + must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name + type: object + type: array + imagePullSecrets: + items: + description: |- + LocalObjectReference contains enough information to let you locate the + referenced object inside the same namespace. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + type: array + labels: + additionalProperties: + type: string + type: object + replicas: + description: If not specified, the default value is 1. 
+ format: int32 + minimum: 1 + type: integer + volumes: + items: + description: Volume represents a named volume in a pod that + may be accessed by any container in the pod. + properties: + awsElasticBlockStore: + description: |- + awsElasticBlockStore represents an AWS Disk resource that is attached to a + kubelet's host machine and then exposed to the pod. + Deprecated: AWSElasticBlockStore is deprecated. All operations for the in-tree + awsElasticBlockStore type are redirected to the ebs.csi.aws.com CSI driver. + More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore + properties: + fsType: + description: |- + fsType is the filesystem type of the volume that you want to mount. + Tip: Ensure that the filesystem type is supported by the host operating system. + Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore + type: string + partition: + description: |- + partition is the partition in the volume that you want to mount. + If omitted, the default is to mount by volume name. + Examples: For volume /dev/sda1, you specify the partition as "1". + Similarly, the volume partition for /dev/sda is "0" (or you can leave the property empty). + format: int32 + type: integer + readOnly: + description: |- + readOnly value true will force the readOnly setting in VolumeMounts. + More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore + type: boolean + volumeID: + description: |- + volumeID is unique ID of the persistent disk resource in AWS (Amazon EBS volume). + More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore + type: string + required: + - volumeID + type: object + azureDisk: + description: |- + azureDisk represents an Azure Data Disk mount on the host and bind mount to the pod. + Deprecated: AzureDisk is deprecated. All operations for the in-tree azureDisk type + are redirected to the disk.csi.azure.com CSI driver. + properties: + cachingMode: + description: 'cachingMode is the Host Caching mode: + None, Read Only, Read Write.' + type: string + diskName: + description: diskName is the Name of the data disk in + the blob storage + type: string + diskURI: + description: diskURI is the URI of data disk in the + blob storage + type: string + fsType: + default: ext4 + description: |- + fsType is Filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + type: string + kind: + description: 'kind expected values are Shared: multiple + blob disks per storage account Dedicated: single + blob disk per storage account Managed: azure managed + data disk (only in managed availability set). defaults + to shared' + type: string + readOnly: + default: false + description: |- + readOnly Defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + required: + - diskName + - diskURI + type: object + azureFile: + description: |- + azureFile represents an Azure File Service mount on the host and bind mount to the pod. + Deprecated: AzureFile is deprecated. All operations for the in-tree azureFile type + are redirected to the file.csi.azure.com CSI driver. + properties: + readOnly: + description: |- + readOnly defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. 
+ type: boolean + secretName: + description: secretName is the name of secret that + contains Azure Storage Account Name and Key + type: string + shareName: + description: shareName is the azure share Name + type: string + required: + - secretName + - shareName + type: object + cephfs: + description: |- + cephFS represents a Ceph FS mount on the host that shares a pod's lifetime. + Deprecated: CephFS is deprecated and the in-tree cephfs type is no longer supported. + properties: + monitors: + description: |- + monitors is Required: Monitors is a collection of Ceph monitors + More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it + items: + type: string + type: array + x-kubernetes-list-type: atomic + path: + description: 'path is Optional: Used as the mounted + root, rather than the full Ceph tree, default is /' + type: string + readOnly: + description: |- + readOnly is Optional: Defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it + type: boolean + secretFile: + description: |- + secretFile is Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret + More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it + type: string + secretRef: + description: |- + secretRef is Optional: SecretRef is reference to the authentication secret for User, default is empty. + More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + user: + description: |- + user is optional: User is the rados user name, default is admin + More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it + type: string + required: + - monitors + type: object + cinder: + description: |- + cinder represents a cinder volume attached and mounted on kubelets host machine. + Deprecated: Cinder is deprecated. All operations for the in-tree cinder type + are redirected to the cinder.csi.openstack.org CSI driver. + More info: https://examples.k8s.io/mysql-cinder-pd/README.md + properties: + fsType: + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + More info: https://examples.k8s.io/mysql-cinder-pd/README.md + type: string + readOnly: + description: |- + readOnly defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + More info: https://examples.k8s.io/mysql-cinder-pd/README.md + type: boolean + secretRef: + description: |- + secretRef is optional: points to a secret object containing parameters used to connect + to OpenStack. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. 
+ More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + volumeID: + description: |- + volumeID used to identify the volume in cinder. + More info: https://examples.k8s.io/mysql-cinder-pd/README.md + type: string + required: + - volumeID + type: object + configMap: + description: configMap represents a configMap that should + populate this volume + properties: + defaultMode: + description: |- + defaultMode is optional: mode bits used to set permissions on created files by default. + Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + Defaults to 0644. + Directories within the path are not affected by this setting. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + items: + description: |- + items if unspecified, each key-value pair in the Data field of the referenced + ConfigMap will be projected into the volume as a file whose name is the + key and content is the value. If specified, the listed keys will be + projected into the specified paths, and unlisted keys will not be + present. If a key is specified which is not present in the ConfigMap, + the volume setup will error unless it is marked optional. Paths must be + relative and may not contain the '..' path or start with '..'. + items: + description: Maps a string key to a path within a + volume. + properties: + key: + description: key is the key to project. + type: string + mode: + description: |- + mode is Optional: mode bits used to set permissions on this file. + Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + If not specified, the volume defaultMode will be used. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + path: + description: |- + path is the relative path of the file to map the key to. + May not be an absolute path. + May not contain the path element '..'. + May not start with the string '..'. + type: string + required: + - key + - path + type: object + type: array + x-kubernetes-list-type: atomic + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + optional: + description: optional specify whether the ConfigMap + or its keys must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + csi: + description: csi (Container Storage Interface) represents + ephemeral storage that is handled by certain external + CSI drivers. + properties: + driver: + description: |- + driver is the name of the CSI driver that handles this volume. + Consult with your admin for the correct name as registered in the cluster. + type: string + fsType: + description: |- + fsType to mount. Ex. "ext4", "xfs", "ntfs". + If not provided, the empty value is passed to the associated CSI driver + which will determine the default filesystem to apply. 
+ type: string + nodePublishSecretRef: + description: |- + nodePublishSecretRef is a reference to the secret object containing + sensitive information to pass to the CSI driver to complete the CSI + NodePublishVolume and NodeUnpublishVolume calls. + This field is optional, and may be empty if no secret is required. If the + secret object contains more than one secret, all secret references are passed. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + readOnly: + description: |- + readOnly specifies a read-only configuration for the volume. + Defaults to false (read/write). + type: boolean + volumeAttributes: + additionalProperties: + type: string + description: |- + volumeAttributes stores driver-specific properties that are passed to the CSI + driver. Consult your driver's documentation for supported values. + type: object + required: + - driver + type: object + downwardAPI: + description: downwardAPI represents downward API about the + pod that should populate this volume + properties: + defaultMode: + description: |- + Optional: mode bits to use on created files by default. Must be a + Optional: mode bits used to set permissions on created files by default. + Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + Defaults to 0644. + Directories within the path are not affected by this setting. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + items: + description: Items is a list of downward API volume + file + items: + description: DownwardAPIVolumeFile represents information + to create the file containing the pod field + properties: + fieldRef: + description: 'Required: Selects a field of the + pod: only annotations, labels, name, namespace + and uid are supported.' + properties: + apiVersion: + description: Version of the schema the FieldPath + is written in terms of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to select in + the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + mode: + description: |- + Optional: mode bits used to set permissions on this file, must be an octal value + between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + If not specified, the volume defaultMode will be used. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + path: + description: 'Required: Path is the relative + path name of the file to be created. Must not + be absolute or contain the ''..'' path. Must + be utf-8 encoded. The first item of the relative + path must not start with ''..''' + type: string + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported. 
+ properties: + containerName: + description: 'Container name: required for + volumes, optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format of + the exposed resources, defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + required: + - path + type: object + type: array + x-kubernetes-list-type: atomic + type: object + emptyDir: + description: |- + emptyDir represents a temporary directory that shares a pod's lifetime. + More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir + properties: + medium: + description: |- + medium represents what type of storage medium should back this directory. + The default is "" which means to use the node's default medium. + Must be an empty string (default) or Memory. + More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir + type: string + sizeLimit: + anyOf: + - type: integer + - type: string + description: |- + sizeLimit is the total amount of local storage required for this EmptyDir volume. + The size limit is also applicable for memory medium. + The maximum usage on memory medium EmptyDir would be the minimum value between + the SizeLimit specified here and the sum of memory limits of all containers in a pod. + The default is nil which means that the limit is undefined. + More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + ephemeral: + description: |- + ephemeral represents a volume that is handled by a cluster storage driver. + The volume's lifecycle is tied to the pod that defines it - it will be created before the pod starts, + and deleted when the pod is removed. + + Use this if: + a) the volume is only needed while the pod runs, + b) features of normal volumes like restoring from snapshot or capacity + tracking are needed, + c) the storage driver is specified through a storage class, and + d) the storage driver supports dynamic volume provisioning through + a PersistentVolumeClaim (see EphemeralVolumeSource for more + information on the connection between this volume type + and PersistentVolumeClaim). + + Use PersistentVolumeClaim or one of the vendor-specific + APIs for volumes that persist for longer than the lifecycle + of an individual pod. + + Use CSI for light-weight local ephemeral volumes if the CSI driver is meant to + be used that way - see the documentation of the driver for + more information. + + A pod can use both types of ephemeral volumes and + persistent volumes at the same time. + properties: + volumeClaimTemplate: + description: |- + Will be used to create a stand-alone PVC to provision the volume. + The pod in which this EphemeralVolumeSource is embedded will be the + owner of the PVC, i.e. the PVC will be deleted together with the + pod. The name of the PVC will be `-` where + `` is the name from the `PodSpec.Volumes` array + entry. Pod validation will reject the pod if the concatenated name + is not valid for a PVC (for example, too long). 
+ + An existing PVC with that name that is not owned by the pod + will *not* be used for the pod to avoid using an unrelated + volume by mistake. Starting the pod is then blocked until + the unrelated PVC is removed. If such a pre-created PVC is + meant to be used by the pod, the PVC has to updated with an + owner reference to the pod once the pod exists. Normally + this should not be necessary, but it may be useful when + manually reconstructing a broken cluster. + + This field is read-only and no changes will be made by Kubernetes + to the PVC after it has been created. + + Required, must not be nil. + properties: + metadata: + description: |- + May contain labels and annotations that will be copied into the PVC + when creating it. No other fields are allowed and will be rejected during + validation. + type: object + spec: + description: |- + The specification for the PersistentVolumeClaim. The entire content is + copied unchanged into the PVC that gets created from this + template. The same fields as in a PersistentVolumeClaim + are also valid here. + properties: + accessModes: + description: |- + accessModes contains the desired access modes the volume should have. + More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1 + items: + type: string + type: array + x-kubernetes-list-type: atomic + dataSource: + description: |- + dataSource field can be used to specify either: + * An existing VolumeSnapshot object (snapshot.storage.k8s.io/VolumeSnapshot) + * An existing PVC (PersistentVolumeClaim) + If the provisioner or an external controller can support the specified data source, + it will create a new volume based on the contents of the specified data source. + When the AnyVolumeDataSource feature gate is enabled, dataSource contents will be copied to dataSourceRef, + and dataSourceRef contents will be copied to dataSource when dataSourceRef.namespace is not specified. + If the namespace is specified, then dataSourceRef will not be copied to dataSource. + properties: + apiGroup: + description: |- + APIGroup is the group for the resource being referenced. + If APIGroup is not specified, the specified Kind must be in the core API group. + For any other third-party types, APIGroup is required. + type: string + kind: + description: Kind is the type of resource + being referenced + type: string + name: + description: Name is the name of resource + being referenced + type: string + required: + - kind + - name + type: object + x-kubernetes-map-type: atomic + dataSourceRef: + description: |- + dataSourceRef specifies the object from which to populate the volume with data, if a non-empty + volume is desired. This may be any object from a non-empty API group (non + core object) or a PersistentVolumeClaim object. + When this field is specified, volume binding will only succeed if the type of + the specified object matches some installed volume populator or dynamic + provisioner. + This field will replace the functionality of the dataSource field and as such + if both fields are non-empty, they must have the same value. For backwards + compatibility, when namespace isn't specified in dataSourceRef, + both fields (dataSource and dataSourceRef) will be set to the same + value automatically if one of them is empty and the other is non-empty. + When namespace is specified in dataSourceRef, + dataSource isn't set to the same value and must be empty. 
+ There are three important differences between dataSource and dataSourceRef: + * While dataSource only allows two specific types of objects, dataSourceRef + allows any non-core object, as well as PersistentVolumeClaim objects. + * While dataSource ignores disallowed values (dropping them), dataSourceRef + preserves all values, and generates an error if a disallowed value is + specified. + * While dataSource only allows local objects, dataSourceRef allows objects + in any namespaces. + (Beta) Using this field requires the AnyVolumeDataSource feature gate to be enabled. + (Alpha) Using the namespace field of dataSourceRef requires the CrossNamespaceVolumeDataSource feature gate to be enabled. + properties: + apiGroup: + description: |- + APIGroup is the group for the resource being referenced. + If APIGroup is not specified, the specified Kind must be in the core API group. + For any other third-party types, APIGroup is required. + type: string + kind: + description: Kind is the type of resource + being referenced + type: string + name: + description: Name is the name of resource + being referenced + type: string + namespace: + description: |- + Namespace is the namespace of resource being referenced + Note that when a namespace is specified, a gateway.networking.k8s.io/ReferenceGrant object is required in the referent namespace to allow that namespace's owner to accept the reference. See the ReferenceGrant documentation for details. + (Alpha) This field requires the CrossNamespaceVolumeDataSource feature gate to be enabled. + type: string + required: + - kind + - name + type: object + resources: + description: |- + resources represents the minimum resources the volume should have. + If RecoverVolumeExpansionFailure feature is enabled users are allowed to specify resource requirements + that are lower than previous value but must still be higher than capacity recorded in the + status field of the claim. + More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#resources + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Requests describes the minimum amount of compute resources required. + If Requests is omitted for a container, it defaults to Limits if that is explicitly specified, + otherwise to an implementation-defined value. Requests cannot exceed Limits. + More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ + type: object + type: object + selector: + description: selector is a label query over + volumes to consider for binding. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. 
+ properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. If the operator is Exists or DoesNotExist, + the values array must be empty. This array is replaced during a strategic + merge patch. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: |- + matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels + map is equivalent to an element of matchExpressions, whose key field is "key", the + operator is "In", and the values array contains only "value". The requirements are ANDed. + type: object + type: object + x-kubernetes-map-type: atomic + storageClassName: + description: |- + storageClassName is the name of the StorageClass required by the claim. + More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1 + type: string + volumeAttributesClassName: + description: |- + volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. + If specified, the CSI driver will create or update the volume with the attributes defined + in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, + it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass + will be applied to the claim but it's not allowed to reset this field to empty string once it is set. + If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass + will be set by the persistentvolume controller if it exists. + If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be + set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource + exists. + More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ + (Beta) Using this field requires the VolumeAttributesClass feature gate to be enabled (off by default). + type: string + volumeMode: + description: |- + volumeMode defines what type of volume is required by the claim. + Value of Filesystem is implied when not included in claim spec. + type: string + volumeName: + description: volumeName is the binding reference + to the PersistentVolume backing this claim. + type: string + type: object + required: + - spec + type: object + type: object + fc: + description: fc represents a Fibre Channel resource that + is attached to a kubelet's host machine and then exposed + to the pod. + properties: + fsType: + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + type: string + lun: + description: 'lun is Optional: FC target lun number' + format: int32 + type: integer + readOnly: + description: |- + readOnly is Optional: Defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. 
+ type: boolean + targetWWNs: + description: 'targetWWNs is Optional: FC target worldwide + names (WWNs)' + items: + type: string + type: array + x-kubernetes-list-type: atomic + wwids: + description: |- + wwids Optional: FC volume world wide identifiers (wwids) + Either wwids or combination of targetWWNs and lun must be set, but not both simultaneously. + items: + type: string + type: array + x-kubernetes-list-type: atomic + type: object + flexVolume: + description: |- + flexVolume represents a generic volume resource that is + provisioned/attached using an exec based plugin. + Deprecated: FlexVolume is deprecated. Consider using a CSIDriver instead. + properties: + driver: + description: driver is the name of the driver to use + for this volume. + type: string + fsType: + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs", "ntfs". The default filesystem depends on FlexVolume script. + type: string + options: + additionalProperties: + type: string + description: 'options is Optional: this field holds + extra command options if any.' + type: object + readOnly: + description: |- + readOnly is Optional: defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + secretRef: + description: |- + secretRef is Optional: secretRef is reference to the secret object containing + sensitive information to pass to the plugin scripts. This may be + empty if no secret object is specified. If the secret object + contains more than one secret, all secrets are passed to the plugin + scripts. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + required: + - driver + type: object + flocker: + description: |- + flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running. + Deprecated: Flocker is deprecated and the in-tree flocker type is no longer supported. + properties: + datasetName: + description: |- + datasetName is Name of the dataset stored as metadata -> name on the dataset for Flocker + should be considered as deprecated + type: string + datasetUUID: + description: datasetUUID is the UUID of the dataset. + This is unique identifier of a Flocker dataset + type: string + type: object + gcePersistentDisk: + description: |- + gcePersistentDisk represents a GCE Disk resource that is attached to a + kubelet's host machine and then exposed to the pod. + Deprecated: GCEPersistentDisk is deprecated. All operations for the in-tree + gcePersistentDisk type are redirected to the pd.csi.storage.gke.io CSI driver. + More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk + properties: + fsType: + description: |- + fsType is filesystem type of the volume that you want to mount. + Tip: Ensure that the filesystem type is supported by the host operating system. + Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. 
+ More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk + type: string + partition: + description: |- + partition is the partition in the volume that you want to mount. + If omitted, the default is to mount by volume name. + Examples: For volume /dev/sda1, you specify the partition as "1". + Similarly, the volume partition for /dev/sda is "0" (or you can leave the property empty). + More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk + format: int32 + type: integer + pdName: + description: |- + pdName is unique name of the PD resource in GCE. Used to identify the disk in GCE. + More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk + type: string + readOnly: + description: |- + readOnly here will force the ReadOnly setting in VolumeMounts. + Defaults to false. + More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk + type: boolean + required: + - pdName + type: object + gitRepo: + description: |- + gitRepo represents a git repository at a particular revision. + Deprecated: GitRepo is deprecated. To provision a container with a git repo, mount an + EmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir + into the Pod's container. + properties: + directory: + description: |- + directory is the target directory name. + Must not contain or start with '..'. If '.' is supplied, the volume directory will be the + git repository. Otherwise, if specified, the volume will contain the git repository in + the subdirectory with the given name. + type: string + repository: + description: repository is the URL + type: string + revision: + description: revision is the commit hash for the specified + revision. + type: string + required: + - repository + type: object + glusterfs: + description: |- + glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime. + Deprecated: Glusterfs is deprecated and the in-tree glusterfs type is no longer supported. + More info: https://examples.k8s.io/volumes/glusterfs/README.md + properties: + endpoints: + description: |- + endpoints is the endpoint name that details Glusterfs topology. + More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod + type: string + path: + description: |- + path is the Glusterfs volume path. + More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod + type: string + readOnly: + description: |- + readOnly here will force the Glusterfs volume to be mounted with read-only permissions. + Defaults to false. + More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod + type: boolean + required: + - endpoints + - path + type: object + hostPath: + description: |- + hostPath represents a pre-existing file or directory on the host + machine that is directly exposed to the container. This is generally + used for system agents or other privileged things that are allowed + to see the host machine. Most containers will NOT need this. + More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath + properties: + path: + description: |- + path of the directory on the host. + If the path is a symlink, it will follow the link to the real path. 
+ More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath + type: string + type: + description: |- + type for HostPath Volume + Defaults to "" + More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath + type: string + required: + - path + type: object + image: + description: |- + image represents an OCI object (a container image or artifact) pulled and mounted on the kubelet's host machine. + The volume is resolved at pod startup depending on which PullPolicy value is provided: + + - Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails. + - Never: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present. + - IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails. + + The volume gets re-resolved if the pod gets deleted and recreated, which means that new remote content will become available on pod recreation. + A failure to resolve or pull the image during pod startup will block containers from starting and may add significant latency. Failures will be retried using normal volume backoff and will be reported on the pod reason and message. + The types of objects that may be mounted by this volume are defined by the container runtime implementation on a host machine and at minimum must include all valid types supported by the container image field. + The OCI object gets mounted in a single directory (spec.containers[*].volumeMounts.mountPath) by merging the manifest layers in the same way as for container images. + The volume will be mounted read-only (ro) and non-executable files (noexec). + Sub path mounts for containers are not supported (spec.containers[*].volumeMounts.subpath) before 1.33. + The field spec.securityContext.fsGroupChangePolicy has no effect on this volume type. + properties: + pullPolicy: + description: |- + Policy for pulling OCI objects. Possible values are: + Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails. + Never: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present. + IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails. + Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. + type: string + reference: + description: |- + Required: Image or artifact reference to be used. + Behaves in the same way as pod.spec.containers[*].image. + Pull secrets will be assembled in the same way as for the container image by looking up node credentials, SA image pull secrets, and pod spec image pull secrets. + More info: https://kubernetes.io/docs/concepts/containers/images + This field is optional to allow higher level config management to default or override + container images in workload controllers like Deployments and StatefulSets. + type: string + type: object + iscsi: + description: |- + iscsi represents an ISCSI Disk resource that is attached to a + kubelet's host machine and then exposed to the pod. 
+ More info: https://examples.k8s.io/volumes/iscsi/README.md + properties: + chapAuthDiscovery: + description: chapAuthDiscovery defines whether support + iSCSI Discovery CHAP authentication + type: boolean + chapAuthSession: + description: chapAuthSession defines whether support + iSCSI Session CHAP authentication + type: boolean + fsType: + description: |- + fsType is the filesystem type of the volume that you want to mount. + Tip: Ensure that the filesystem type is supported by the host operating system. + Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + More info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi + type: string + initiatorName: + description: |- + initiatorName is the custom iSCSI Initiator Name. + If initiatorName is specified with iscsiInterface simultaneously, new iSCSI interface + : will be created for the connection. + type: string + iqn: + description: iqn is the target iSCSI Qualified Name. + type: string + iscsiInterface: + default: default + description: |- + iscsiInterface is the interface Name that uses an iSCSI transport. + Defaults to 'default' (tcp). + type: string + lun: + description: lun represents iSCSI Target Lun number. + format: int32 + type: integer + portals: + description: |- + portals is the iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port + is other than default (typically TCP ports 860 and 3260). + items: + type: string + type: array + x-kubernetes-list-type: atomic + readOnly: + description: |- + readOnly here will force the ReadOnly setting in VolumeMounts. + Defaults to false. + type: boolean + secretRef: + description: secretRef is the CHAP Secret for iSCSI + target and initiator authentication + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + targetPortal: + description: |- + targetPortal is iSCSI Target Portal. The Portal is either an IP or ip_addr:port if the port + is other than default (typically TCP ports 860 and 3260). + type: string + required: + - iqn + - lun + - targetPortal + type: object + name: + description: |- + name of the volume. + Must be a DNS_LABEL and unique within the pod. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + nfs: + description: |- + nfs represents an NFS mount on the host that shares a pod's lifetime + More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs + properties: + path: + description: |- + path that is exported by the NFS server. + More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs + type: string + readOnly: + description: |- + readOnly here will force the NFS export to be mounted with read-only permissions. + Defaults to false. + More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs + type: boolean + server: + description: |- + server is the hostname or IP address of the NFS server. + More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs + type: string + required: + - path + - server + type: object + persistentVolumeClaim: + description: |- + persistentVolumeClaimVolumeSource represents a reference to a + PersistentVolumeClaim in the same namespace. 
+ More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims + properties: + claimName: + description: |- + claimName is the name of a PersistentVolumeClaim in the same namespace as the pod using this volume. + More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims + type: string + readOnly: + description: |- + readOnly Will force the ReadOnly setting in VolumeMounts. + Default false. + type: boolean + required: + - claimName + type: object + photonPersistentDisk: + description: |- + photonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine. + Deprecated: PhotonPersistentDisk is deprecated and the in-tree photonPersistentDisk type is no longer supported. + properties: + fsType: + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + type: string + pdID: + description: pdID is the ID that identifies Photon Controller + persistent disk + type: string + required: + - pdID + type: object + portworxVolume: + description: |- + portworxVolume represents a portworx volume attached and mounted on kubelets host machine. + Deprecated: PortworxVolume is deprecated. All operations for the in-tree portworxVolume type + are redirected to the pxd.portworx.com CSI driver when the CSIMigrationPortworx feature-gate + is on. + properties: + fsType: + description: |- + fSType represents the filesystem type to mount + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs". Implicitly inferred to be "ext4" if unspecified. + type: string + readOnly: + description: |- + readOnly defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + volumeID: + description: volumeID uniquely identifies a Portworx + volume + type: string + required: + - volumeID + type: object + projected: + description: projected items for all in one resources secrets, + configmaps, and downward API + properties: + defaultMode: + description: |- + defaultMode are the mode bits used to set permissions on created files by default. + Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + Directories within the path are not affected by this setting. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + sources: + description: |- + sources is the list of volume projections. Each entry in this list + handles one source. + items: + description: |- + Projection that may be projected along with other supported volume types. + Exactly one of these fields must be set. + properties: + clusterTrustBundle: + description: |- + ClusterTrustBundle allows a pod to access the `.spec.trustBundle` field + of ClusterTrustBundle objects in an auto-updating file. + + Alpha, gated by the ClusterTrustBundleProjection feature gate. + + ClusterTrustBundle objects can either be selected by name, or by the + combination of signer name and a label selector. + + Kubelet performs aggressive normalization of the PEM contents written + into the pod filesystem. Esoteric PEM features such as inter-block + comments and block headers are stripped. Certificates are deduplicated. 
+ The ordering of certificates within the file is arbitrary, and Kubelet + may change the order over time. + properties: + labelSelector: + description: |- + Select all ClusterTrustBundles that match this label selector. Only has + effect if signerName is set. Mutually-exclusive with name. If unset, + interpreted as "match nothing". If set but empty, interpreted as "match + everything". + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. If the operator is Exists or DoesNotExist, + the values array must be empty. This array is replaced during a strategic + merge patch. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: |- + matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels + map is equivalent to an element of matchExpressions, whose key field is "key", the + operator is "In", and the values array contains only "value". The requirements are ANDed. + type: object + type: object + x-kubernetes-map-type: atomic + name: + description: |- + Select a single ClusterTrustBundle by object name. Mutually-exclusive + with signerName and labelSelector. + type: string + optional: + description: |- + If true, don't block pod startup if the referenced ClusterTrustBundle(s) + aren't available. If using name, then the named ClusterTrustBundle is + allowed not to exist. If using signerName, then the combination of + signerName and labelSelector is allowed to match zero + ClusterTrustBundles. + type: boolean + path: + description: Relative path from the volume + root to write the bundle. + type: string + signerName: + description: |- + Select all ClusterTrustBundles that match this signer name. + Mutually-exclusive with name. The contents of all selected + ClusterTrustBundles will be unified and deduplicated. + type: string + required: + - path + type: object + configMap: + description: configMap information about the configMap + data to project + properties: + items: + description: |- + items if unspecified, each key-value pair in the Data field of the referenced + ConfigMap will be projected into the volume as a file whose name is the + key and content is the value. If specified, the listed keys will be + projected into the specified paths, and unlisted keys will not be + present. If a key is specified which is not present in the ConfigMap, + the volume setup will error unless it is marked optional. Paths must be + relative and may not contain the '..' path or start with '..'. + items: + description: Maps a string key to a path + within a volume. + properties: + key: + description: key is the key to project. + type: string + mode: + description: |- + mode is Optional: mode bits used to set permissions on this file. 
+ Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + If not specified, the volume defaultMode will be used. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + path: + description: |- + path is the relative path of the file to map the key to. + May not be an absolute path. + May not contain the path element '..'. + May not start with the string '..'. + type: string + required: + - key + - path + type: object + type: array + x-kubernetes-list-type: atomic + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + optional: + description: optional specify whether the + ConfigMap or its keys must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + downwardAPI: + description: downwardAPI information about the + downwardAPI data to project + properties: + items: + description: Items is a list of DownwardAPIVolume + file + items: + description: DownwardAPIVolumeFile represents + information to create the file containing + the pod field + properties: + fieldRef: + description: 'Required: Selects a field + of the pod: only annotations, labels, + name, namespace and uid are supported.' + properties: + apiVersion: + description: Version of the schema + the FieldPath is written in terms + of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to + select in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + mode: + description: |- + Optional: mode bits used to set permissions on this file, must be an octal value + between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + If not specified, the volume defaultMode will be used. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + path: + description: 'Required: Path is the + relative path name of the file to + be created. Must not be absolute or + contain the ''..'' path. Must be utf-8 + encoded. The first item of the relative + path must not start with ''..''' + type: string + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported. 
+ properties: + containerName: + description: 'Container name: required + for volumes, optional for env + vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output + format of the exposed resources, + defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource + to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + required: + - path + type: object + type: array + x-kubernetes-list-type: atomic + type: object + secret: + description: secret information about the secret + data to project + properties: + items: + description: |- + items if unspecified, each key-value pair in the Data field of the referenced + Secret will be projected into the volume as a file whose name is the + key and content is the value. If specified, the listed keys will be + projected into the specified paths, and unlisted keys will not be + present. If a key is specified which is not present in the Secret, + the volume setup will error unless it is marked optional. Paths must be + relative and may not contain the '..' path or start with '..'. + items: + description: Maps a string key to a path + within a volume. + properties: + key: + description: key is the key to project. + type: string + mode: + description: |- + mode is Optional: mode bits used to set permissions on this file. + Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + If not specified, the volume defaultMode will be used. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + path: + description: |- + path is the relative path of the file to map the key to. + May not be an absolute path. + May not contain the path element '..'. + May not start with the string '..'. + type: string + required: + - key + - path + type: object + type: array + x-kubernetes-list-type: atomic + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + optional: + description: optional field specify whether + the Secret or its key must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + serviceAccountToken: + description: serviceAccountToken is information + about the serviceAccountToken data to project + properties: + audience: + description: |- + audience is the intended audience of the token. A recipient of a token + must identify itself with an identifier specified in the audience of the + token, and otherwise should reject the token. The audience defaults to the + identifier of the apiserver. + type: string + expirationSeconds: + description: |- + expirationSeconds is the requested duration of validity of the service + account token. As the token approaches expiration, the kubelet volume + plugin will proactively rotate the service account token. 
The kubelet will + start trying to rotate the token if the token is older than 80 percent of + its time to live or if the token is older than 24 hours.Defaults to 1 hour + and must be at least 10 minutes. + format: int64 + type: integer + path: + description: |- + path is the path relative to the mount point of the file to project the + token into. + type: string + required: + - path + type: object + type: object + type: array + x-kubernetes-list-type: atomic + type: object + quobyte: + description: |- + quobyte represents a Quobyte mount on the host that shares a pod's lifetime. + Deprecated: Quobyte is deprecated and the in-tree quobyte type is no longer supported. + properties: + group: + description: |- + group to map volume access to + Default is no group + type: string + readOnly: + description: |- + readOnly here will force the Quobyte volume to be mounted with read-only permissions. + Defaults to false. + type: boolean + registry: + description: |- + registry represents a single or multiple Quobyte Registry services + specified as a string as host:port pair (multiple entries are separated with commas) + which acts as the central registry for volumes + type: string + tenant: + description: |- + tenant owning the given Quobyte volume in the Backend + Used with dynamically provisioned Quobyte volumes, value is set by the plugin + type: string + user: + description: |- + user to map volume access to + Defaults to serivceaccount user + type: string + volume: + description: volume is a string that references an already + created Quobyte volume by name. + type: string + required: + - registry + - volume + type: object + rbd: + description: |- + rbd represents a Rados Block Device mount on the host that shares a pod's lifetime. + Deprecated: RBD is deprecated and the in-tree rbd type is no longer supported. + More info: https://examples.k8s.io/volumes/rbd/README.md + properties: + fsType: + description: |- + fsType is the filesystem type of the volume that you want to mount. + Tip: Ensure that the filesystem type is supported by the host operating system. + Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + More info: https://kubernetes.io/docs/concepts/storage/volumes#rbd + type: string + image: + description: |- + image is the rados image name. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + type: string + keyring: + default: /etc/ceph/keyring + description: |- + keyring is the path to key ring for RBDUser. + Default is /etc/ceph/keyring. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + type: string + monitors: + description: |- + monitors is a collection of Ceph monitors. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + items: + type: string + type: array + x-kubernetes-list-type: atomic + pool: + default: rbd + description: |- + pool is the rados pool name. + Default is rbd. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + type: string + readOnly: + description: |- + readOnly here will force the ReadOnly setting in VolumeMounts. + Defaults to false. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + type: boolean + secretRef: + description: |- + secretRef is name of the authentication secret for RBDUser. If provided + overrides keyring. + Default is nil. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + properties: + name: + default: "" + description: |- + Name of the referent. 
+ This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + user: + default: admin + description: |- + user is the rados user name. + Default is admin. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + type: string + required: + - image + - monitors + type: object + scaleIO: + description: |- + scaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes. + Deprecated: ScaleIO is deprecated and the in-tree scaleIO type is no longer supported. + properties: + fsType: + default: xfs + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs", "ntfs". + Default is "xfs". + type: string + gateway: + description: gateway is the host address of the ScaleIO + API Gateway. + type: string + protectionDomain: + description: protectionDomain is the name of the ScaleIO + Protection Domain for the configured storage. + type: string + readOnly: + description: |- + readOnly Defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + secretRef: + description: |- + secretRef references to the secret for ScaleIO user and other + sensitive information. If this is not provided, Login operation will fail. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + sslEnabled: + description: sslEnabled Flag enable/disable SSL communication + with Gateway, default false + type: boolean + storageMode: + default: ThinProvisioned + description: |- + storageMode indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned. + Default is ThinProvisioned. + type: string + storagePool: + description: storagePool is the ScaleIO Storage Pool + associated with the protection domain. + type: string + system: + description: system is the name of the storage system + as configured in ScaleIO. + type: string + volumeName: + description: |- + volumeName is the name of a volume already created in the ScaleIO system + that is associated with this volume source. + type: string + required: + - gateway + - secretRef + - system + type: object + secret: + description: |- + secret represents a secret that should populate this volume. + More info: https://kubernetes.io/docs/concepts/storage/volumes#secret + properties: + defaultMode: + description: |- + defaultMode is Optional: mode bits used to set permissions on created files by default. + Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values + for mode bits. Defaults to 0644. + Directories within the path are not affected by this setting. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. 
+ format: int32 + type: integer + items: + description: |- + items If unspecified, each key-value pair in the Data field of the referenced + Secret will be projected into the volume as a file whose name is the + key and content is the value. If specified, the listed keys will be + projected into the specified paths, and unlisted keys will not be + present. If a key is specified which is not present in the Secret, + the volume setup will error unless it is marked optional. Paths must be + relative and may not contain the '..' path or start with '..'. + items: + description: Maps a string key to a path within a + volume. + properties: + key: + description: key is the key to project. + type: string + mode: + description: |- + mode is Optional: mode bits used to set permissions on this file. + Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + If not specified, the volume defaultMode will be used. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + path: + description: |- + path is the relative path of the file to map the key to. + May not be an absolute path. + May not contain the path element '..'. + May not start with the string '..'. + type: string + required: + - key + - path + type: object + type: array + x-kubernetes-list-type: atomic + optional: + description: optional field specify whether the Secret + or its keys must be defined + type: boolean + secretName: + description: |- + secretName is the name of the secret in the pod's namespace to use. + More info: https://kubernetes.io/docs/concepts/storage/volumes#secret + type: string + type: object + storageos: + description: |- + storageOS represents a StorageOS volume attached and mounted on Kubernetes nodes. + Deprecated: StorageOS is deprecated and the in-tree storageos type is no longer supported. + properties: + fsType: + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + type: string + readOnly: + description: |- + readOnly defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + secretRef: + description: |- + secretRef specifies the secret to use for obtaining the StorageOS API + credentials. If not specified, default values will be attempted. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + volumeName: + description: |- + volumeName is the human-readable name of the StorageOS volume. Volume + names are only unique within a namespace. + type: string + volumeNamespace: + description: |- + volumeNamespace specifies the scope of the volume within StorageOS. If no + namespace is specified then the Pod's namespace will be used. This allows the + Kubernetes name scoping to be mirrored within StorageOS for tighter integration. + Set VolumeName to any name to override the default behaviour. 
+ Set to "default" if you are not using namespaces within StorageOS. + Namespaces that do not pre-exist within StorageOS will be created. + type: string + type: object + vsphereVolume: + description: |- + vsphereVolume represents a vSphere volume attached and mounted on kubelets host machine. + Deprecated: VsphereVolume is deprecated. All operations for the in-tree vsphereVolume type + are redirected to the csi.vsphere.vmware.com CSI driver. + properties: + fsType: + description: |- + fsType is filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + type: string + storagePolicyID: + description: storagePolicyID is the storage Policy Based + Management (SPBM) profile ID associated with the StoragePolicyName. + type: string + storagePolicyName: + description: storagePolicyName is the storage Policy + Based Management (SPBM) profile name. + type: string + volumePath: + description: volumePath is the path that identifies + vSphere volume vmdk + type: string + required: + - volumePath + type: object + required: + - name + type: object + type: array + type: object description: type: string memory: @@ -234,9 +2181,18 @@ spec: - type type: object type: array + configHash: + description: |- + This is used to determine if the agent config has changed. + If it has changed, the agent will be restarted. + format: byte + type: string observedGeneration: format: int64 type: integer + required: + - configHash + - observedGeneration type: object type: object served: true diff --git a/go/config/crd/bases/kagent.dev_modelconfigs.yaml b/go/config/crd/bases/kagent.dev_modelconfigs.yaml index 10318d797..42dc4e865 100644 --- a/go/config/crd/bases/kagent.dev_modelconfigs.yaml +++ b/go/config/crd/bases/kagent.dev_modelconfigs.yaml @@ -46,6 +46,7 @@ spec: metadata: type: object spec: + description: ModelConfigSpec defines the desired state of ModelConfig. properties: anthropic: description: Anthropic-specific configuration @@ -101,9 +102,8 @@ spec: type: string apiKeySecretRef: description: The reference to the secret that contains the API key. 
- Can either be a reference to the name of a secret in the same namespace - as the referencing ModelConfig, or a reference to the name of a - Secret in a different namespace in the form / + Must be a reference to the name of a secret in the same namespace + as the referencing ModelConfig type: string azureOpenAI: description: Azure OpenAI-specific configuration @@ -137,8 +137,11 @@ spec: additionalProperties: type: string type: object - geminiVertexAI: + gemini: description: Gemini-specific configuration + type: object + geminiVertexAI: + description: Gemini Vertex AI-specific configuration properties: candidateCount: description: Candidate count @@ -248,6 +251,7 @@ spec: - OpenAI - AzureOpenAI - Ollama + - Gemini - GeminiVertexAI - AnthropicVertexAI type: string @@ -264,6 +268,8 @@ spec: rule: '!(has(self.azureOpenAI) && self.provider != ''AzureOpenAI'')' - message: provider.ollama must be nil if the provider is not Ollama rule: '!(has(self.ollama) && self.provider != ''Ollama'')' + - message: provider.gemini must be nil if the provider is not Gemini + rule: '!(has(self.gemini) && self.provider != ''Gemini'')' - message: provider.geminiVertexAI must be nil if the provider is not GeminiVertexAI rule: '!(has(self.geminiVertexAI) && self.provider != ''GeminiVertexAI'')' diff --git a/go/config/crd/bases/kagent.dev_toolservers.yaml b/go/config/crd/bases/kagent.dev_toolservers.yaml index 685d04e6d..942794ad6 100644 --- a/go/config/crd/bases/kagent.dev_toolservers.yaml +++ b/go/config/crd/bases/kagent.dev_toolservers.yaml @@ -42,6 +42,7 @@ spec: description: ToolServerSpec defines the desired state of ToolServer. properties: config: + description: Only one of stdio, sse, or streamableHttp can be specified. properties: sse: properties: @@ -201,7 +202,14 @@ spec: required: - url type: object + type: + type: string type: object + x-kubernetes-validations: + - message: Exactly one of stdio, sse, or streamableHttp must be specified + rule: (has(self.stdio) && !has(self.sse) && !has(self.streamableHttp)) + || (!has(self.stdio) && has(self.sse) && !has(self.streamableHttp)) + || (!has(self.stdio) && !has(self.sse) && has(self.streamableHttp)) description: type: string required: @@ -296,10 +304,12 @@ spec: - provider - version type: object + description: + type: string name: type: string required: - - component + - description - name type: object type: array diff --git a/go/config/rbac/role.yaml b/go/config/rbac/role.yaml index 80e63d2b8..a61ac0648 100644 --- a/go/config/rbac/role.yaml +++ b/go/config/rbac/role.yaml @@ -36,7 +36,6 @@ rules: - agents - memories - modelconfigs - - teams verbs: - create - delete @@ -51,7 +50,6 @@ rules: - agents/finalizers - memories/finalizers - modelconfigs/finalizers - - teams/finalizers verbs: - update - apiGroups: @@ -60,7 +58,6 @@ rules: - agents/status - memories/status - modelconfigs/status - - teams/status verbs: - get - patch diff --git a/go/controller/PROJECT b/go/controller/PROJECT deleted file mode 100644 index 50ec91df4..000000000 --- a/go/controller/PROJECT +++ /dev/null @@ -1,47 +0,0 @@ -# Code generated by tool. DO NOT EDIT. -# This file is used to track the info used to scaffold your project -# and allow the plugins properly work. 
-# More info: https://book.kubebuilder.io/reference/project-config.html -domain: kagent.dev -layout: -- go.kubebuilder.io/v4 -projectName: controller -repo: github.com/kagent-dev/kagent/go/controller -resources: -- api: - crdVersion: v1 - namespaced: true - controller: true - domain: kagent.dev - group: agent - kind: AutogenTeam - path: github.com/kagent-dev/kagent/go/controller/api/v1alpha1 - version: v1alpha1 -- api: - crdVersion: v1 - namespaced: true - controller: true - domain: kagent.dev - group: agent - kind: AutogenAgent - path: github.com/kagent-dev/kagent/go/controller/api/v1alpha1 - version: v1alpha1 -- api: - crdVersion: v1 - namespaced: true - controller: true - domain: kagent.dev - group: agent - kind: AutogenModelConfig - path: github.com/kagent-dev/kagent/go/controller/api/v1alpha1 - version: v1alpha1 -- api: - crdVersion: v1 - namespaced: true - controller: true - domain: kagent.dev - group: agent - kind: ToolServer - path: github.com/kagent-dev/kagent/go/controller/api/v1alpha1 - version: v1alpha1 -version: "3" diff --git a/go/controller/api/v1alpha1/agent_types.go b/go/controller/api/v1alpha1/agent_types.go index 4fc00352e..44c31cf57 100644 --- a/go/controller/api/v1alpha1/agent_types.go +++ b/go/controller/api/v1alpha1/agent_types.go @@ -19,17 +19,17 @@ package v1alpha1 import ( "encoding/json" + corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "trpc.group/trpc-go/trpc-a2a-go/server" ) -const ( - AgentConditionTypeAccepted = "Accepted" -) +type AgentType string // AgentSpec defines the desired state of Agent. type AgentSpec struct { + // +optional Description string `json:"description,omitempty"` // +kubebuilder:validation:MinLength=1 SystemMessage string `json:"systemMessage,omitempty"` @@ -53,6 +53,25 @@ type AgentSpec struct { // Read more about the A2A protocol here: https://github.com/google/A2A // +optional A2AConfig *A2AConfig `json:"a2aConfig,omitempty"` + // +optional + Deployment *DeploymentSpec `json:"deployment,omitempty"` +} + +type DeploymentSpec struct { + // If not specified, the default value is 1. + // +optional + // +kubebuilder:validation:Minimum=1 + Replicas *int32 `json:"replicas,omitempty"` + // +optional + ImagePullSecrets []corev1.LocalObjectReference `json:"imagePullSecrets,omitempty"` + // +optional + Volumes []corev1.Volume `json:"volumes,omitempty"` + // +optional + Labels map[string]string `json:"labels,omitempty"` + // +optional + Annotations map[string]string `json:"annotations,omitempty"` + // +optional + Env []corev1.EnvVar `json:"env,omitempty"` } // ToolProviderType represents the tool provider type @@ -104,16 +123,25 @@ type A2AConfig struct { type AgentSkill server.AgentSkill +const ( + AgentConditionTypeAccepted = "Accepted" + AgentConditionTypeReady = "Ready" +) + // AgentStatus defines the observed state of Agent. type AgentStatus struct { - ObservedGeneration int64 `json:"observedGeneration,omitempty"` - Conditions []metav1.Condition `json:"conditions,omitempty"` + ObservedGeneration int64 `json:"observedGeneration"` + // This is used to determine if the agent config has changed. + // If it has changed, the agent will be restarted. + ConfigHash []byte `json:"configHash"` + Conditions []metav1.Condition `json:"conditions,omitempty"` } // +kubebuilder:object:root=true // +kubebuilder:subresource:status -// +kubebuilder:printcolumn:name="Accepted",type="string",JSONPath=".status.conditions[0].status",description="Whether or not the agent has been accepted by the system." 
// +kubebuilder:printcolumn:name="ModelConfig",type="string",JSONPath=".spec.modelConfig",description="The ModelConfig resource referenced by this agent." +// +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status",description="Whether or not the agent is ready to serve requests." +// +kubebuilder:printcolumn:name="Accepted",type="string",JSONPath=".status.conditions[?(@.type=='Accepted')].status",description="Whether or not the agent has been accepted by the system." // Agent is the Schema for the agents API. type Agent struct { @@ -136,7 +164,3 @@ type AgentList struct { func init() { SchemeBuilder.Register(&Agent{}, &AgentList{}) } - -func (a *Agent) GetModelConfigName() string { - return a.Spec.ModelConfig -} diff --git a/go/controller/api/v1alpha1/modelconfig_types.go b/go/controller/api/v1alpha1/modelconfig_types.go index 0e9b836a2..bb4f0b8ec 100644 --- a/go/controller/api/v1alpha1/modelconfig_types.go +++ b/go/controller/api/v1alpha1/modelconfig_types.go @@ -25,16 +25,17 @@ const ( ) // ModelProvider represents the model provider type -// +kubebuilder:validation:Enum=Anthropic;OpenAI;AzureOpenAI;Ollama;GeminiVertexAI;AnthropicVertexAI +// +kubebuilder:validation:Enum=Anthropic;OpenAI;AzureOpenAI;Ollama;Gemini;GeminiVertexAI;AnthropicVertexAI type ModelProvider string const ( - Anthropic ModelProvider = "Anthropic" - AzureOpenAI ModelProvider = "AzureOpenAI" - OpenAI ModelProvider = "OpenAI" - Ollama ModelProvider = "Ollama" - GeminiVertexAI ModelProvider = "GeminiVertexAI" - AnthropicVertexAI ModelProvider = "AnthropicVertexAI" + ModelProviderAnthropic ModelProvider = "Anthropic" + ModelProviderAzureOpenAI ModelProvider = "AzureOpenAI" + ModelProviderOpenAI ModelProvider = "OpenAI" + ModelProviderOllama ModelProvider = "Ollama" + ModelProviderGemini ModelProvider = "Gemini" + ModelProviderGeminiVertexAI ModelProvider = "GeminiVertexAI" + ModelProviderAnthropicVertexAI ModelProvider = "AnthropicVertexAI" ) type BaseVertexAIConfig struct { @@ -199,15 +200,17 @@ type OllamaConfig struct { Options map[string]string `json:"options,omitempty"` } +type GeminiConfig struct{} + // ModelConfigSpec defines the desired state of ModelConfig. 
// // +kubebuilder:validation:XValidation:message="provider.openAI must be nil if the provider is not OpenAI",rule="!(has(self.openAI) && self.provider != 'OpenAI')" // +kubebuilder:validation:XValidation:message="provider.anthropic must be nil if the provider is not Anthropic",rule="!(has(self.anthropic) && self.provider != 'Anthropic')" // +kubebuilder:validation:XValidation:message="provider.azureOpenAI must be nil if the provider is not AzureOpenAI",rule="!(has(self.azureOpenAI) && self.provider != 'AzureOpenAI')" // +kubebuilder:validation:XValidation:message="provider.ollama must be nil if the provider is not Ollama",rule="!(has(self.ollama) && self.provider != 'Ollama')" +// +kubebuilder:validation:XValidation:message="provider.gemini must be nil if the provider is not Gemini",rule="!(has(self.gemini) && self.provider != 'Gemini')" // +kubebuilder:validation:XValidation:message="provider.geminiVertexAI must be nil if the provider is not GeminiVertexAI",rule="!(has(self.geminiVertexAI) && self.provider != 'GeminiVertexAI')" // +kubebuilder:validation:XValidation:message="provider.anthropicVertexAI must be nil if the provider is not AnthropicVertexAI",rule="!(has(self.anthropicVertexAI) && self.provider != 'AnthropicVertexAI')" - type ModelConfigSpec struct { Model string `json:"model"` @@ -215,7 +218,7 @@ type ModelConfigSpec struct { // +kubebuilder:default=OpenAI Provider ModelProvider `json:"provider"` - // The reference to the secret that contains the API key. Can either be a reference to the name of a secret in the same namespace as the referencing ModelConfig, or a reference to the name of a Secret in a different namespace in the form / + // The reference to the secret that contains the API key. Must be a reference to the name of a secret in the same namespace as the referencing ModelConfig // +optional APIKeySecretRef string `json:"apiKeySecretRef"` @@ -250,6 +253,10 @@ type ModelConfigSpec struct { // Gemini-specific configuration // +optional + Gemini *GeminiConfig `json:"gemini,omitempty"` + + // Gemini Vertex AI-specific configuration + // +optional GeminiVertexAI *GeminiVertexAIConfig `json:"geminiVertexAI,omitempty"` // Anthropic-specific configuration diff --git a/go/controller/api/v1alpha1/team_types.go b/go/controller/api/v1alpha1/team_types.go deleted file mode 100644 index 36cd7636f..000000000 --- a/go/controller/api/v1alpha1/team_types.go +++ /dev/null @@ -1,113 +0,0 @@ -/* -Copyright 2025. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package v1alpha1 - -import ( - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" -) - -const ( - TeamConditionTypeAccepted = "Accepted" -) - -// TeamSpec defines the desired state of Team. 
-type TeamSpec struct { - // Each Participant can either be a reference to the name of an Agent in the same namespace as the referencing Team, or a reference to the name of an Agent in a different namespace in the form / - Participants []string `json:"participants"` - Description string `json:"description"` - // Can either be a reference to the name of a ModelConfig in the same namespace as the referencing Team, or a reference to the name of a ModelConfig in a different namespace in the form / - ModelConfig string `json:"modelConfig"` - // +kubebuilder:validation:Optional - RoundRobinTeamConfig *RoundRobinTeamConfig `json:"roundRobinTeamConfig"` - // +kubebuilder:validation:Optional - TerminationCondition TerminationCondition `json:"terminationCondition"` - MaxTurns int64 `json:"maxTurns"` -} - -type RoundRobinTeamConfig struct{} - -type TerminationCondition struct { - // ONEOF: maxMessageTermination, textMentionTermination, orTermination - MaxMessageTermination *MaxMessageTermination `json:"maxMessageTermination,omitempty"` - TextMentionTermination *TextMentionTermination `json:"textMentionTermination,omitempty"` - TextMessageTermination *TextMessageTermination `json:"textMessageTermination,omitempty"` - FinalTextMessageTermination *FinalTextMessageTermination `json:"finalTextMessageTermination,omitempty"` - StopMessageTermination *StopMessageTermination `json:"stopMessageTermination,omitempty"` - OrTermination *OrTermination `json:"orTermination,omitempty"` -} - -type MaxMessageTermination struct { - MaxMessages int `json:"maxMessages"` -} - -type TextMentionTermination struct { - Text string `json:"text"` -} - -type TextMessageTermination struct { - Source string `json:"source"` -} - -type FinalTextMessageTermination struct { - Source string `json:"source"` -} - -type StopMessageTermination struct{} - -type OrTermination struct { - Conditions []OrTerminationCondition `json:"conditions"` -} - -type OrTerminationCondition struct { - MaxMessageTermination *MaxMessageTermination `json:"maxMessageTermination,omitempty"` - TextMentionTermination *TextMentionTermination `json:"textMentionTermination,omitempty"` -} - -// TeamStatus defines the observed state of Team. -type TeamStatus struct { - Conditions []metav1.Condition `json:"conditions"` - ObservedGeneration int64 `json:"observedGeneration"` -} - -// +kubebuilder:object:root=true -// +kubebuilder:subresource:status - -// Team is the Schema for the teams API. -type Team struct { - metav1.TypeMeta `json:",inline"` - metav1.ObjectMeta `json:"metadata,omitempty"` - - Spec TeamSpec `json:"spec,omitempty"` - Status TeamStatus `json:"status,omitempty"` -} - -// +kubebuilder:object:root=true - -// TeamList contains a list of Team. -type TeamList struct { - metav1.TypeMeta `json:",inline"` - metav1.ListMeta `json:"metadata,omitempty"` - Items []Team `json:"items"` -} - -func init() { - SchemeBuilder.Register(&Team{}, &TeamList{}) -} - -func (t *Team) GetModelConfigName() string { - return t.Spec.ModelConfig -} diff --git a/go/controller/api/v1alpha1/toolserver_types.go b/go/controller/api/v1alpha1/toolserver_types.go index e0bea6305..76cf3947f 100644 --- a/go/controller/api/v1alpha1/toolserver_types.go +++ b/go/controller/api/v1alpha1/toolserver_types.go @@ -17,6 +17,10 @@ limitations under the License. 
package v1alpha1 import ( + "database/sql" + "database/sql/driver" + "encoding/json" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) @@ -26,12 +30,40 @@ type ToolServerSpec struct { Config ToolServerConfig `json:"config"` } +type ToolServerType string + +const ( + ToolServerTypeStdio ToolServerType = "stdio" + ToolServerTypeSse ToolServerType = "sse" + ToolServerTypeStreamableHttp ToolServerType = "streamableHttp" +) + +// Only one of stdio, sse, or streamableHttp can be specified. +// +kubebuilder:validation:XValidation:rule="(has(self.stdio) && !has(self.sse) && !has(self.streamableHttp)) || (!has(self.stdio) && has(self.sse) && !has(self.streamableHttp)) || (!has(self.stdio) && !has(self.sse) && has(self.streamableHttp))",message="Exactly one of stdio, sse, or streamableHttp must be specified" type ToolServerConfig struct { + // +optional + Type ToolServerType `json:"type"` Stdio *StdioMcpServerConfig `json:"stdio,omitempty"` Sse *SseMcpServerConfig `json:"sse,omitempty"` StreamableHttp *StreamableHttpServerConfig `json:"streamableHttp,omitempty"` } +var _ sql.Scanner = (*ToolServerConfig)(nil) + +func (t *ToolServerConfig) Scan(src any) error { + switch v := src.(type) { + case []uint8: + return json.Unmarshal(v, t) + } + return nil +} + +var _ driver.Valuer = (*ToolServerConfig)(nil) + +func (t ToolServerConfig) Value() (driver.Value, error) { + return json.Marshal(t) +} + type ValueSourceType string const ( @@ -89,7 +121,7 @@ type SseMcpServerConfig struct { type StreamableHttpServerConfig struct { HttpToolServerConfig `json:",inline"` - TerminateOnClose bool `json:"terminateOnClose,omitempty"` + TerminateOnClose *bool `json:"terminateOnClose,omitempty"` } // ToolServerStatus defines the observed state of ToolServer. @@ -103,8 +135,9 @@ type ToolServerStatus struct { } type MCPTool struct { - Name string `json:"name"` - Component Component `json:"component"` + Name string `json:"name"` + Description string `json:"description"` + Component *Component `json:"component,omitempty"` } type Component struct { diff --git a/go/controller/api/v1alpha1/zz_generated.deepcopy.go b/go/controller/api/v1alpha1/zz_generated.deepcopy.go index 2d5d52b0a..f887a5ff2 100644 --- a/go/controller/api/v1alpha1/zz_generated.deepcopy.go +++ b/go/controller/api/v1alpha1/zz_generated.deepcopy.go @@ -22,7 +22,8 @@ package v1alpha1 import ( "encoding/json" - "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" runtime "k8s.io/apimachinery/pkg/runtime" ) @@ -176,6 +177,11 @@ func (in *AgentSpec) DeepCopyInto(out *AgentSpec) { *out = new(A2AConfig) (*in).DeepCopyInto(*out) } + if in.Deployment != nil { + in, out := &in.Deployment, &out.Deployment + *out = new(DeploymentSpec) + (*in).DeepCopyInto(*out) + } } // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AgentSpec. @@ -191,9 +197,14 @@ func (in *AgentSpec) DeepCopy() *AgentSpec { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *AgentStatus) DeepCopyInto(out *AgentStatus) { *out = *in + if in.ConfigHash != nil { + in, out := &in.ConfigHash, &out.ConfigHash + *out = make([]byte, len(*in)) + copy(*out, *in) + } if in.Conditions != nil { in, out := &in.Conditions, &out.Conditions - *out = make([]v1.Condition, len(*in)) + *out = make([]metav1.Condition, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } @@ -339,16 +350,69 @@ func (in *Component) DeepCopy() *Component { } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *FinalTextMessageTermination) DeepCopyInto(out *FinalTextMessageTermination) { +func (in *DeploymentSpec) DeepCopyInto(out *DeploymentSpec) { + *out = *in + if in.Replicas != nil { + in, out := &in.Replicas, &out.Replicas + *out = new(int32) + **out = **in + } + if in.ImagePullSecrets != nil { + in, out := &in.ImagePullSecrets, &out.ImagePullSecrets + *out = make([]v1.LocalObjectReference, len(*in)) + copy(*out, *in) + } + if in.Volumes != nil { + in, out := &in.Volumes, &out.Volumes + *out = make([]v1.Volume, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Labels != nil { + in, out := &in.Labels, &out.Labels + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + if in.Annotations != nil { + in, out := &in.Annotations, &out.Annotations + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + if in.Env != nil { + in, out := &in.Env, &out.Env + *out = make([]v1.EnvVar, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DeploymentSpec. +func (in *DeploymentSpec) DeepCopy() *DeploymentSpec { + if in == nil { + return nil + } + out := new(DeploymentSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *GeminiConfig) DeepCopyInto(out *GeminiConfig) { *out = *in } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FinalTextMessageTermination. -func (in *FinalTextMessageTermination) DeepCopy() *FinalTextMessageTermination { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GeminiConfig. +func (in *GeminiConfig) DeepCopy() *GeminiConfig { if in == nil { return nil } - out := new(FinalTextMessageTermination) + out := new(GeminiConfig) in.DeepCopyInto(out) return out } @@ -388,12 +452,12 @@ func (in *HttpToolServerConfig) DeepCopyInto(out *HttpToolServerConfig) { } if in.Timeout != nil { in, out := &in.Timeout, &out.Timeout - *out = new(v1.Duration) + *out = new(metav1.Duration) **out = **in } if in.SseReadTimeout != nil { in, out := &in.SseReadTimeout, &out.SseReadTimeout - *out = new(v1.Duration) + *out = new(metav1.Duration) **out = **in } } @@ -411,7 +475,11 @@ func (in *HttpToolServerConfig) DeepCopy() *HttpToolServerConfig { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *MCPTool) DeepCopyInto(out *MCPTool) { *out = *in - in.Component.DeepCopyInto(&out.Component) + if in.Component != nil { + in, out := &in.Component, &out.Component + *out = new(Component) + (*in).DeepCopyInto(*out) + } } // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MCPTool. 
@@ -424,21 +492,6 @@ func (in *MCPTool) DeepCopy() *MCPTool { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *MaxMessageTermination) DeepCopyInto(out *MaxMessageTermination) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaxMessageTermination. -func (in *MaxMessageTermination) DeepCopy() *MaxMessageTermination { - if in == nil { - return nil - } - out := new(MaxMessageTermination) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *McpServerTool) DeepCopyInto(out *McpServerTool) { *out = *in @@ -543,7 +596,7 @@ func (in *MemoryStatus) DeepCopyInto(out *MemoryStatus) { *out = *in if in.Conditions != nil { in, out := &in.Conditions, &out.Conditions - *out = make([]v1.Condition, len(*in)) + *out = make([]metav1.Condition, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } @@ -654,6 +707,11 @@ func (in *ModelConfigSpec) DeepCopyInto(out *ModelConfigSpec) { *out = new(OllamaConfig) (*in).DeepCopyInto(*out) } + if in.Gemini != nil { + in, out := &in.Gemini, &out.Gemini + *out = new(GeminiConfig) + **out = **in + } if in.GeminiVertexAI != nil { in, out := &in.GeminiVertexAI, &out.GeminiVertexAI *out = new(GeminiVertexAIConfig) @@ -681,7 +739,7 @@ func (in *ModelConfigStatus) DeepCopyInto(out *ModelConfigStatus) { *out = *in if in.Conditions != nil { in, out := &in.Conditions, &out.Conditions - *out = make([]v1.Condition, len(*in)) + *out = make([]metav1.Condition, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } @@ -765,53 +823,6 @@ func (in *OpenAIConfig) DeepCopy() *OpenAIConfig { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *OrTermination) DeepCopyInto(out *OrTermination) { - *out = *in - if in.Conditions != nil { - in, out := &in.Conditions, &out.Conditions - *out = make([]OrTerminationCondition, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OrTermination. -func (in *OrTermination) DeepCopy() *OrTermination { - if in == nil { - return nil - } - out := new(OrTermination) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *OrTerminationCondition) DeepCopyInto(out *OrTerminationCondition) { - *out = *in - if in.MaxMessageTermination != nil { - in, out := &in.MaxMessageTermination, &out.MaxMessageTermination - *out = new(MaxMessageTermination) - **out = **in - } - if in.TextMentionTermination != nil { - in, out := &in.TextMentionTermination, &out.TextMentionTermination - *out = new(TextMentionTermination) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OrTerminationCondition. -func (in *OrTerminationCondition) DeepCopy() *OrTerminationCondition { - if in == nil { - return nil - } - out := new(OrTerminationCondition) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *PineconeConfig) DeepCopyInto(out *PineconeConfig) { *out = *in @@ -832,21 +843,6 @@ func (in *PineconeConfig) DeepCopy() *PineconeConfig { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RoundRobinTeamConfig) DeepCopyInto(out *RoundRobinTeamConfig) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoundRobinTeamConfig. -func (in *RoundRobinTeamConfig) DeepCopy() *RoundRobinTeamConfig { - if in == nil { - return nil - } - out := new(RoundRobinTeamConfig) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *SseMcpServerConfig) DeepCopyInto(out *SseMcpServerConfig) { *out = *in @@ -897,25 +893,15 @@ func (in *StdioMcpServerConfig) DeepCopy() *StdioMcpServerConfig { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StopMessageTermination) DeepCopyInto(out *StopMessageTermination) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StopMessageTermination. -func (in *StopMessageTermination) DeepCopy() *StopMessageTermination { - if in == nil { - return nil - } - out := new(StopMessageTermination) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *StreamableHttpServerConfig) DeepCopyInto(out *StreamableHttpServerConfig) { *out = *in in.HttpToolServerConfig.DeepCopyInto(&out.HttpToolServerConfig) + if in.TerminateOnClose != nil { + in, out := &in.TerminateOnClose, &out.TerminateOnClose + *out = new(bool) + **out = **in + } } // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StreamableHttpServerConfig. @@ -928,188 +914,6 @@ func (in *StreamableHttpServerConfig) DeepCopy() *StreamableHttpServerConfig { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Team) DeepCopyInto(out *Team) { - *out = *in - out.TypeMeta = in.TypeMeta - in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) - in.Spec.DeepCopyInto(&out.Spec) - in.Status.DeepCopyInto(&out.Status) -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Team. -func (in *Team) DeepCopy() *Team { - if in == nil { - return nil - } - out := new(Team) - in.DeepCopyInto(out) - return out -} - -// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *Team) DeepCopyObject() runtime.Object { - if c := in.DeepCopy(); c != nil { - return c - } - return nil -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TeamList) DeepCopyInto(out *TeamList) { - *out = *in - out.TypeMeta = in.TypeMeta - in.ListMeta.DeepCopyInto(&out.ListMeta) - if in.Items != nil { - in, out := &in.Items, &out.Items - *out = make([]Team, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TeamList. 
-func (in *TeamList) DeepCopy() *TeamList { - if in == nil { - return nil - } - out := new(TeamList) - in.DeepCopyInto(out) - return out -} - -// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *TeamList) DeepCopyObject() runtime.Object { - if c := in.DeepCopy(); c != nil { - return c - } - return nil -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TeamSpec) DeepCopyInto(out *TeamSpec) { - *out = *in - if in.Participants != nil { - in, out := &in.Participants, &out.Participants - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.RoundRobinTeamConfig != nil { - in, out := &in.RoundRobinTeamConfig, &out.RoundRobinTeamConfig - *out = new(RoundRobinTeamConfig) - **out = **in - } - in.TerminationCondition.DeepCopyInto(&out.TerminationCondition) -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TeamSpec. -func (in *TeamSpec) DeepCopy() *TeamSpec { - if in == nil { - return nil - } - out := new(TeamSpec) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TeamStatus) DeepCopyInto(out *TeamStatus) { - *out = *in - if in.Conditions != nil { - in, out := &in.Conditions, &out.Conditions - *out = make([]v1.Condition, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TeamStatus. -func (in *TeamStatus) DeepCopy() *TeamStatus { - if in == nil { - return nil - } - out := new(TeamStatus) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TerminationCondition) DeepCopyInto(out *TerminationCondition) { - *out = *in - if in.MaxMessageTermination != nil { - in, out := &in.MaxMessageTermination, &out.MaxMessageTermination - *out = new(MaxMessageTermination) - **out = **in - } - if in.TextMentionTermination != nil { - in, out := &in.TextMentionTermination, &out.TextMentionTermination - *out = new(TextMentionTermination) - **out = **in - } - if in.TextMessageTermination != nil { - in, out := &in.TextMessageTermination, &out.TextMessageTermination - *out = new(TextMessageTermination) - **out = **in - } - if in.FinalTextMessageTermination != nil { - in, out := &in.FinalTextMessageTermination, &out.FinalTextMessageTermination - *out = new(FinalTextMessageTermination) - **out = **in - } - if in.StopMessageTermination != nil { - in, out := &in.StopMessageTermination, &out.StopMessageTermination - *out = new(StopMessageTermination) - **out = **in - } - if in.OrTermination != nil { - in, out := &in.OrTermination, &out.OrTermination - *out = new(OrTermination) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TerminationCondition. -func (in *TerminationCondition) DeepCopy() *TerminationCondition { - if in == nil { - return nil - } - out := new(TerminationCondition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *TextMentionTermination) DeepCopyInto(out *TextMentionTermination) {
-	*out = *in
-}
-
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TextMentionTermination.
-func (in *TextMentionTermination) DeepCopy() *TextMentionTermination {
-	if in == nil {
-		return nil
-	}
-	out := new(TextMentionTermination)
-	in.DeepCopyInto(out)
-	return out
-}
-
-// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
-func (in *TextMessageTermination) DeepCopyInto(out *TextMessageTermination) {
-	*out = *in
-}
-
-// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TextMessageTermination.
-func (in *TextMessageTermination) DeepCopy() *TextMessageTermination {
-	if in == nil {
-		return nil
-	}
-	out := new(TextMessageTermination)
-	in.DeepCopyInto(out)
-	return out
-}
-
 // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
 func (in *Tool) DeepCopyInto(out *Tool) {
 	*out = *in
@@ -1245,7 +1049,7 @@ func (in *ToolServerStatus) DeepCopyInto(out *ToolServerStatus) {
 	*out = *in
 	if in.Conditions != nil {
 		in, out := &in.Conditions, &out.Conditions
-		*out = make([]v1.Condition, len(*in))
+		*out = make([]metav1.Condition, len(*in))
 		for i := range *in {
 			(*in)[i].DeepCopyInto(&(*out)[i])
 		}
diff --git a/go/controller/cmd/main.go b/go/controller/cmd/main.go
index 294480bf0..3724a3ffb 100644
--- a/go/controller/cmd/main.go
+++ b/go/controller/cmd/main.go
@@ -20,28 +20,23 @@ import (
 	"context"
 	"crypto/tls"
 	"flag"
-	"fmt"
 	"net/http"
 	"net/http/pprof"
 	"os"
 	"path/filepath"
 	"strings"
-	"time"
 
 	"github.com/kagent-dev/kagent/go/internal/version"
 
-	"github.com/go-logr/logr"
 	"k8s.io/apimachinery/pkg/types"
 
-	autogen "github.com/kagent-dev/kagent/go/controller/internal/autogen"
 	"github.com/kagent-dev/kagent/go/controller/translator"
 	"github.com/kagent-dev/kagent/go/internal/a2a"
-	a2a_manager "github.com/kagent-dev/kagent/go/internal/a2a/manager"
-	autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client"
 	"github.com/kagent-dev/kagent/go/internal/database"
 	versionmetrics "github.com/kagent-dev/kagent/go/internal/metrics"
 
 	a2a_reconciler "github.com/kagent-dev/kagent/go/controller/internal/a2a"
+	"github.com/kagent-dev/kagent/go/controller/internal/reconciler"
 	"github.com/kagent-dev/kagent/go/internal/httpserver"
 	common "github.com/kagent-dev/kagent/go/internal/utils"
@@ -64,7 +59,7 @@ import (
 	metricsserver "sigs.k8s.io/controller-runtime/pkg/metrics/server"
 	"sigs.k8s.io/controller-runtime/pkg/webhook"
 
-	agentv1alpha1 "github.com/kagent-dev/kagent/go/controller/api/v1alpha1"
+	"github.com/kagent-dev/kagent/go/controller/api/v1alpha1"
 	"github.com/kagent-dev/kagent/go/controller/internal/controller"
 	"github.com/kagent-dev/kagent/go/internal/goruntime"
 	// +kubebuilder:scaffold:imports
@@ -84,7 +79,7 @@ var (
 func init() {
 	utilruntime.Must(clientgoscheme.AddToScheme(scheme))
-	utilruntime.Must(agentv1alpha1.AddToScheme(scheme))
+	utilruntime.Must(v1alpha1.AddToScheme(scheme))
 	// +kubebuilder:scaffold:scheme
 
 	ctrl.SetLogger(zap.New(zap.UseDevMode(true)))
@@ -99,7 +94,6 @@ func main() {
 	var probeAddr string
 	var secureMetrics bool
 	var enableHTTP2 bool
-	var autogenStudioBaseURL string
 	var defaultModelConfig types.NamespacedName
 	var tlsOpts []func(*tls.Config)
 	var httpServerAddr string
@@ -127,8 +121,6 @@ func main() {
 	flag.BoolVar(&enableHTTP2, "enable-http2", false, "If set, HTTP/2 will be enabled for the metrics and webhook servers")
-	flag.StringVar(&autogenStudioBaseURL, "autogen-base-url", "http://127.0.0.1:8081/api", "The base url of the Autogen Studio server.")
-
 	flag.StringVar(&defaultModelConfig.Name, "default-model-config-name", "default-model-config", "The name of the default model config.")
 	flag.StringVar(&defaultModelConfig.Namespace, "default-model-config-namespace", kagentNamespace, "The namespace of the default model config.")
 	flag.StringVar(&httpServerAddr, "http-server-address", ":8083", "The address the HTTP server binds to.")
@@ -297,87 +289,64 @@ func main() {
 
 	dbClient := database.NewClient(dbManager)
 
-	autogenClient := autogen_client.New(
-		autogenStudioBaseURL,
-	)
-
-	// wait for autogen to become ready on port 8081 before starting the manager
-	if err := waitForAutogenReady(context.Background(), setupLog, autogenClient, time.Minute*5, time.Second*15); err != nil {
-		setupLog.Error(err, "failed to wait for autogen to become ready")
-		os.Exit(1)
-	}
-
 	kubeClient := mgr.GetClient()
 
-	apiTranslator := translator.NewAutogenApiTranslator(
+	apiTranslator := translator.NewAdkApiTranslator(
 		kubeClient,
 		defaultModelConfig,
 	)
 
-	a2aStorage := a2a_manager.NewStorage(dbClient)
+	a2aHandler := a2a.NewA2AHttpMux(httpserver.APIPathA2A)
 
-	a2aHandler := a2a.NewA2AHttpMux(httpserver.APIPathA2A, a2aStorage)
-
-	a2aReconciler := a2a_reconciler.NewAutogenReconciler(
-		autogenClient,
+	a2aReconciler := a2a_reconciler.NewReconciler(
 		a2aHandler,
 		a2aBaseUrl+httpserver.APIPathA2A,
-		dbClient,
 	)
 
-	autogenReconciler := autogen.NewAutogenReconciler(
+	rcnclr := reconciler.NewKagentReconciler(
 		apiTranslator,
 		kubeClient,
-		autogenClient,
 		dbClient,
 		defaultModelConfig,
 		a2aReconciler,
 	)
 
-	if err = (&controller.AutogenTeamReconciler{
+	if err = (&controller.AgentReconciler{
 		Client: kubeClient,
 		Scheme: mgr.GetScheme(),
-		Reconciler: autogenReconciler,
+		Reconciler: rcnclr,
 	}).SetupWithManager(mgr); err != nil {
-		setupLog.Error(err, "unable to create controller", "controller", "AutogenTeam")
+		setupLog.Error(err, "unable to create controller", "controller", "Agent")
 		os.Exit(1)
 	}
-	if err = (&controller.AutogenAgentReconciler{
+	if err = (&controller.ModelConfigReconciler{
 		Client: kubeClient,
 		Scheme: mgr.GetScheme(),
-		Reconciler: autogenReconciler,
+		Reconciler: rcnclr,
 	}).SetupWithManager(mgr); err != nil {
-		setupLog.Error(err, "unable to create controller", "controller", "AutogenAgent")
+		setupLog.Error(err, "unable to create controller", "controller", "ModelConfig")
 		os.Exit(1)
 	}
-	if err = (&controller.AutogenModelConfigReconciler{
+	if err = (&controller.SecretReconciler{
 		Client: kubeClient,
 		Scheme: mgr.GetScheme(),
-		Reconciler: autogenReconciler,
+		Reconciler: rcnclr,
 	}).SetupWithManager(mgr); err != nil {
-		setupLog.Error(err, "unable to create controller", "controller", "AutogenModelConfig")
-		os.Exit(1)
-	}
-	if err = (&controller.AutogenSecretReconciler{
-		Client: kubeClient,
-		Scheme: mgr.GetScheme(),
-		Reconciler: autogenReconciler,
-	}).SetupWithManager(mgr); err != nil {
-		setupLog.Error(err, "unable to create controller", "controller", "AutogenSecret")
+		setupLog.Error(err, "unable to create controller", "controller", "Secret")
 		os.Exit(1)
 	}
 	if err = (&controller.ToolServerReconciler{
 		Client: mgr.GetClient(),
 		Scheme: mgr.GetScheme(),
-		Reconciler: autogenReconciler,
+		Reconciler: rcnclr,
 	}).SetupWithManager(mgr); err != nil {
 		setupLog.Error(err, "unable to create controller", "controller", "ToolServer")
 		os.Exit(1)
 	}
-	if err = (&controller.AutogenMemoryReconciler{
+	if err = (&controller.MemoryReconciler{
Client: kubeClient, Scheme: mgr.GetScheme(), - Reconciler: autogenReconciler, + Reconciler: rcnclr, }).SetupWithManager(mgr); err != nil { setupLog.Error(err, "unable to create controller", "controller", "Memory") os.Exit(1) @@ -415,7 +384,6 @@ func main() { httpServer, err := httpserver.NewHTTPServer(httpserver.ServerConfig{ BindAddr: httpServerAddr, - AutogenClient: autogenClient, KubeClient: kubeClient, A2AHandler: a2aHandler, WatchedNamespaces: watchNamespacesList, @@ -433,38 +401,6 @@ func main() { } } -func waitForAutogenReady( - ctx context.Context, - log logr.Logger, - client autogen_client.Client, - timeout, interval time.Duration, -) error { - log.Info("waiting for autogen to become ready") - return waitForReady(func() error { - version, err := client.GetVersion(ctx) - if err != nil { - log.Error(err, "autogen is not ready") - return err - } - log.Info("autogen is ready", "version", version) - return nil - }, timeout, interval) -} - -func waitForReady(f func() error, timeout, interval time.Duration) error { - deadline := time.Now().Add(timeout) - for { - if time.Now().After(deadline) { - return fmt.Errorf("timed out after %v", timeout) - } - if err := f(); err == nil { - return nil - } - - time.Sleep(interval) - } -} - // configureNamespaceWatching sets up the controller manager to watch specific namespaces // based on the provided configuration. It returns the list of namespaces being watched, // or nil if watching all namespaces. diff --git a/go/controller/internal/a2a/a2a_reconciler.go b/go/controller/internal/a2a/a2a_reconciler.go index ea7541dbc..310876ab2 100644 --- a/go/controller/internal/a2a/a2a_reconciler.go +++ b/go/controller/internal/a2a/a2a_reconciler.go @@ -2,13 +2,14 @@ package a2a import ( "context" + "fmt" "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" "github.com/kagent-dev/kagent/go/internal/a2a" - autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client" - "github.com/kagent-dev/kagent/go/internal/database" + "github.com/kagent-dev/kagent/go/internal/adk" common "github.com/kagent-dev/kagent/go/internal/utils" ctrl "sigs.k8s.io/controller-runtime" + a2aclient "trpc.group/trpc-go/trpc-a2a-go/client" ) var ( @@ -16,59 +17,55 @@ var ( ) type A2AReconciler interface { - ReconcileAutogenAgent( + ReconcileAgent( ctx context.Context, agent *v1alpha1.Agent, - autogenTeam *database.Agent, + adkConfig *adk.AgentConfig, ) error - ReconcileAutogenAgentDeletion( + ReconcileAgentDeletion( agentRef string, ) } type a2aReconciler struct { - a2aTranslator AutogenA2ATranslator - autogenClient autogen_client.Client - a2aHandler a2a.A2AHandlerMux + a2aHandler a2a.A2AHandlerMux + a2aBaseUrl string } -func NewAutogenReconciler( - autogenClient autogen_client.Client, +func NewReconciler( a2aHandler a2a.A2AHandlerMux, a2aBaseUrl string, - dbService database.Client, ) A2AReconciler { return &a2aReconciler{ - a2aTranslator: NewAutogenA2ATranslator(a2aBaseUrl, autogenClient, dbService), - autogenClient: autogenClient, - a2aHandler: a2aHandler, + a2aHandler: a2aHandler, + a2aBaseUrl: a2aBaseUrl, } } -func (a *a2aReconciler) ReconcileAutogenAgent( +func (a *a2aReconciler) ReconcileAgent( ctx context.Context, agent *v1alpha1.Agent, - autogenTeam *database.Agent, + adkConfig *adk.AgentConfig, ) error { - params, err := a.a2aTranslator.TranslateHandlerForAgent(ctx, agent, autogenTeam) + cardCopy := adkConfig.AgentCard + // Modify card for kagent proxy + agentRef := common.GetObjectRef(agent) + cardCopy.URL = fmt.Sprintf("%s/%s/", a.a2aBaseUrl, agentRef) + + client, 
err := a2aclient.NewA2AClient(adkConfig.AgentCard.URL) if err != nil { return err } - agentRef := common.GetObjectRef(agent) - if params == nil { - reconcileLog.Info("No a2a handler found for agent, a2a will be disabled", "agent", agentRef) - return nil - } - return a.a2aHandler.SetAgentHandler( agentRef, - params, + client, + cardCopy, ) } -func (a *a2aReconciler) ReconcileAutogenAgentDeletion( +func (a *a2aReconciler) ReconcileAgentDeletion( agentRef string, ) { a.a2aHandler.RemoveAgentHandler(agentRef) diff --git a/go/controller/internal/a2a/autogen_a2a_translator.go b/go/controller/internal/a2a/autogen_a2a_translator.go deleted file mode 100644 index d6c585450..000000000 --- a/go/controller/internal/a2a/autogen_a2a_translator.go +++ /dev/null @@ -1,270 +0,0 @@ -package a2a - -import ( - "context" - "errors" - "fmt" - "log" - - "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" - "github.com/kagent-dev/kagent/go/internal/a2a" - autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client" - "github.com/kagent-dev/kagent/go/internal/database" - "github.com/kagent-dev/kagent/go/internal/utils" - common "github.com/kagent-dev/kagent/go/internal/utils" - "gorm.io/gorm" - "k8s.io/utils/ptr" - "trpc.group/trpc-go/trpc-a2a-go/server" -) - -// translates A2A Handlers from autogen agents/teams -type AutogenA2ATranslator interface { - TranslateHandlerForAgent( - ctx context.Context, - agent *v1alpha1.Agent, - autogenTeam *database.Agent, - ) (*a2a.A2AHandlerParams, error) -} - -type autogenA2ATranslator struct { - a2aBaseUrl string - autogenClient autogen_client.Client - dbService database.Client -} - -var _ AutogenA2ATranslator = &autogenA2ATranslator{} - -func NewAutogenA2ATranslator( - a2aBaseUrl string, - autogenClient autogen_client.Client, - dbService database.Client, -) AutogenA2ATranslator { - return &autogenA2ATranslator{ - a2aBaseUrl: a2aBaseUrl, - autogenClient: autogenClient, - dbService: dbService, - } -} - -func (a *autogenA2ATranslator) TranslateHandlerForAgent( - ctx context.Context, - agent *v1alpha1.Agent, - autogenTeam *database.Agent, -) (*a2a.A2AHandlerParams, error) { - card, err := a.translateCardForAgent(agent) - if err != nil { - return nil, err - } - if card == nil { - return nil, nil - } - - handler, err := a.makeHandlerForTeam(autogenTeam, a.dbService) - if err != nil { - return nil, err - } - - return &a2a.A2AHandlerParams{ - AgentCard: *card, - TaskHandler: handler, - }, nil -} - -func (a *autogenA2ATranslator) translateCardForAgent( - agent *v1alpha1.Agent, -) (*server.AgentCard, error) { - a2AConfig := agent.Spec.A2AConfig - if a2AConfig == nil { - return nil, nil - } - - agentRef := common.GetObjectRef(agent) - - skills := a2AConfig.Skills - if len(skills) == 0 { - return nil, fmt.Errorf("no skills found for agent %s", agentRef) - } - - var convertedSkills []server.AgentSkill - for _, skill := range skills { - convertedSkills = append(convertedSkills, server.AgentSkill(skill)) - } - - return &server.AgentCard{ - Name: agentRef, - Description: agent.Spec.Description, - URL: fmt.Sprintf("%s/%s/", a.a2aBaseUrl, agentRef), - //Provider: nil, - Version: fmt.Sprintf("%v", agent.Generation), - //DocumentationURL: nil, - //Authentication: nil, - Capabilities: server.AgentCapabilities{ - Streaming: ptr.To(true), - }, - DefaultInputModes: []string{"text"}, - DefaultOutputModes: []string{"text"}, - Skills: convertedSkills, - }, nil -} - -func (a *autogenA2ATranslator) makeHandlerForTeam( - autogenTeam *database.Agent, - dbService database.Client, -) 
(a2a.MessageHandler, error) { - return &taskHandler{ - team: autogenTeam, - client: a.autogenClient, - dbService: dbService, - }, nil -} - -type taskHandler struct { - team *database.Agent - client autogen_client.Client - dbService database.Client -} - -func (t *taskHandler) HandleMessage(ctx context.Context, task string, contextID *string) ([]autogen_client.Event, error) { - var taskResult *autogen_client.TaskResult - if contextID != nil && *contextID != "" { - log.Printf("Handling message for session %s", *contextID) - session, err := t.getOrCreateSession(ctx, *contextID) - if err != nil { - return nil, fmt.Errorf("failed to get session: %w", err) - } - - messages, err := t.prepareMessages(ctx, session) - if err != nil { - return nil, fmt.Errorf("failed to prepare messages: %w", err) - } - - // Debug logging - log.Printf("DEBUG: About to call InvokeTask with Messages - len: %d, nil: %v", len(messages), messages == nil) - - resp, err := t.client.InvokeTask(ctx, &autogen_client.InvokeTaskRequest{ - Task: task, - TeamConfig: &t.team.Component, - Messages: messages, - }) - if err != nil { - return nil, fmt.Errorf("failed to invoke task: %w", err) - } - taskResult = &resp.TaskResult - } else { - - resp, err := t.client.InvokeTask(ctx, &autogen_client.InvokeTaskRequest{ - Task: task, - TeamConfig: &t.team.Component, - }) - if err != nil { - return nil, fmt.Errorf("failed to invoke task: %w", err) - } - taskResult = &resp.TaskResult - } - - return taskResult.Messages, nil -} - -// getOrCreateSession gets a session from the database or creates a new one if it doesn't exist -func (t *taskHandler) getOrCreateSession(ctx context.Context, contextID string) (*database.Session, error) { - session, err := t.dbService.GetSession(contextID, common.GetGlobalUserID()) - if err != nil { - if errors.Is(err, gorm.ErrRecordNotFound) { - session = &database.Session{ - ID: contextID, - UserID: common.GetGlobalUserID(), - AgentID: &t.team.ID, - Name: contextID, - } - err := t.dbService.CreateSession(session) - if err != nil { - return nil, fmt.Errorf("failed to create session: %w", err) - } - } else { - return nil, fmt.Errorf("failed to get session: %w", err) - } - } - return session, nil -} - -func (t *taskHandler) prepareMessages(ctx context.Context, session *database.Session) ([]autogen_client.Event, error) { - messages, err := t.dbService.ListMessagesForSession(session.ID, common.GetGlobalUserID()) - if err != nil { - return nil, fmt.Errorf("failed to get messages for session: %w", err) - } - - log.Printf("Retrieved %d messages for session %s", len(messages), session.ID) - - parsedMessages, err := database.ParseMessages(messages) - if err != nil { - return nil, fmt.Errorf("failed to parse messages: %w", err) - } - - autogenEvents, err := utils.ConvertMessagesToAutogenEvents(parsedMessages) - if err != nil { - return nil, fmt.Errorf("failed to convert messages to autogen events: %w", err) - } - return autogenEvents, nil -} - -func (t *taskHandler) HandleMessageStream(ctx context.Context, task string, contextID *string) (<-chan autogen_client.Event, error) { - if contextID != nil && *contextID != "" { - session, err := t.getOrCreateSession(ctx, *contextID) - if err != nil { - return nil, fmt.Errorf("failed to get session: %w", err) - } - - messages, err := t.prepareMessages(ctx, session) - if err != nil { - return nil, fmt.Errorf("failed to prepare messages: %w", err) - } - - stream, err := t.client.InvokeTaskStream(ctx, &autogen_client.InvokeTaskRequest{ - Task: task, - TeamConfig: &t.team.Component, - 
Messages: messages, - }) - if err != nil { - return nil, fmt.Errorf("failed to invoke task: %w", err) - } - - events := make(chan autogen_client.Event) - go func() { - defer close(events) - for event := range stream { - parsedEvent, err := autogen_client.ParseEvent(event.Data) - if err != nil { - log.Printf("failed to parse event: %v", err) - continue - } - events <- parsedEvent - } - }() - - return events, nil - } else { - - stream, err := t.client.InvokeTaskStream(ctx, &autogen_client.InvokeTaskRequest{ - Task: task, - TeamConfig: &t.team.Component, - }) - if err != nil { - return nil, fmt.Errorf("failed to invoke task: %w", err) - } - - events := make(chan autogen_client.Event, 10) - go func() { - defer close(events) - for event := range stream { - parsedEvent, err := autogen_client.ParseEvent(event.Data) - if err != nil { - log.Printf("failed to parse event: %v", err) - continue - } - events <- parsedEvent - } - }() - - return events, nil - } -} diff --git a/go/controller/internal/a2a/autogen_a2a_translator_test.go b/go/controller/internal/a2a/autogen_a2a_translator_test.go deleted file mode 100644 index e901128af..000000000 --- a/go/controller/internal/a2a/autogen_a2a_translator_test.go +++ /dev/null @@ -1,495 +0,0 @@ -package a2a_test - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" - "github.com/kagent-dev/kagent/go/controller/internal/a2a" - "github.com/kagent-dev/kagent/go/internal/autogen/api" - autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client" - "github.com/kagent-dev/kagent/go/internal/autogen/client/fake" - "github.com/kagent-dev/kagent/go/internal/database" - fake_db "github.com/kagent-dev/kagent/go/internal/database/fake" - common "github.com/kagent-dev/kagent/go/internal/utils" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/utils/ptr" -) - -// Helper function to create a mock autogen team with proper Component -func createMockAutogenTeam(id int, label string) *database.Agent { - return &database.Agent{ - Component: api.Component{ - Provider: "test.provider", - ComponentType: "team", - Version: 1, - Description: "Test team component", - Label: label, - Config: map[string]interface{}{}, - }, - } -} - -func TestNewAutogenA2ATranslator(t *testing.T) { - mockClient := fake.NewMockAutogenClient() - baseURL := "http://localhost:8083" - dbService := fake_db.NewClient() - - translator := a2a.NewAutogenA2ATranslator(baseURL, mockClient, dbService) - - assert.NotNil(t, translator) - assert.Implements(t, (*a2a.AutogenA2ATranslator)(nil), translator) -} - -func TestTranslateHandlerForAgent(t *testing.T) { - ctx := context.Background() - baseURL := "http://localhost:8083" - - t.Run("should return handler params for valid agent with A2A config", func(t *testing.T) { - mockClient := fake.NewMockAutogenClient() - dbService := fake_db.NewClient() - translator := a2a.NewAutogenA2ATranslator(baseURL, mockClient, dbService) - - agent := &v1alpha1.Agent{ - ObjectMeta: metav1.ObjectMeta{ - Name: "test-agent", - Namespace: "test-namespace", - Generation: 1, - }, - Spec: v1alpha1.AgentSpec{ - Description: "Test agent", - A2AConfig: &v1alpha1.A2AConfig{ - Skills: []v1alpha1.AgentSkill{ - { - ID: "skill1", - Name: "Test Skill", - Description: ptr.To("A test skill"), - }, - }, - }, - }, - } - - autogenTeam := createMockAutogenTeam(123, common.GetObjectRef(agent)) - - result, err := translator.TranslateHandlerForAgent(ctx, agent, 
autogenTeam) - - require.NoError(t, err) - require.NotNil(t, result) - assert.Equal(t, "test-namespace/test-agent", result.AgentCard.Name) - assert.Equal(t, "Test agent", result.AgentCard.Description) - assert.Equal(t, "http://localhost:8083/test-namespace/test-agent/", result.AgentCard.URL) - assert.Equal(t, "1", result.AgentCard.Version) - assert.Equal(t, []string{"text"}, result.AgentCard.DefaultInputModes) - assert.Equal(t, []string{"text"}, result.AgentCard.DefaultOutputModes) - assert.Len(t, result.AgentCard.Skills, 1) - assert.Equal(t, "skill1", result.AgentCard.Skills[0].ID) - assert.NotNil(t, result.TaskHandler) - }) - - t.Run("should return nil for agent without A2A config", func(t *testing.T) { - mockClient := fake.NewMockAutogenClient() - dbService := fake_db.NewClient() - translator := a2a.NewAutogenA2ATranslator(baseURL, mockClient, dbService) - - agent := &v1alpha1.Agent{ - ObjectMeta: metav1.ObjectMeta{ - Name: "test-agent", - Namespace: "test-namespace", - }, - Spec: v1alpha1.AgentSpec{ - Description: "Test agent", - A2AConfig: nil, - }, - } - - autogenTeam := createMockAutogenTeam(123, common.GetObjectRef(agent)) - - result, err := translator.TranslateHandlerForAgent(ctx, agent, autogenTeam) - - require.NoError(t, err) - assert.Nil(t, result) - }) - - t.Run("should return error for agent with A2A config but no skills", func(t *testing.T) { - mockClient := fake.NewMockAutogenClient() - dbService := fake_db.NewClient() - translator := a2a.NewAutogenA2ATranslator(baseURL, mockClient, dbService) - - agent := &v1alpha1.Agent{ - ObjectMeta: metav1.ObjectMeta{ - Name: "test-agent", - Namespace: "test-namespace", - }, - Spec: v1alpha1.AgentSpec{ - Description: "Test agent", - A2AConfig: &v1alpha1.A2AConfig{ - Skills: []v1alpha1.AgentSkill{}, - }, - }, - } - - autogenTeam := createMockAutogenTeam(123, common.GetObjectRef(agent)) - - result, err := translator.TranslateHandlerForAgent(ctx, agent, autogenTeam) - - require.Error(t, err) - assert.Contains(t, err.Error(), "no skills found for agent test-namespace/test-agent") - assert.Nil(t, result) - }) -} - -func TestTaskHandlerWithSession(t *testing.T) { - ctx := context.Background() - baseURL := "http://localhost:8083" - - t.Run("should use existing session when session ID provided", func(t *testing.T) { - sessionID := "test-session" - task := "test task" - - mockClient := fake.NewMockAutogenClient() - dbService := fake_db.NewClient() - - // Create a session in the in-memory client - session := &database.Session{ - ID: sessionID, - UserID: "admin@kagent.dev", - AgentID: ptr.To(uint(1)), - } - err := dbService.CreateSession(session) - require.NoError(t, err) - assert.Equal(t, sessionID, session.ID) - assert.Equal(t, "test-session", session.ID) - - translator := a2a.NewAutogenA2ATranslator(baseURL, mockClient, dbService) - - agent := &v1alpha1.Agent{ - ObjectMeta: metav1.ObjectMeta{ - Name: "test-agent", - Namespace: "test-namespace", - Generation: 1, - }, - Spec: v1alpha1.AgentSpec{ - Description: "Test agent", - A2AConfig: &v1alpha1.A2AConfig{ - Skills: []v1alpha1.AgentSkill{ - {ID: "skill1", Name: "Test Skill"}, - }, - }, - }, - } - - autogenTeam := createMockAutogenTeam(123, common.GetObjectRef(agent)) - - result, err := translator.TranslateHandlerForAgent(ctx, agent, autogenTeam) - require.NoError(t, err) - require.NotNil(t, result) - - // Test the handler - events, err := result.TaskHandler.HandleMessage(ctx, task, ptr.To(sessionID)) - require.NoError(t, err) - require.Len(t, events, 1) - - // Check that we got a TextMessage 
with the expected content - textMsg, ok := events[0].(*autogen_client.TextMessage) - require.True(t, ok, "Expected TextMessage event") - assert.Equal(t, "Session task completed: test task", textMsg.Content) - assert.Equal(t, "assistant", textMsg.Source) - }) - - t.Run("should create new session when session not found", func(t *testing.T) { - sessionID := "new-session" - task := "test task" - - mockClient := fake.NewMockAutogenClient() - dbService := fake_db.NewClient() - // Don't create any session - this will trigger the NotFound behavior - - translator := a2a.NewAutogenA2ATranslator(baseURL, mockClient, dbService) - - agent := &v1alpha1.Agent{ - ObjectMeta: metav1.ObjectMeta{ - Name: "test-agent", - Namespace: "test-namespace", - Generation: 1, - }, - Spec: v1alpha1.AgentSpec{ - Description: "Test agent", - A2AConfig: &v1alpha1.A2AConfig{ - Skills: []v1alpha1.AgentSkill{ - {ID: "skill1", Name: "Test Skill"}, - }, - }, - }, - } - - autogenTeam := createMockAutogenTeam(123, common.GetObjectRef(agent)) - - result, err := translator.TranslateHandlerForAgent(ctx, agent, autogenTeam) - require.NoError(t, err) - require.NotNil(t, result) - - // Test the handler - this should create a new session and then invoke it - events, err := result.TaskHandler.HandleMessage(ctx, task, ptr.To(sessionID)) - require.NoError(t, err) - require.Len(t, events, 1) - - // Check that we got a TextMessage with the expected content - textMsg, ok := events[0].(*autogen_client.TextMessage) - require.True(t, ok, "Expected TextMessage event") - assert.Equal(t, "Session task completed: test task", textMsg.Content) - - // Verify the session was created - createdSession, err := dbService.GetSession(sessionID, "admin@kagent.dev") - require.NoError(t, err) - assert.Equal(t, sessionID, createdSession.ID) - }) -} - -func TestTaskHandlerWithoutSession(t *testing.T) { - ctx := context.Background() - baseURL := "http://localhost:8083" - - t.Run("should invoke task directly when no session ID provided", func(t *testing.T) { - task := "test task" - - mockClient := fake.NewMockAutogenClient() - dbService := fake_db.NewClient() - - translator := a2a.NewAutogenA2ATranslator(baseURL, mockClient, dbService) - - agent := &v1alpha1.Agent{ - ObjectMeta: metav1.ObjectMeta{ - Name: "test-agent", - Namespace: "test-namespace", - Generation: 1, - }, - Spec: v1alpha1.AgentSpec{ - Description: "Test agent", - A2AConfig: &v1alpha1.A2AConfig{ - Skills: []v1alpha1.AgentSkill{ - {ID: "skill1", Name: "Test Skill"}, - }, - }, - }, - } - - autogenTeam := createMockAutogenTeam(123, common.GetObjectRef(agent)) - - result, err := translator.TranslateHandlerForAgent(ctx, agent, autogenTeam) - require.NoError(t, err) - require.NotNil(t, result) - - // Test the handler without session ID - events, err := result.TaskHandler.HandleMessage(ctx, task, nil) - require.NoError(t, err) - require.Len(t, events, 1) - - // Check that we got a TextMessage with the expected content - textMsg, ok := events[0].(*autogen_client.TextMessage) - require.True(t, ok, "Expected TextMessage event") - assert.Equal(t, "Session task completed: test task", textMsg.Content) - assert.Equal(t, "assistant", textMsg.Source) - }) - - t.Run("should invoke task directly when empty session ID provided", func(t *testing.T) { - task := "test task" - - mockClient := fake.NewMockAutogenClient() - dbService := fake_db.NewClient() - - translator := a2a.NewAutogenA2ATranslator(baseURL, mockClient, dbService) - - agent := &v1alpha1.Agent{ - ObjectMeta: metav1.ObjectMeta{ - Name: "test-agent", - 
Namespace: "test-namespace", - Generation: 1, - }, - Spec: v1alpha1.AgentSpec{ - Description: "Test agent", - A2AConfig: &v1alpha1.A2AConfig{ - Skills: []v1alpha1.AgentSkill{ - {ID: "skill1", Name: "Test Skill"}, - }, - }, - }, - } - - autogenTeam := createMockAutogenTeam(123, common.GetObjectRef(agent)) - - result, err := translator.TranslateHandlerForAgent(ctx, agent, autogenTeam) - require.NoError(t, err) - require.NotNil(t, result) - - // Test the handler with empty session ID - events, err := result.TaskHandler.HandleMessage(ctx, task, nil) - require.NoError(t, err) - require.Len(t, events, 1) - - // Check that we got a TextMessage with the expected content - textMsg, ok := events[0].(*autogen_client.TextMessage) - require.True(t, ok, "Expected TextMessage event") - assert.Equal(t, "Session task completed: test task", textMsg.Content) - }) -} - -func TestTaskHandlerMessageContentExtraction(t *testing.T) { - ctx := context.Background() - baseURL := "http://localhost:8083" - - t.Run("should extract string content from TextMessage events", func(t *testing.T) { - task := "test task" - - mockClient := fake.NewMockAutogenClient() - dbService := fake_db.NewClient() - - translator := a2a.NewAutogenA2ATranslator(baseURL, mockClient, dbService) - - agent := &v1alpha1.Agent{ - ObjectMeta: metav1.ObjectMeta{ - Name: "test-agent", - Namespace: "test-namespace", - Generation: 1, - }, - Spec: v1alpha1.AgentSpec{ - Description: "Test agent", - A2AConfig: &v1alpha1.A2AConfig{ - Skills: []v1alpha1.AgentSkill{ - {ID: "skill1", Name: "Test Skill"}, - }, - }, - }, - } - - autogenTeam := createMockAutogenTeam(123, common.GetObjectRef(agent)) - - result, err := translator.TranslateHandlerForAgent(ctx, agent, autogenTeam) - require.NoError(t, err) - require.NotNil(t, result) - - // Test the handler - events, err := result.TaskHandler.HandleMessage(ctx, task, nil) - require.NoError(t, err) - require.Len(t, events, 1) - - // Test that GetLastStringMessage works correctly - lastString := autogen_client.GetLastStringMessage(events) - assert.Equal(t, "Session task completed: test task", lastString) - }) - - t.Run("should handle empty event list", func(t *testing.T) { - // Test that GetLastStringMessage handles empty list gracefully - lastString := autogen_client.GetLastStringMessage([]autogen_client.Event{}) - assert.Equal(t, "", lastString) - }) -} - -func TestTaskHandlerStreamingSupport(t *testing.T) { - ctx := context.Background() - baseURL := "http://localhost:8083" - - t.Run("should support streaming without session", func(t *testing.T) { - task := "test task" - - mockClient := fake.NewMockAutogenClient() - dbService := fake_db.NewClient() - translator := a2a.NewAutogenA2ATranslator(baseURL, mockClient, dbService) - - agent := &v1alpha1.Agent{ - ObjectMeta: metav1.ObjectMeta{ - Name: "test-agent", - Namespace: "test-namespace", - Generation: 1, - }, - Spec: v1alpha1.AgentSpec{ - Description: "Test agent", - A2AConfig: &v1alpha1.A2AConfig{ - Skills: []v1alpha1.AgentSkill{ - {ID: "skill1", Name: "Test Skill"}, - }, - }, - }, - } - - autogenTeam := createMockAutogenTeam(123, common.GetObjectRef(agent)) - - result, err := translator.TranslateHandlerForAgent(ctx, agent, autogenTeam) - require.NoError(t, err) - require.NotNil(t, result) - - // Test streaming - eventChan, err := result.TaskHandler.HandleMessageStream(ctx, task, nil) - require.NoError(t, err) - require.NotNil(t, eventChan) - - // Collect events from channel - var events []autogen_client.Event - for event := range eventChan { - events = 
append(events, event) - } - - require.Len(t, events, 1) - textMsg, ok := events[0].(*autogen_client.TextMessage) - require.True(t, ok, "Expected TextMessage event") - assert.Equal(t, "Session task completed: test task", textMsg.Content) - }) - - t.Run("should support streaming with session", func(t *testing.T) { - sessionID := "test-session" - task := "test task" - - mockClient := fake.NewMockAutogenClient() - dbService := fake_db.NewClient() - - // Create a session - err := dbService.CreateSession(&database.Session{ - ID: sessionID, - UserID: "admin@kagent.dev", - }) - require.NoError(t, err) - - translator := a2a.NewAutogenA2ATranslator(baseURL, mockClient, dbService) - - agent := &v1alpha1.Agent{ - ObjectMeta: metav1.ObjectMeta{ - Name: "test-agent", - Namespace: "test-namespace", - Generation: 1, - }, - Spec: v1alpha1.AgentSpec{ - Description: "Test agent", - A2AConfig: &v1alpha1.A2AConfig{ - Skills: []v1alpha1.AgentSkill{ - {ID: "skill1", Name: "Test Skill"}, - }, - }, - }, - } - - autogenTeam := createMockAutogenTeam(123, common.GetObjectRef(agent)) - - result, err := translator.TranslateHandlerForAgent(ctx, agent, autogenTeam) - require.NoError(t, err) - require.NotNil(t, result) - - // Test streaming with session - eventChan, err := result.TaskHandler.HandleMessageStream(ctx, task, ptr.To(sessionID)) - require.NoError(t, err) - require.NotNil(t, eventChan) - - // Collect events from channel - var events []autogen_client.Event - for event := range eventChan { - events = append(events, event) - } - - require.Len(t, events, 1) - textMsg, ok := events[0].(*autogen_client.TextMessage) - require.True(t, ok, "Expected TextMessage event") - assert.Equal(t, "Session task completed: test task", textMsg.Content) - }) -} diff --git a/go/controller/internal/autogen/autogen_reconciler.go b/go/controller/internal/autogen/autogen_reconciler.go deleted file mode 100644 index 5c36695d9..000000000 --- a/go/controller/internal/autogen/autogen_reconciler.go +++ /dev/null @@ -1,946 +0,0 @@ -package autogen - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "reflect" - "strings" - "sync" - - "github.com/hashicorp/go-multierror" - "gorm.io/gorm" - k8s_errors "k8s.io/apimachinery/pkg/api/errors" - "k8s.io/apimachinery/pkg/api/meta" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - - "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" - "github.com/kagent-dev/kagent/go/controller/internal/a2a" - "github.com/kagent-dev/kagent/go/controller/translator" - "github.com/kagent-dev/kagent/go/internal/autogen/api" - autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client" - "github.com/kagent-dev/kagent/go/internal/database" - common "github.com/kagent-dev/kagent/go/internal/utils" - "k8s.io/apimachinery/pkg/types" - ctrl "sigs.k8s.io/controller-runtime" - "sigs.k8s.io/controller-runtime/pkg/client" -) - -var ( - reconcileLog = ctrl.Log.WithName("reconciler") -) - -type AutogenReconciler interface { - ReconcileAutogenAgent(ctx context.Context, req ctrl.Request) error - ReconcileAutogenModelConfig(ctx context.Context, req ctrl.Request) error - ReconcileAutogenTeam(ctx context.Context, req ctrl.Request) error - ReconcileAutogenApiKeySecret(ctx context.Context, req ctrl.Request) error - ReconcileAutogenToolServer(ctx context.Context, req ctrl.Request) error - ReconcileAutogenMemory(ctx context.Context, req ctrl.Request) error -} - -type autogenReconciler struct { - autogenTranslator translator.ApiTranslator - a2aReconciler a2a.A2AReconciler - - kube client.Client - autogenClient 
autogen_client.Client - dbClient database.Client - - defaultModelConfig types.NamespacedName - upsertLock sync.Mutex -} - -func NewAutogenReconciler( - translator translator.ApiTranslator, - kube client.Client, - autogenClient autogen_client.Client, - dbClient database.Client, - defaultModelConfig types.NamespacedName, - a2aReconciler a2a.A2AReconciler, -) AutogenReconciler { - return &autogenReconciler{ - autogenTranslator: translator, - kube: kube, - autogenClient: autogenClient, - dbClient: dbClient, - defaultModelConfig: defaultModelConfig, - a2aReconciler: a2aReconciler, - } -} - -func (a *autogenReconciler) ReconcileAutogenAgent(ctx context.Context, req ctrl.Request) error { - // reconcile the agent team itself - - // TODO(sbx0r): missing finalizer logic - - agent := &v1alpha1.Agent{} - if err := a.kube.Get(ctx, req.NamespacedName, agent); err != nil { - if k8s_errors.IsNotFound(err) { - return a.handleAgentDeletion(req) - } - - return fmt.Errorf("failed to get agent %s/%s: %w", req.Namespace, req.Name, err) - } - - return a.handleExistingAgent(ctx, agent, req) -} - -func (a *autogenReconciler) handleAgentDeletion(req ctrl.Request) error { - // TODO(sbx0r): handle deletion of agents with multiple teams assignment - - // agents, err := a.findTeamsUsingAgent(ctx, req) - // if err != nil { - // return fmt.Errorf("failed to find teams for agent %s/%s: %v", req.Namespace, req.Name, err) - // } - // if len(agents) > 1 { - // reconcileLog.Info("agent with multiple dependencies was deleted", - // "namespace", req.Namespace, - // "name", req.Name, - // "agents", agents) - // } - - // remove a2a handler if it exists - a.a2aReconciler.ReconcileAutogenAgentDeletion(req.NamespacedName.String()) - - if err := a.dbClient.DeleteAgent(req.NamespacedName.String()); err != nil { - return fmt.Errorf("failed to delete agent %s: %w", - req.NamespacedName.String(), err) - } - - reconcileLog.Info("Agent was deleted", "namespace", req.Namespace, "name", req.Name) - return nil -} - -func (a *autogenReconciler) handleExistingAgent(ctx context.Context, agent *v1alpha1.Agent, req ctrl.Request) error { - isNewAgent := agent.Status.ObservedGeneration == 0 - isUpdatedAgent := agent.Generation > agent.Status.ObservedGeneration - - if isNewAgent { - reconcileLog.Info("New agent was created", - "namespace", req.Namespace, - "name", req.Name, - "generation", agent.Generation) - } else if isUpdatedAgent { - reconcileLog.Info("Agent was updated", - "namespace", req.Namespace, - "name", req.Name, - "oldGeneration", agent.Status.ObservedGeneration, - "newGeneration", agent.Generation) - } - - err := a.reconcileAgents(ctx, agent) - if err != nil { - return fmt.Errorf("failed to reconcile agent %s/%s: %w", - req.Namespace, req.Name, err) - } - - teams, err := a.findTeamsUsingAgent(ctx, req) - if err != nil { - return fmt.Errorf("failed to find teams for agent %s/%s: %w", - req.Namespace, req.Name, err) - } - - return a.reconcileTeams(ctx, teams...) 
-} - -func (a *autogenReconciler) reconcileAgentStatus(ctx context.Context, agent *v1alpha1.Agent, err error) error { - var ( - status metav1.ConditionStatus - message string - reason string - ) - if err != nil { - status = metav1.ConditionFalse - message = err.Error() - reason = "AgentReconcileFailed" - reconcileLog.Error(err, "failed to reconcile agent", "agent", common.GetObjectRef(agent)) - } else { - status = metav1.ConditionTrue - reason = "AgentReconciled" - } - - conditionChanged := meta.SetStatusCondition(&agent.Status.Conditions, metav1.Condition{ - Type: v1alpha1.AgentConditionTypeAccepted, - Status: status, - LastTransitionTime: metav1.Now(), - Reason: reason, - Message: message, - }) - - // update the status if it has changed or the generation has changed - if conditionChanged || agent.Status.ObservedGeneration != agent.Generation { - agent.Status.ObservedGeneration = agent.Generation - if err := a.kube.Status().Update(ctx, agent); err != nil { - return fmt.Errorf("failed to update agent status: %v", err) - } - } - return nil -} - -func (a *autogenReconciler) ReconcileAutogenModelConfig(ctx context.Context, req ctrl.Request) error { - modelConfig := &v1alpha1.ModelConfig{} - if err := a.kube.Get(ctx, req.NamespacedName, modelConfig); err != nil { - return fmt.Errorf("failed to get model %s: %v", req.Name, err) - } - - agents, err := a.findAgentsUsingModel(ctx, req) - if err != nil { - return fmt.Errorf("failed to find agents for model %s: %v", req.Name, err) - } - - if err := a.reconcileAgents(ctx, agents...); err != nil { - return fmt.Errorf("failed to reconcile agents for model %s: %v", req.Name, err) - } - - teams, err := a.findTeamsUsingModel(ctx, req) - if err != nil { - return fmt.Errorf("failed to find teams for model %s: %v", req.Name, err) - } - - return a.reconcileModelConfigStatus( - ctx, - modelConfig, - a.reconcileTeams(ctx, teams...), - ) -} - -func (a *autogenReconciler) reconcileModelConfigStatus(ctx context.Context, modelConfig *v1alpha1.ModelConfig, err error) error { - var ( - status metav1.ConditionStatus - message string - reason string - ) - if err != nil { - status = metav1.ConditionFalse - message = err.Error() - reason = "ModelConfigReconcileFailed" - reconcileLog.Error(err, "failed to reconcile model config", "modelConfig", common.GetObjectRef(modelConfig)) - } else { - status = metav1.ConditionTrue - reason = "ModelConfigReconciled" - } - - conditionChanged := meta.SetStatusCondition(&modelConfig.Status.Conditions, metav1.Condition{ - Type: v1alpha1.ModelConfigConditionTypeAccepted, - Status: status, - LastTransitionTime: metav1.Now(), - Reason: reason, - Message: message, - }) - - // update the status if it has changed or the generation has changed - if conditionChanged || modelConfig.Status.ObservedGeneration != modelConfig.Generation { - modelConfig.Status.ObservedGeneration = modelConfig.Generation - if err := a.kube.Status().Update(ctx, modelConfig); err != nil { - return fmt.Errorf("failed to update model config status: %v", err) - } - } - return nil -} - -func (a *autogenReconciler) ReconcileAutogenTeam(ctx context.Context, req ctrl.Request) error { - team := &v1alpha1.Team{} - if err := a.kube.Get(ctx, req.NamespacedName, team); err != nil { - return fmt.Errorf("failed to get team %s: %v", req.Name, err) - } - - return a.reconcileTeamStatus(ctx, team, a.reconcileTeams(ctx, team)) -} - -func (a *autogenReconciler) reconcileTeamStatus(ctx context.Context, team *v1alpha1.Team, err error) error { - var ( - status metav1.ConditionStatus - message 
string - reason string - ) - if err != nil { - status = metav1.ConditionFalse - message = err.Error() - reconcileLog.Error(err, "failed to reconcile team", "team", common.GetObjectRef(team)) - reason = "TeamReconcileFailed" - } else { - status = metav1.ConditionTrue - reason = "TeamReconciled" - } - - conditionChanged := meta.SetStatusCondition(&team.Status.Conditions, metav1.Condition{ - Type: v1alpha1.TeamConditionTypeAccepted, - Status: status, - LastTransitionTime: metav1.Now(), - Reason: reason, - Message: message, - }) - - if conditionChanged || team.Status.ObservedGeneration != team.Generation { - team.Status.ObservedGeneration = team.Generation - if err := a.kube.Status().Update(ctx, team); err != nil { - return fmt.Errorf("failed to update team status: %v", err) - } - } - - return nil -} - -func (a *autogenReconciler) ReconcileAutogenApiKeySecret(ctx context.Context, req ctrl.Request) error { - agents, err := a.findAgentsUsingApiKeySecret(ctx, req) - if err != nil { - return fmt.Errorf("failed to find agents for secret %s: %v", req.Name, err) - } - - if err := a.reconcileAgents(ctx, agents...); err != nil { - return fmt.Errorf("failed to reconcile agents for secret %s: %v", req.Name, err) - } - - teams, err := a.findTeamsUsingApiKeySecret(ctx, req) - if err != nil { - return fmt.Errorf("failed to find teams for api key secret %s: %v", req.Name, err) - } - - return a.reconcileTeams(ctx, teams...) -} - -func (a *autogenReconciler) ReconcileAutogenToolServer(ctx context.Context, req ctrl.Request) error { - // reconcile the agent team itself - toolServer := &v1alpha1.ToolServer{} - if err := a.kube.Get(ctx, req.NamespacedName, toolServer); err != nil { - // if the tool server is not found, we can ignore it - if k8s_errors.IsNotFound(err) { - return nil - } - return fmt.Errorf("failed to get tool server %s: %v", req.Name, err) - } - - reconcileErr := a.reconcileToolServer(ctx, toolServer) - - // update the tool server status as the agents depend on it - if err := a.reconcileToolServerStatus( - ctx, - toolServer, - common.GetObjectRef(toolServer), - reconcileErr, - ); err != nil { - return fmt.Errorf("failed to reconcile tool server %s: %v", req.Name, err) - } - - // find and reconcile all agents which use this tool server - agents, err := a.findAgentsUsingToolServer(ctx, req) - if err != nil { - return fmt.Errorf("failed to find teams for agent %s: %v", req.Name, err) - } - - if err := a.reconcileAgents(ctx, agents...); err != nil { - return fmt.Errorf("failed to reconcile agents for tool server %s, see status for more details", req.Name) - } - - return nil -} - -func (a *autogenReconciler) reconcileToolServerStatus( - ctx context.Context, - toolServer *v1alpha1.ToolServer, - serverRef string, - err error, -) error { - discoveredTools, discoveryErr := a.getDiscoveredMCPTools(ctx, serverRef) - if discoveryErr != nil { - err = multierror.Append(err, discoveryErr) - } - - var ( - status metav1.ConditionStatus - message string - reason string - ) - if err != nil { - status = metav1.ConditionFalse - message = err.Error() - reason = "AgentReconcileFailed" - reconcileLog.Error(err, "failed to reconcile agent", "tool_server", common.GetObjectRef(toolServer)) - } else { - status = metav1.ConditionTrue - reason = "AgentReconciled" - } - conditionChanged := meta.SetStatusCondition(&toolServer.Status.Conditions, metav1.Condition{ - Type: v1alpha1.AgentConditionTypeAccepted, - Status: status, - LastTransitionTime: metav1.Now(), - Reason: reason, - Message: message, - }) - - // only update if the 
status has changed to prevent looping the reconciler - if !conditionChanged && - toolServer.Status.ObservedGeneration == toolServer.Generation && - reflect.DeepEqual(toolServer.Status.DiscoveredTools, discoveredTools) { - return nil - } - - toolServer.Status.ObservedGeneration = toolServer.Generation - toolServer.Status.DiscoveredTools = discoveredTools - - if err := a.kube.Status().Update(ctx, toolServer); err != nil { - return fmt.Errorf("failed to update agent status: %v", err) - } - - return nil -} - -func (a *autogenReconciler) ReconcileAutogenMemory(ctx context.Context, req ctrl.Request) error { - memory := &v1alpha1.Memory{} - if err := a.kube.Get(ctx, req.NamespacedName, memory); err != nil { - if k8s_errors.IsNotFound(err) { - return a.handleMemoryDeletion(req) - } - - return fmt.Errorf("failed to get memory %s: %v", req.Name, err) - } - - agents, err := a.findAgentsUsingMemory(ctx, req) - if err != nil { - return fmt.Errorf("failed to find agents using memory %s: %v", req.Name, err) - } - - return a.reconcileMemoryStatus(ctx, memory, a.reconcileAgents(ctx, agents...)) -} - -func (a *autogenReconciler) handleMemoryDeletion(req ctrl.Request) error { - - // TODO(sbx0r): implement memory deletion - - return nil -} - -func (a *autogenReconciler) reconcileMemoryStatus(ctx context.Context, memory *v1alpha1.Memory, err error) error { - var ( - status metav1.ConditionStatus - message string - reason string - ) - if err != nil { - status = metav1.ConditionFalse - message = err.Error() - reason = "MemoryReconcileFailed" - reconcileLog.Error(err, "failed to reconcile memory", "memory", common.GetObjectRef(memory)) - } else { - status = metav1.ConditionTrue - reason = "MemoryReconciled" - } - - conditionChanged := meta.SetStatusCondition(&memory.Status.Conditions, metav1.Condition{ - Type: v1alpha1.MemoryConditionTypeAccepted, - Status: status, - LastTransitionTime: metav1.Now(), - Reason: reason, - Message: message, - }) - - if conditionChanged || memory.Status.ObservedGeneration != memory.Generation { - memory.Status.ObservedGeneration = memory.Generation - if err := a.kube.Status().Update(ctx, memory); err != nil { - return fmt.Errorf("failed to update memory status: %v", err) - } - } - return nil -} - -func (a *autogenReconciler) reconcileTeams(ctx context.Context, teams ...*v1alpha1.Team) error { - errs := map[types.NamespacedName]error{} - for _, team := range teams { - autogenTeam, err := a.autogenTranslator.TranslateGroupChatForTeam(ctx, team) - if err != nil { - errs[types.NamespacedName{Name: team.Name, Namespace: team.Namespace}] = fmt.Errorf( - "failed to translate team %s/%s: %v", team.Namespace, team.Name, err) - continue - } - if err := a.upsertAgent(ctx, autogenTeam); err != nil { - errs[types.NamespacedName{Name: team.Name, Namespace: team.Namespace}] = fmt.Errorf( - "failed to upsert team %s/%s: %v", team.Namespace, team.Name, err) - continue - } - } - - if len(errs) > 0 { - return fmt.Errorf("failed to reconcile teams: %v", errs) - } - - return nil -} - -func (a *autogenReconciler) reconcileAgents(ctx context.Context, agents ...*v1alpha1.Agent) error { - var multiErr *multierror.Error - for _, agent := range agents { - reconcileErr := a.reconcileAgent(ctx, agent) - // Append error but still try to reconcile the agent status - if reconcileErr != nil { - multiErr = multierror.Append(multiErr, fmt.Errorf( - "failed to reconcile agent %s/%s: %v", agent.Namespace, agent.Name, reconcileErr)) - } - if err := a.reconcileAgentStatus(ctx, agent, reconcileErr); err != nil { - multiErr 
= multierror.Append(multiErr, fmt.Errorf( - "failed to reconcile agent status %s/%s: %v", agent.Namespace, agent.Name, err)) - } - } - - return multiErr.ErrorOrNil() -} - -func (a *autogenReconciler) reconcileAgent(ctx context.Context, agent *v1alpha1.Agent) error { - autogenTeam, err := a.autogenTranslator.TranslateGroupChatForAgent(ctx, agent) - if err != nil { - return fmt.Errorf("failed to translate agent %s/%s: %v", agent.Namespace, agent.Name, err) - } - if err := a.reconcileA2A(ctx, autogenTeam, agent); err != nil { - return fmt.Errorf("failed to reconcile A2A for agent %s/%s: %v", agent.Namespace, agent.Name, err) - } - if err := a.upsertAgent(ctx, autogenTeam); err != nil { - return fmt.Errorf("failed to upsert agent %s/%s: %v", agent.Namespace, agent.Name, err) - } - - return nil -} - -func (a *autogenReconciler) reconcileToolServer(ctx context.Context, server *v1alpha1.ToolServer) error { - toolServer, err := a.autogenTranslator.TranslateToolServer(ctx, server) - if err != nil { - return fmt.Errorf("failed to translate tool server %s/%s: %v", server.Namespace, server.Name, err) - } - err = a.upsertToolServer(ctx, toolServer) - if err != nil { - return fmt.Errorf("failed to upsert tool server %s/%s: %v", server.Namespace, server.Name, err) - } - - return nil -} - -func (a *autogenReconciler) upsertAgent(ctx context.Context, agent *database.Agent) error { - // lock to prevent races - a.upsertLock.Lock() - defer a.upsertLock.Unlock() - // validate the team - req := autogen_client.ValidationRequest{ - Component: &agent.Component, - } - resp, err := a.autogenClient.Validate(ctx, &req) - if err != nil { - return fmt.Errorf("failed to validate agent %s: %v", agent.Name, err) - } - if !resp.IsValid { - return fmt.Errorf("agent %s is invalid: %v", agent.Name, resp.ErrorMsg()) - } - - // delete if team exists - existingAgent, err := a.dbClient.GetAgent(agent.Name) - if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { - return fmt.Errorf("failed to get existing agent %s: %v", agent.Name, err) - } - if existingAgent != nil { - agent.ID = existingAgent.ID - } - - return a.dbClient.UpsertAgent(agent) -} - -func (a *autogenReconciler) upsertToolServer(ctx context.Context, toolServer *database.ToolServer) error { - // lock to prevent races - a.upsertLock.Lock() - defer a.upsertLock.Unlock() - - // delete if toolServer exists - existingToolServer, err := a.dbClient.GetToolServer(toolServer.Component.Label) - if err != nil && !strings.Contains(err.Error(), "not found") { - return fmt.Errorf("failed to get existing toolServer %s: %v", toolServer.Component.Label, err) - } - if existingToolServer != nil { - toolServer.ID = existingToolServer.ID - err = a.dbClient.UpdateToolServer(toolServer) - if err != nil { - return fmt.Errorf("failed to delete existing toolServer %s: %v", toolServer.Component.Label, err) - } - } else { - existingToolServer, err = a.dbClient.CreateToolServer(toolServer) - if err != nil { - return fmt.Errorf("failed to create toolServer %s: %v", toolServer.Component.Label, err) - } - existingToolServer, err = a.dbClient.GetToolServer(toolServer.Component.Label) - if err != nil { - return fmt.Errorf("failed to get existing toolServer %s: %v", toolServer.Component.Label, err) - } - } - - tools, err := a.autogenClient.FetchTools(ctx, &autogen_client.ToolServerRequest{ - Server: &existingToolServer.Component, - }) - if err != nil { - return fmt.Errorf("failed to fetch tools for toolServer %s: %v", toolServer.Component.Label, err) - } - - if err := 
a.dbClient.RefreshToolsForServer(toolServer.Component.Label, tools.Tools); err != nil { - return fmt.Errorf("failed to refresh tools for toolServer %s: %v", toolServer.Component.Label, err) - } - - return nil -} - -func (a *autogenReconciler) findAgentsUsingModel(ctx context.Context, req ctrl.Request) ([]*v1alpha1.Agent, error) { - var agentsList v1alpha1.AgentList - if err := a.kube.List( - ctx, - &agentsList, - ); err != nil { - return nil, fmt.Errorf("failed to list agents: %v", err) - } - - var agents []*v1alpha1.Agent - for i := range agentsList.Items { - agent := &agentsList.Items[i] - agentNamespaced, err := common.ParseRefString(agent.Spec.ModelConfig, agent.Namespace) - - if err != nil { - reconcileLog.Error(err, "failed to parse Agent ModelConfig", - "errorDetails", err.Error(), - ) - continue - } - - if agentNamespaced == req.NamespacedName { - agents = append(agents, agent) - } - } - - return agents, nil -} - -func (a *autogenReconciler) findAgentsUsingApiKeySecret(ctx context.Context, req ctrl.Request) ([]*v1alpha1.Agent, error) { - var modelsList v1alpha1.ModelConfigList - if err := a.kube.List( - ctx, - &modelsList, - ); err != nil { - return nil, fmt.Errorf("failed to list ModelConfigs: %v", err) - } - - var models []string - for _, model := range modelsList.Items { - if model.Spec.APIKeySecretRef == "" { - continue - } - secretNamespaced, err := common.ParseRefString(model.Spec.APIKeySecretRef, model.Namespace) - if err != nil { - reconcileLog.Error(err, "failed to parse ModelConfig APIKeySecretRef", - "errorDetails", err.Error(), - ) - continue - } - - if secretNamespaced == req.NamespacedName { - models = append(models, model.Name) - } - } - - var agents []*v1alpha1.Agent - uniqueAgents := make(map[string]bool) - - for _, modelName := range models { - agentsUsingModel, err := a.findAgentsUsingModel(ctx, ctrl.Request{ - NamespacedName: types.NamespacedName{ - Namespace: req.Namespace, - Name: modelName, - }, - }) - if err != nil { - return nil, fmt.Errorf("failed to find agents for model %s: %v", modelName, err) - } - - for _, agent := range agentsUsingModel { - key := common.GetObjectRef(agent) - if !uniqueAgents[key] { - uniqueAgents[key] = true - agents = append(agents, agent) - } - } - } - - return agents, nil -} - -func (a *autogenReconciler) findAgentsUsingMemory(ctx context.Context, req ctrl.Request) ([]*v1alpha1.Agent, error) { - var agentsList v1alpha1.AgentList - if err := a.kube.List( - ctx, - &agentsList, - ); err != nil { - return nil, fmt.Errorf("failed to list agents: %v", err) - } - - var agents []*v1alpha1.Agent - for i := range agentsList.Items { - agent := &agentsList.Items[i] - for _, memory := range agent.Spec.Memory { - memoryNamespaced, err := common.ParseRefString(memory, agent.Namespace) - - if err != nil { - reconcileLog.Error(err, "failed to parse Agent Memory", - "errorDetails", err.Error(), - ) - continue - } - - if memoryNamespaced == req.NamespacedName { - agents = append(agents, agent) - break - } - } - } - - return agents, nil -} - -func (a *autogenReconciler) findTeamsUsingAgent(ctx context.Context, req ctrl.Request) ([]*v1alpha1.Team, error) { - var teamsList v1alpha1.TeamList - if err := a.kube.List( - ctx, - &teamsList, - ); err != nil { - return nil, fmt.Errorf("failed to list teams: %v", err) - } - - var teams []*v1alpha1.Team - for i := range teamsList.Items { - team := &teamsList.Items[i] - for _, participant := range team.Spec.Participants { - participantNamespaced, err := common.ParseRefString(participant, team.Namespace) - - if 
err != nil { - reconcileLog.Error(err, "failed to parse Team participant", - "errorDetails", err.Error(), - ) - continue - } - - if participantNamespaced == req.NamespacedName { - teams = append(teams, team) - break - } - } - } - - return teams, nil -} - -func (a *autogenReconciler) findTeamsUsingModel(ctx context.Context, req ctrl.Request) ([]*v1alpha1.Team, error) { - var teamsList v1alpha1.TeamList - if err := a.kube.List( - ctx, - &teamsList, - ); err != nil { - return nil, fmt.Errorf("failed to list Teams: %v", err) - } - - var teams []*v1alpha1.Team - for i := range teamsList.Items { - team := &teamsList.Items[i] - modelNamespaced, err := common.ParseRefString(team.Spec.ModelConfig, team.Namespace) - - if err != nil { - reconcileLog.Error(err, "failed to parse Team ModelConfig", - "errorDetails", err.Error(), - ) - continue - } - - if modelNamespaced == req.NamespacedName { - teams = append(teams, team) - } - } - - return teams, nil -} - -func (a *autogenReconciler) findTeamsUsingApiKeySecret(ctx context.Context, req ctrl.Request) ([]*v1alpha1.Team, error) { - var modelsList v1alpha1.ModelConfigList - if err := a.kube.List( - ctx, - &modelsList, - ); err != nil { - return nil, fmt.Errorf("failed to list ModelConfigs: %v", err) - } - - var models []string - for _, model := range modelsList.Items { - secretNamespaced, err := common.ParseRefString(model.Spec.APIKeySecretRef, model.Namespace) - - if err != nil { - switch e := err.(type) { - case *common.EmptyReferenceError: - reconcileLog.V(4).Info("ModelConfig has empty APIKeySecretRef, skipping", - "model", model.Name, - "namespace", model.Namespace, - ) - default: - reconcileLog.Error(err, "failed to parse ModelConfig APIKeySecretRef", - "errorDetails", e.Error(), - "model", model.Name, - "namespace", model.Namespace, - ) - } - continue - } - - if secretNamespaced == req.NamespacedName { - models = append(models, model.Name) - } - } - - var teams []*v1alpha1.Team - uniqueTeams := make(map[string]bool) - - for _, modelName := range models { - teamsUsingModel, err := a.findTeamsUsingModel(ctx, ctrl.Request{ - NamespacedName: types.NamespacedName{ - Namespace: req.Namespace, - Name: modelName, - }, - }) - if err != nil { - return nil, fmt.Errorf("failed to find teams for model %s: %v", modelName, err) - } - - for _, team := range teamsUsingModel { - key := common.GetObjectRef(team) - if !uniqueTeams[key] { - uniqueTeams[key] = true - teams = append(teams, team) - } - } - } - - return teams, nil -} - -func (a *autogenReconciler) findAgentsUsingToolServer(ctx context.Context, req ctrl.Request) ([]*v1alpha1.Agent, error) { - var agentsList v1alpha1.AgentList - if err := a.kube.List( - ctx, - &agentsList, - ); err != nil { - return nil, fmt.Errorf("failed to list agents: %v", err) - } - - var agents []*v1alpha1.Agent - appendAgentIfUsesToolServer := func(agent *v1alpha1.Agent) { - for _, tool := range agent.Spec.Tools { - if tool.McpServer == nil { - return - } - - toolServerNamespaced, err := common.ParseRefString(tool.McpServer.ToolServer, agent.Namespace) - if err != nil { - reconcileLog.Error(err, "failed to parse Agent ToolServer", - "errorDetails", err.Error(), - ) - continue - } - - if toolServerNamespaced == req.NamespacedName { - agents = append(agents, agent) - return - } - } - } - - for _, agent := range agentsList.Items { - agent := agent - appendAgentIfUsesToolServer(&agent) - } - - return agents, nil - -} - -func (a *autogenReconciler) getDiscoveredMCPTools(ctx context.Context, serverRef string) ([]*v1alpha1.MCPTool, error) { - 
allTools, err := a.dbClient.ListTools() - if err != nil { - return nil, err - } - - var discoveredTools []*v1alpha1.MCPTool - for _, tool := range allTools { - if tool.ServerName == serverRef { - mcpTool, err := convertTool(&tool) - if err != nil { - return nil, fmt.Errorf("failed to convert tool: %v", err) - } - discoveredTools = append(discoveredTools, mcpTool) - } - } - - return discoveredTools, nil -} - -func (a *autogenReconciler) reconcileA2A( - ctx context.Context, - team *database.Agent, - agent *v1alpha1.Agent, -) error { - return a.a2aReconciler.ReconcileAutogenAgent(ctx, agent, team) -} - -func convertTool(tool *database.Tool) (*v1alpha1.MCPTool, error) { - if tool.Component.Config == nil { - return nil, fmt.Errorf("missing component or config") - } - config := tool.Component.Config - var mcpToolConfig api.MCPToolConfig - if err := unmarshalFromMap(config, &mcpToolConfig); err != nil { - return nil, fmt.Errorf("failed to unmarshal tool config: %v", err) - } - component, err := convertComponentToApiType(&tool.Component) - if err != nil { - return nil, fmt.Errorf("failed to convert component: %v", err) - } - - return &v1alpha1.MCPTool{ - Name: mcpToolConfig.Tool.Name, - Component: component, - }, nil -} - -func convertComponentToApiType(component *api.Component) (v1alpha1.Component, error) { - anyConfig, err := convertMapToAnytype(component.Config) - if err != nil { - return v1alpha1.Component{}, err - } - return v1alpha1.Component{ - Provider: component.Provider, - ComponentType: component.ComponentType, - Version: component.Version, - ComponentVersion: component.ComponentVersion, - Description: component.Description, - Label: component.Label, - Config: anyConfig, - }, nil -} - -func convertMapToAnytype(m map[string]interface{}) (map[string]v1alpha1.AnyType, error) { - anyConfig := make(map[string]v1alpha1.AnyType) - for k, v := range m { - b, err := json.Marshal(v) - if err != nil { - return nil, err - } - anyConfig[k] = v1alpha1.AnyType{ - RawMessage: b, - } - } - return anyConfig, nil -} - -func unmarshalFromMap(m map[string]interface{}, v interface{}) error { - b, err := json.Marshal(m) - if err != nil { - return err - } - return json.Unmarshal(b, v) -} diff --git a/go/controller/internal/controller/autogenagent_controller.go b/go/controller/internal/controller/agent_controller.go similarity index 70% rename from go/controller/internal/controller/autogenagent_controller.go rename to go/controller/internal/controller/agent_controller.go index 5012b29fd..c78523354 100644 --- a/go/controller/internal/controller/autogenagent_controller.go +++ b/go/controller/internal/controller/agent_controller.go @@ -19,8 +19,8 @@ package controller import ( "context" - "github.com/kagent-dev/kagent/go/controller/internal/autogen" - + appsv1 "k8s.io/api/apps/v1" + corev1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/runtime" "k8s.io/utils/ptr" ctrl "sigs.k8s.io/controller-runtime" @@ -29,31 +29,36 @@ import ( "sigs.k8s.io/controller-runtime/pkg/log" agentv1alpha1 "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" + "github.com/kagent-dev/kagent/go/controller/internal/reconciler" ) -// AutogenAgentReconciler reconciles a AutogenAgent object -type AutogenAgentReconciler struct { +// AgentReconciler reconciles a Agent object +type AgentReconciler struct { client.Client Scheme *runtime.Scheme - Reconciler autogen.AutogenReconciler + Reconciler reconciler.KagentReconciler } // +kubebuilder:rbac:groups=kagent.dev,resources=agents,verbs=get;list;watch;create;update;patch;delete // 
+kubebuilder:rbac:groups=kagent.dev,resources=agents/status,verbs=get;update;patch // +kubebuilder:rbac:groups=kagent.dev,resources=agents/finalizers,verbs=update -func (r *AutogenAgentReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { +func (r *AgentReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { _ = log.FromContext(ctx) - return ctrl.Result{}, r.Reconciler.ReconcileAutogenAgent(ctx, req) + return ctrl.Result{}, r.Reconciler.ReconcileKagentAgent(ctx, req) } // SetupWithManager sets up the controller with the Manager. -func (r *AutogenAgentReconciler) SetupWithManager(mgr ctrl.Manager) error { +func (r *AgentReconciler) SetupWithManager(mgr ctrl.Manager) error { return ctrl.NewControllerManagedBy(mgr). WithOptions(controller.Options{ NeedLeaderElection: ptr.To(true), }). For(&agentv1alpha1.Agent{}). - Named("autogenagent"). + Owns(&appsv1.Deployment{}). + Owns(&corev1.ConfigMap{}). + Owns(&corev1.Service{}). + Owns(&corev1.ServiceAccount{}). + Named("agent"). Complete(r) } diff --git a/go/controller/internal/controller/autogenteam_controller.go b/go/controller/internal/controller/autogenteam_controller.go deleted file mode 100644 index 1567f25bc..000000000 --- a/go/controller/internal/controller/autogenteam_controller.go +++ /dev/null @@ -1,59 +0,0 @@ -/* -Copyright 2025. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package controller - -import ( - "context" - - "github.com/kagent-dev/kagent/go/controller/internal/autogen" - - "k8s.io/apimachinery/pkg/runtime" - "k8s.io/utils/ptr" - ctrl "sigs.k8s.io/controller-runtime" - "sigs.k8s.io/controller-runtime/pkg/client" - "sigs.k8s.io/controller-runtime/pkg/controller" - "sigs.k8s.io/controller-runtime/pkg/log" - - agentv1alpha1 "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" -) - -// AutogenTeamReconciler reconciles a AutogenTeam object -type AutogenTeamReconciler struct { - client.Client - Scheme *runtime.Scheme - Reconciler autogen.AutogenReconciler -} - -// +kubebuilder:rbac:groups=kagent.dev,resources=teams,verbs=get;list;watch;create;update;patch;delete -// +kubebuilder:rbac:groups=kagent.dev,resources=teams/status,verbs=get;update;patch -// +kubebuilder:rbac:groups=kagent.dev,resources=teams/finalizers,verbs=update - -func (r *AutogenTeamReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { - _ = log.FromContext(ctx) - return ctrl.Result{}, r.Reconciler.ReconcileAutogenTeam(ctx, req) -} - -// SetupWithManager sets up the controller with the Manager. -func (r *AutogenTeamReconciler) SetupWithManager(mgr ctrl.Manager) error { - return ctrl.NewControllerManagedBy(mgr). - WithOptions(controller.Options{ - NeedLeaderElection: ptr.To(true), - }). - For(&agentv1alpha1.Team{}). - Named("autogenteam"). 
- Complete(r) -} diff --git a/go/controller/internal/controller/autogenmemory_controller.go b/go/controller/internal/controller/memory_controller.go similarity index 76% rename from go/controller/internal/controller/autogenmemory_controller.go rename to go/controller/internal/controller/memory_controller.go index cbd15353c..8e23d3358 100644 --- a/go/controller/internal/controller/autogenmemory_controller.go +++ b/go/controller/internal/controller/memory_controller.go @@ -19,7 +19,7 @@ package controller import ( "context" - "github.com/kagent-dev/kagent/go/controller/internal/autogen" + "github.com/kagent-dev/kagent/go/controller/internal/reconciler" "k8s.io/apimachinery/pkg/runtime" "k8s.io/utils/ptr" @@ -31,24 +31,24 @@ import ( agentv1alpha1 "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" ) -// AutogenMemoryReconciler reconciles a AutogenMemory object -type AutogenMemoryReconciler struct { +// MemoryReconciler reconciles a Memory object +type MemoryReconciler struct { client.Client Scheme *runtime.Scheme - Reconciler autogen.AutogenReconciler + Reconciler reconciler.KagentReconciler } // +kubebuilder:rbac:groups=kagent.dev,resources=memories,verbs=get;list;watch;create;update;patch;delete // +kubebuilder:rbac:groups=kagent.dev,resources=memories/status,verbs=get;update;patch // +kubebuilder:rbac:groups=kagent.dev,resources=memories/finalizers,verbs=update -func (r *AutogenMemoryReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { +func (r *MemoryReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { _ = log.FromContext(ctx) - return ctrl.Result{}, r.Reconciler.ReconcileAutogenMemory(ctx, req) + return ctrl.Result{}, r.Reconciler.ReconcileKagentMemory(ctx, req) } // SetupWithManager sets up the controller with the Manager. -func (r *AutogenMemoryReconciler) SetupWithManager(mgr ctrl.Manager) error { +func (r *MemoryReconciler) SetupWithManager(mgr ctrl.Manager) error { return ctrl.NewControllerManagedBy(mgr). 
WithOptions(controller.Options{ NeedLeaderElection: ptr.To(true), diff --git a/go/controller/internal/controller/autogenmodelconfig_controller.go b/go/controller/internal/controller/modelconfig_controller.go similarity index 74% rename from go/controller/internal/controller/autogenmodelconfig_controller.go rename to go/controller/internal/controller/modelconfig_controller.go index 9d925e3d2..9cccff9ed 100644 --- a/go/controller/internal/controller/autogenmodelconfig_controller.go +++ b/go/controller/internal/controller/modelconfig_controller.go @@ -19,7 +19,7 @@ package controller import ( "context" - "github.com/kagent-dev/kagent/go/controller/internal/autogen" + "github.com/kagent-dev/kagent/go/controller/internal/reconciler" "k8s.io/apimachinery/pkg/runtime" "k8s.io/utils/ptr" @@ -31,29 +31,29 @@ import ( agentv1alpha1 "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" ) -// AutogenModelConfigReconciler reconciles a AutogenModelConfig object -type AutogenModelConfigReconciler struct { +// ModelConfigReconciler reconciles a ModelConfig object +type ModelConfigReconciler struct { client.Client Scheme *runtime.Scheme - Reconciler autogen.AutogenReconciler + Reconciler reconciler.KagentReconciler } // +kubebuilder:rbac:groups=kagent.dev,resources=modelconfigs,verbs=get;list;watch;create;update;patch;delete // +kubebuilder:rbac:groups=kagent.dev,resources=modelconfigs/status,verbs=get;update;patch // +kubebuilder:rbac:groups=kagent.dev,resources=modelconfigs/finalizers,verbs=update -func (r *AutogenModelConfigReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { +func (r *ModelConfigReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { _ = log.FromContext(ctx) - return ctrl.Result{}, r.Reconciler.ReconcileAutogenModelConfig(ctx, req) + return ctrl.Result{}, r.Reconciler.ReconcileKagentModelConfig(ctx, req) } // SetupWithManager sets up the controller with the Manager. -func (r *AutogenModelConfigReconciler) SetupWithManager(mgr ctrl.Manager) error { +func (r *ModelConfigReconciler) SetupWithManager(mgr ctrl.Manager) error { return ctrl.NewControllerManagedBy(mgr). WithOptions(controller.Options{ NeedLeaderElection: ptr.To(true), }). For(&agentv1alpha1.ModelConfig{}). - Named("autogenmodelconfig"). + Named("modelconfig"). 
Complete(r) } diff --git a/go/controller/internal/controller/autogensecret_controller.go b/go/controller/internal/controller/secret_controller.go similarity index 73% rename from go/controller/internal/controller/autogensecret_controller.go rename to go/controller/internal/controller/secret_controller.go index f0d0699b6..3fa5e5414 100644 --- a/go/controller/internal/controller/autogensecret_controller.go +++ b/go/controller/internal/controller/secret_controller.go @@ -19,7 +19,7 @@ package controller import ( "context" - "github.com/kagent-dev/kagent/go/controller/internal/autogen" + "github.com/kagent-dev/kagent/go/controller/internal/reconciler" v1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/runtime" @@ -30,29 +30,29 @@ import ( "sigs.k8s.io/controller-runtime/pkg/log" ) -// AutogenModelConfigReconciler reconciles a Secret object which contains a model config -type AutogenSecretReconciler struct { +// SecretReconciler reconciles a Secret object which contains a model config +type SecretReconciler struct { client.Client Scheme *runtime.Scheme - Reconciler autogen.AutogenReconciler + Reconciler reconciler.KagentReconciler } // +kubebuilder:rbac:groups=kagent.dev,resources=modelconfigs,verbs=get;list;watch;create;update;patch;delete // +kubebuilder:rbac:groups=kagent.dev,resources=modelconfigs/status,verbs=get;update;patch // +kubebuilder:rbac:groups=kagent.dev,resources=modelconfigs/finalizers,verbs=update -func (r *AutogenSecretReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { +func (r *SecretReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { _ = log.FromContext(ctx) - return ctrl.Result{}, r.Reconciler.ReconcileAutogenApiKeySecret(ctx, req) + return ctrl.Result{}, r.Reconciler.ReconcileKagentApiKeySecret(ctx, req) } // SetupWithManager sets up the controller with the Manager. -func (r *AutogenSecretReconciler) SetupWithManager(mgr ctrl.Manager) error { +func (r *SecretReconciler) SetupWithManager(mgr ctrl.Manager) error { return ctrl.NewControllerManagedBy(mgr). WithOptions(controller.Options{ NeedLeaderElection: ptr.To(true), }). For(&v1.Secret{}). - Named("autogenapikeysecret"). + Named("secret"). 
Complete(r) } diff --git a/go/controller/internal/controller/toolserver_controller.go b/go/controller/internal/controller/toolserver_controller.go index 080efa0ef..1987254f8 100644 --- a/go/controller/internal/controller/toolserver_controller.go +++ b/go/controller/internal/controller/toolserver_controller.go @@ -20,7 +20,8 @@ import ( "context" "time" - "github.com/kagent-dev/kagent/go/controller/internal/autogen" + "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" + "github.com/kagent-dev/kagent/go/controller/internal/reconciler" "k8s.io/apimachinery/pkg/runtime" "k8s.io/utils/ptr" @@ -28,15 +29,13 @@ import ( "sigs.k8s.io/controller-runtime/pkg/client" "sigs.k8s.io/controller-runtime/pkg/controller" "sigs.k8s.io/controller-runtime/pkg/log" - - agentv1alpha1 "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" ) // ToolServerReconciler reconciles a ToolServer object type ToolServerReconciler struct { client.Client Scheme *runtime.Scheme - Reconciler autogen.AutogenReconciler + Reconciler reconciler.KagentReconciler } // +kubebuilder:rbac:groups=agent.kagent.dev,resources=toolservers,verbs=get;list;watch;create;update;patch;delete @@ -49,7 +48,7 @@ func (r *ToolServerReconciler) Reconcile(ctx context.Context, req ctrl.Request) return ctrl.Result{ // loop forever because we need to refresh tools server status RequeueAfter: 60 * time.Second, - }, r.Reconciler.ReconcileAutogenToolServer(ctx, req) + }, r.Reconciler.ReconcileKagentToolServer(ctx, req) } // SetupWithManager sets up the controller with the Manager. @@ -58,7 +57,7 @@ func (r *ToolServerReconciler) SetupWithManager(mgr ctrl.Manager) error { WithOptions(controller.Options{ NeedLeaderElection: ptr.To(true), }). - For(&agentv1alpha1.ToolServer{}). + For(&v1alpha1.ToolServer{}). Named("toolserver"). 
Complete(r) } diff --git a/go/controller/internal/reconciler/reconciler.go b/go/controller/internal/reconciler/reconciler.go new file mode 100644 index 000000000..d4b9a002b --- /dev/null +++ b/go/controller/internal/reconciler/reconciler.go @@ -0,0 +1,747 @@ +package reconciler + +import ( + "bytes" + "context" + "crypto/sha256" + "fmt" + "reflect" + "sync" + + "github.com/hashicorp/go-multierror" + appsv1 "k8s.io/api/apps/v1" + k8s_errors "k8s.io/apimachinery/pkg/api/errors" + "k8s.io/apimachinery/pkg/api/meta" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/utils/ptr" + + "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" + "github.com/kagent-dev/kagent/go/controller/internal/a2a" + "github.com/kagent-dev/kagent/go/controller/translator" + "github.com/kagent-dev/kagent/go/internal/adk" + "github.com/kagent-dev/kagent/go/internal/database" + "github.com/kagent-dev/kagent/go/internal/utils" + "github.com/kagent-dev/kagent/go/internal/version" + mcp_client "github.com/mark3labs/mcp-go/client" + "github.com/mark3labs/mcp-go/client/transport" + "github.com/mark3labs/mcp-go/mcp" + "k8s.io/apimachinery/pkg/types" + ctrl "sigs.k8s.io/controller-runtime" + "sigs.k8s.io/controller-runtime/pkg/client" +) + +var ( + reconcileLog = ctrl.Log.WithName("reconciler") +) + +type KagentReconciler interface { + ReconcileKagentAgent(ctx context.Context, req ctrl.Request) error + ReconcileKagentModelConfig(ctx context.Context, req ctrl.Request) error + ReconcileKagentApiKeySecret(ctx context.Context, req ctrl.Request) error + ReconcileKagentToolServer(ctx context.Context, req ctrl.Request) error + ReconcileKagentMemory(ctx context.Context, req ctrl.Request) error +} + +type kagentReconciler struct { + adkTranslator translator.AdkApiTranslator + a2aReconciler a2a.A2AReconciler + + kube client.Client + dbClient database.Client + + defaultModelConfig types.NamespacedName + + // TODO: Remove this lock since we have a DB which we can batch anyway + upsertLock sync.Mutex +} + +func NewKagentReconciler( + translator translator.AdkApiTranslator, + kube client.Client, + dbClient database.Client, + defaultModelConfig types.NamespacedName, + a2aReconciler a2a.A2AReconciler, +) KagentReconciler { + return &kagentReconciler{ + adkTranslator: translator, + kube: kube, + dbClient: dbClient, + defaultModelConfig: defaultModelConfig, + a2aReconciler: a2aReconciler, + } +} + +func (a *kagentReconciler) ReconcileKagentAgent(ctx context.Context, req ctrl.Request) error { + // TODO(sbx0r): missing finalizer logic + + agent := &v1alpha1.Agent{} + if err := a.kube.Get(ctx, req.NamespacedName, agent); err != nil { + if k8s_errors.IsNotFound(err) { + return a.handleAgentDeletion(req) + } + + return fmt.Errorf("failed to get agent %s/%s: %w", req.Namespace, req.Name, err) + } + + return a.handleExistingAgent(ctx, agent, req) +} + +func (a *kagentReconciler) handleAgentDeletion(req ctrl.Request) error { + // TODO(sbx0r): handle deletion of agents with multiple teams assignment + + // agents, err := a.findTeamsUsingAgent(ctx, req) + // if err != nil { + // return fmt.Errorf("failed to find teams for agent %s/%s: %v", req.Namespace, req.Name, err) + // } + // if len(agents) > 1 { + // reconcileLog.Info("agent with multiple dependencies was deleted", + // "namespace", req.Namespace, + // "name", req.Name, + // "agents", agents) + // } + + // remove a2a handler if it exists + a.a2aReconciler.ReconcileAgentDeletion(req.NamespacedName.String()) + + if err := a.dbClient.DeleteAgent(req.NamespacedName.String()); err != nil { + 
return fmt.Errorf("failed to delete agent %s: %w", + req.NamespacedName.String(), err) + } + + reconcileLog.Info("Agent was deleted", "namespace", req.Namespace, "name", req.Name) + return nil +} + +func (a *kagentReconciler) handleExistingAgent(ctx context.Context, agent *v1alpha1.Agent, req ctrl.Request) error { + reconcileLog.Info("Agent Event", + "namespace", req.Namespace, + "name", req.Name, + "oldGeneration", agent.Status.ObservedGeneration, + "newGeneration", agent.Generation) + + return a.reconcileAgents(ctx, agent) +} + +func (a *kagentReconciler) reconcileAgentStatus(ctx context.Context, agent *v1alpha1.Agent, configHash *[sha256.Size]byte, inputErr error) error { + var ( + status metav1.ConditionStatus + message string + reason string + ) + if inputErr != nil { + status = metav1.ConditionFalse + message = inputErr.Error() + reason = "AgentReconcileFailed" + reconcileLog.Error(inputErr, "failed to reconcile agent", "agent", utils.GetObjectRef(agent)) + } else { + status = metav1.ConditionTrue + reason = "AgentReconciled" + } + + conditionChanged := meta.SetStatusCondition(&agent.Status.Conditions, metav1.Condition{ + Type: v1alpha1.AgentConditionTypeAccepted, + Status: status, + LastTransitionTime: metav1.Now(), + Reason: reason, + Message: message, + }) + + deployedCondition := metav1.Condition{ + Type: v1alpha1.AgentConditionTypeReady, + Status: metav1.ConditionUnknown, + LastTransitionTime: metav1.Now(), + } + + // Check if the deployment exists + deployment := &appsv1.Deployment{} + if err := a.kube.Get(ctx, types.NamespacedName{Namespace: agent.Namespace, Name: agent.Name}, deployment); err != nil { + deployedCondition.Status = metav1.ConditionUnknown + deployedCondition.Reason = "DeploymentNotFound" + deployedCondition.Message = err.Error() + } else { + replicas := int32(1) + if deployment.Spec.Replicas != nil { + replicas = *deployment.Spec.Replicas + } + if deployment.Status.AvailableReplicas == replicas { + deployedCondition.Status = metav1.ConditionTrue + deployedCondition.Reason = "DeploymentReady" + deployedCondition.Message = "Deployment is ready" + } else { + deployedCondition.Status = metav1.ConditionFalse + deployedCondition.Reason = "DeploymentNotReady" + deployedCondition.Message = fmt.Sprintf("Deployment is not ready, %d/%d pods are ready", deployment.Status.AvailableReplicas, replicas) + } + } + + conditionChanged = meta.SetStatusCondition(&agent.Status.Conditions, deployedCondition) + + // Only update the config hash if the config hash has changed and there was no error + configHashChanged := configHash != nil && !bytes.Equal((agent.Status.ConfigHash)[:], (*configHash)[:]) + + // update the status if it has changed or the generation has changed + if conditionChanged || agent.Status.ObservedGeneration != agent.Generation || configHashChanged { + // If the config hash is nil, it means there was an error during the reconciliation + if configHash != nil { + agent.Status.ConfigHash = (*configHash)[:] + } + agent.Status.ObservedGeneration = agent.Generation + if err := a.kube.Status().Update(ctx, agent); err != nil { + return fmt.Errorf("failed to update agent status: %v", err) + } + } + return nil +} + +func (a *kagentReconciler) ReconcileKagentModelConfig(ctx context.Context, req ctrl.Request) error { + modelConfig := &v1alpha1.ModelConfig{} + if err := a.kube.Get(ctx, req.NamespacedName, modelConfig); err != nil { + return fmt.Errorf("failed to get model %s: %v", req.Name, err) + } + + agents, err := a.findAgentsUsingModel(ctx, req) + if err != nil { + return 
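// --- illustrative sketch, not part of this patch ---
// The Ready condition set by reconcileAgentStatus above is derived from the agent's
// Deployment: Unknown when the Deployment cannot be fetched, True when all desired
// replicas are available, False otherwise. A minimal stand-alone version of that
// mapping (condition type name is a stand-in for v1alpha1.AgentConditionTypeReady):
package main

import (
	"fmt"

	appsv1 "k8s.io/api/apps/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/utils/ptr"
)

// readyConditionFor compares available replicas to the desired replica count,
// defaulting to 1 when spec.replicas is unset, mirroring the logic above.
func readyConditionFor(deploy *appsv1.Deployment) metav1.Condition {
	replicas := int32(1)
	if deploy.Spec.Replicas != nil {
		replicas = *deploy.Spec.Replicas
	}
	cond := metav1.Condition{Type: "Ready", LastTransitionTime: metav1.Now()}
	if deploy.Status.AvailableReplicas == replicas {
		cond.Status = metav1.ConditionTrue
		cond.Reason = "DeploymentReady"
		cond.Message = "Deployment is ready"
	} else {
		cond.Status = metav1.ConditionFalse
		cond.Reason = "DeploymentNotReady"
		cond.Message = fmt.Sprintf("%d/%d pods are ready", deploy.Status.AvailableReplicas, replicas)
	}
	return cond
}

func main() {
	d := &appsv1.Deployment{}
	d.Spec.Replicas = ptr.To(int32(2))
	d.Status.AvailableReplicas = 1
	fmt.Println(readyConditionFor(d).Message) // "1/2 pods are ready"
}
// --- end sketch ---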
fmt.Errorf("failed to find agents for model %s: %v", req.Name, err) + } + + return a.reconcileModelConfigStatus( + ctx, + modelConfig, + a.reconcileAgents(ctx, agents...), + ) +} + +func (a *kagentReconciler) reconcileModelConfigStatus(ctx context.Context, modelConfig *v1alpha1.ModelConfig, err error) error { + var ( + status metav1.ConditionStatus + message string + reason string + ) + if err != nil { + status = metav1.ConditionFalse + message = err.Error() + reason = "ModelConfigReconcileFailed" + reconcileLog.Error(err, "failed to reconcile model config", "modelConfig", utils.GetObjectRef(modelConfig)) + } else { + status = metav1.ConditionTrue + reason = "ModelConfigReconciled" + } + + conditionChanged := meta.SetStatusCondition(&modelConfig.Status.Conditions, metav1.Condition{ + Type: v1alpha1.ModelConfigConditionTypeAccepted, + Status: status, + LastTransitionTime: metav1.Now(), + Reason: reason, + Message: message, + }) + + // update the status if it has changed or the generation has changed + if conditionChanged || modelConfig.Status.ObservedGeneration != modelConfig.Generation { + modelConfig.Status.ObservedGeneration = modelConfig.Generation + if err := a.kube.Status().Update(ctx, modelConfig); err != nil { + return fmt.Errorf("failed to update model config status: %v", err) + } + } + return nil +} + +func (a *kagentReconciler) ReconcileKagentApiKeySecret(ctx context.Context, req ctrl.Request) error { + agents, err := a.findAgentsUsingApiKeySecret(ctx, req) + if err != nil { + return fmt.Errorf("failed to find agents for secret %s: %v", req.Name, err) + } + + return a.reconcileAgents(ctx, agents...) +} + +func (a *kagentReconciler) ReconcileKagentToolServer(ctx context.Context, req ctrl.Request) error { + // reconcile the agent team itself + toolServer := &v1alpha1.ToolServer{} + if err := a.kube.Get(ctx, req.NamespacedName, toolServer); err != nil { + // if the tool server is not found, we can ignore it + if k8s_errors.IsNotFound(err) { + return nil + } + return fmt.Errorf("failed to get tool server %s: %v", req.Name, err) + } + + reconcileErr := a.reconcileToolServer(ctx, toolServer) + + // update the tool server status as the agents depend on it + if err := a.reconcileToolServerStatus( + ctx, + toolServer, + utils.GetObjectRef(toolServer), + reconcileErr, + ); err != nil { + return fmt.Errorf("failed to reconcile tool server %s: %v", req.Name, err) + } + + // find and reconcile all agents which use this tool server + agents, err := a.findAgentsUsingToolServer(ctx, req) + if err != nil { + return fmt.Errorf("failed to find teams for agent %s: %v", req.Name, err) + } + + if err := a.reconcileAgents(ctx, agents...); err != nil { + return fmt.Errorf("failed to reconcile agents for tool server %s, see status for more details", req.Name) + } + + return nil +} + +func (a *kagentReconciler) reconcileToolServerStatus( + ctx context.Context, + toolServer *v1alpha1.ToolServer, + serverRef string, + err error, +) error { + discoveredTools, discoveryErr := a.getDiscoveredMCPTools(ctx, serverRef) + if discoveryErr != nil { + err = multierror.Append(err, discoveryErr) + } + + var ( + status metav1.ConditionStatus + message string + reason string + ) + if err != nil { + status = metav1.ConditionFalse + message = err.Error() + reason = "AgentReconcileFailed" + reconcileLog.Error(err, "failed to reconcile agent", "tool_server", utils.GetObjectRef(toolServer)) + } else { + status = metav1.ConditionTrue + reason = "AgentReconciled" + } + conditionChanged := 
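// --- illustrative sketch, not part of this patch ---
// The status helpers above lean on meta.SetStatusCondition, which reports whether
// the condition list actually changed; combined with the ObservedGeneration check,
// this is what keeps the controller from issuing no-op status updates that would
// re-trigger its own watch. A small, self-contained illustration of that guard:
package main

import (
	"fmt"

	"k8s.io/apimachinery/pkg/api/meta"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	var conditions []metav1.Condition

	accepted := metav1.Condition{
		Type:               "Accepted",
		Status:             metav1.ConditionTrue,
		Reason:             "Reconciled",
		LastTransitionTime: metav1.Now(),
	}

	changed := meta.SetStatusCondition(&conditions, accepted) // true: condition added
	fmt.Println("first set, changed:", changed)

	changed = meta.SetStatusCondition(&conditions, accepted) // false: nothing to update
	fmt.Println("second set, changed:", changed)

	// Only when `changed` is true (or the generation moved) would the controller
	// call Status().Update(), mirroring the pattern used above.
}
// --- end sketch ---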
meta.SetStatusCondition(&toolServer.Status.Conditions, metav1.Condition{ + Type: v1alpha1.AgentConditionTypeAccepted, + Status: status, + LastTransitionTime: metav1.Now(), + Reason: reason, + Message: message, + }) + + // only update if the status has changed to prevent looping the reconciler + if !conditionChanged && + toolServer.Status.ObservedGeneration == toolServer.Generation && + reflect.DeepEqual(toolServer.Status.DiscoveredTools, discoveredTools) { + return nil + } + + toolServer.Status.ObservedGeneration = toolServer.Generation + toolServer.Status.DiscoveredTools = discoveredTools + + if err := a.kube.Status().Update(ctx, toolServer); err != nil { + return fmt.Errorf("failed to update agent status: %v", err) + } + + return nil +} + +func (a *kagentReconciler) ReconcileKagentMemory(ctx context.Context, req ctrl.Request) error { + memory := &v1alpha1.Memory{} + if err := a.kube.Get(ctx, req.NamespacedName, memory); err != nil { + if k8s_errors.IsNotFound(err) { + return a.handleMemoryDeletion(req) + } + + return fmt.Errorf("failed to get memory %s: %v", req.Name, err) + } + + agents, err := a.findAgentsUsingMemory(ctx, req) + if err != nil { + return fmt.Errorf("failed to find agents using memory %s: %v", req.Name, err) + } + + return a.reconcileMemoryStatus(ctx, memory, a.reconcileAgents(ctx, agents...)) +} + +func (a *kagentReconciler) handleMemoryDeletion(req ctrl.Request) error { + + // TODO(sbx0r): implement memory deletion + + return nil +} + +func (a *kagentReconciler) reconcileMemoryStatus(ctx context.Context, memory *v1alpha1.Memory, err error) error { + var ( + status metav1.ConditionStatus + message string + reason string + ) + if err != nil { + status = metav1.ConditionFalse + message = err.Error() + reason = "MemoryReconcileFailed" + reconcileLog.Error(err, "failed to reconcile memory", "memory", utils.GetObjectRef(memory)) + } else { + status = metav1.ConditionTrue + reason = "MemoryReconciled" + } + + conditionChanged := meta.SetStatusCondition(&memory.Status.Conditions, metav1.Condition{ + Type: v1alpha1.MemoryConditionTypeAccepted, + Status: status, + LastTransitionTime: metav1.Now(), + Reason: reason, + Message: message, + }) + + if conditionChanged || memory.Status.ObservedGeneration != memory.Generation { + memory.Status.ObservedGeneration = memory.Generation + if err := a.kube.Status().Update(ctx, memory); err != nil { + return fmt.Errorf("failed to update memory status: %v", err) + } + } + return nil +} + +func (a *kagentReconciler) reconcileAgents(ctx context.Context, agents ...*v1alpha1.Agent) error { + var multiErr *multierror.Error + for _, agent := range agents { + configHash, reconcileErr := a.reconcileAgent(ctx, agent) + // Append error but still try to reconcile the agent status + if reconcileErr != nil { + multiErr = multierror.Append(multiErr, fmt.Errorf( + "failed to reconcile agent %s/%s: %v", agent.Namespace, agent.Name, reconcileErr)) + } + if err := a.reconcileAgentStatus(ctx, agent, configHash, reconcileErr); err != nil { + multiErr = multierror.Append(multiErr, fmt.Errorf( + "failed to reconcile agent status %s/%s: %v", agent.Namespace, agent.Name, err)) + } + } + + return multiErr.ErrorOrNil() +} + +func (a *kagentReconciler) reconcileAgent(ctx context.Context, agent *v1alpha1.Agent) (*[sha256.Size]byte, error) { + agentOutputs, err := a.adkTranslator.TranslateAgent(ctx, agent) + if err != nil { + return nil, fmt.Errorf("failed to translate agent %s/%s: %v", agent.Namespace, agent.Name, err) + } + if err := a.reconcileA2A(ctx, agent, 
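// --- illustrative sketch, not part of this patch ---
// reconcileAgents above deliberately keeps going after a per-agent failure and
// aggregates the results with hashicorp/go-multierror, so one broken Agent does not
// block status updates for its siblings. A minimal sketch of that pattern (the
// agent names and failing work function are made up for illustration):
package main

import (
	"fmt"

	"github.com/hashicorp/go-multierror"
)

func reconcileOne(name string) error {
	if name == "bad-agent" {
		return fmt.Errorf("translation failed for %s", name)
	}
	return nil
}

func reconcileAll(names []string) error {
	var multiErr *multierror.Error
	for _, name := range names {
		if err := reconcileOne(name); err != nil {
			// Record the failure but continue with the remaining agents.
			multiErr = multierror.Append(multiErr, err)
		}
	}
	// ErrorOrNil returns nil when nothing was appended.
	return multiErr.ErrorOrNil()
}

func main() {
	fmt.Println(reconcileAll([]string{"good-agent", "bad-agent"}))
}
// --- end sketch ---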
agentOutputs.Config); err != nil { + return nil, fmt.Errorf("failed to reconcile A2A for agent %s/%s: %v", agent.Namespace, agent.Name, err) + } + if err := a.upsertAgent(ctx, agent, agentOutputs); err != nil { + return nil, fmt.Errorf("failed to upsert agent %s/%s: %v", agent.Namespace, agent.Name, err) + } + + return &agentOutputs.ConfigHash, nil +} + +func (a *kagentReconciler) reconcileToolServer(ctx context.Context, server *v1alpha1.ToolServer) error { + toolServer, err := a.adkTranslator.TranslateToolServer(ctx, server) + if err != nil { + return fmt.Errorf("failed to translate tool server %s/%s: %v", server.Namespace, server.Name, err) + } + err = a.upsertToolServer(ctx, toolServer) + if err != nil { + return fmt.Errorf("failed to upsert tool server %s/%s: %v", server.Namespace, server.Name, err) + } + + return nil +} + +func (a *kagentReconciler) upsertAgent(ctx context.Context, agent *v1alpha1.Agent, agentOutputs *translator.AgentOutputs) error { + // lock to prevent races + a.upsertLock.Lock() + defer a.upsertLock.Unlock() + + dbAgent := &database.Agent{ + ID: agentOutputs.Config.Name, + Config: agentOutputs.Config, + } + + if err := a.dbClient.StoreAgent(dbAgent); err != nil { + return fmt.Errorf("failed to store agent %s: %v", agentOutputs.Config.Name, err) + } + + // If the config hash has not changed, we can skip the patch + if bytes.Equal(agentOutputs.ConfigHash[:], agent.Status.ConfigHash) { + return nil + } + + for _, obj := range agentOutputs.Manifest { + if err := a.kube.Patch(ctx, obj, client.Apply, &client.PatchOptions{ + FieldManager: "kagent-controller", + Force: ptr.To(true), + }); err != nil { + return fmt.Errorf("failed to patch agent output %s: %v", agentOutputs.Config.Name, err) + } + } + + return nil +} + +func (a *kagentReconciler) upsertToolServer(ctx context.Context, toolServer *database.ToolServer) error { + // lock to prevent races + a.upsertLock.Lock() + defer a.upsertLock.Unlock() + + if _, err := a.dbClient.StoreToolServer(toolServer); err != nil { + return fmt.Errorf("failed to store toolServer %s: %v", toolServer.Name, err) + } + + toolServer, err := a.dbClient.GetToolServer(toolServer.Name) + if err != nil { + return fmt.Errorf("failed to get toolServer %s: %v", toolServer.Name, err) + } + + var tools []*v1alpha1.MCPTool + switch { + case toolServer.Config.Sse != nil: + sseHttpClient, err := transport.NewSSE(toolServer.Config.Sse.URL) + if err != nil { + return fmt.Errorf("failed to create sse client for toolServer %s: %v", toolServer.Name, err) + } + tools, err = a.listTools(ctx, sseHttpClient, toolServer) + if err != nil { + return fmt.Errorf("failed to fetch tools for toolServer %s: %v", toolServer.Name, err) + } + case toolServer.Config.StreamableHttp != nil: + streamableHttpClient, err := transport.NewStreamableHTTP(toolServer.Config.StreamableHttp.URL) + if err != nil { + return fmt.Errorf("failed to create streamable http client for toolServer %s: %v", toolServer.Name, err) + } + tools, err = a.listTools(ctx, streamableHttpClient, toolServer) + if err != nil { + return fmt.Errorf("failed to fetch tools for toolServer %s: %v", toolServer.Name, err) + } + case toolServer.Config.Stdio != nil: + // Can't list tools for stdio + return fmt.Errorf("stdio tool servers are not supported") + default: + return fmt.Errorf("unsupported tool server type: %v", toolServer.Config.Type) + } + + if err := a.dbClient.RefreshToolsForServer(toolServer.Name, tools...); err != nil { + return fmt.Errorf("failed to refresh tools for toolServer %s: %v", toolServer.Name, 
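// --- illustrative sketch, not part of this patch ---
// upsertAgent above pushes the translated manifest with server-side apply:
// client.Apply plus a fixed field manager and forced ownership lets the controller
// own exactly the fields it renders while tolerating pre-existing objects. Note that
// apply patches need the GVK populated, which is why the translator sets TypeMeta on
// every generated object. A hedged sketch of the call shape:
package sketch

import (
	"context"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/utils/ptr"
	"sigs.k8s.io/controller-runtime/pkg/client"
)

// applyObject server-side-applies a single rendered object; the field manager name
// mirrors the one used above.
func applyObject(ctx context.Context, kube client.Client, obj client.Object) error {
	return kube.Patch(ctx, obj, client.Apply, &client.PatchOptions{
		FieldManager: "kagent-controller",
		Force:        ptr.To(true),
	})
}

// Example object: TypeMeta must be set so server-side apply can resolve the GVK.
var exampleConfigMap = &corev1.ConfigMap{
	TypeMeta:   metav1.TypeMeta{APIVersion: "v1", Kind: "ConfigMap"},
	ObjectMeta: metav1.ObjectMeta{Name: "example", Namespace: "default"},
	Data:       map[string]string{"config.json": "{}"},
}
// --- end sketch ---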
err) + } + + return nil +} + +func (a *kagentReconciler) listTools(ctx context.Context, tsp transport.Interface, toolServer *database.ToolServer) ([]*v1alpha1.MCPTool, error) { + client := mcp_client.NewClient(tsp) + err := client.Start(ctx) + if err != nil { + return nil, fmt.Errorf("failed to start client for toolServer %s: %v", toolServer.Name, err) + } + defer client.Close() + _, err = client.Initialize(ctx, mcp.InitializeRequest{ + Params: mcp.InitializeParams{ + ProtocolVersion: mcp.LATEST_PROTOCOL_VERSION, + Capabilities: mcp.ClientCapabilities{}, + ClientInfo: mcp.Implementation{ + Name: "kagent-controller", + Version: version.Version, + }, + }, + }) + if err != nil { + return nil, fmt.Errorf("failed to initialize client for toolServer %s: %v", toolServer.Name, err) + } + result, err := client.ListTools(ctx, mcp.ListToolsRequest{}) + if err != nil { + return nil, fmt.Errorf("failed to list tools for toolServer %s: %v", toolServer.Name, err) + } + + tools := make([]*v1alpha1.MCPTool, 0, len(result.Tools)) + for _, tool := range result.Tools { + tools = append(tools, &v1alpha1.MCPTool{ + Name: tool.Name, + Description: tool.Description, + }) + } + + return tools, nil +} + +func (a *kagentReconciler) findAgentsUsingModel(ctx context.Context, req ctrl.Request) ([]*v1alpha1.Agent, error) { + var agentsList v1alpha1.AgentList + if err := a.kube.List( + ctx, + &agentsList, + ); err != nil { + return nil, fmt.Errorf("failed to list agents: %v", err) + } + + var agents []*v1alpha1.Agent + for i := range agentsList.Items { + agent := &agentsList.Items[i] + agentNamespaced, err := utils.ParseRefString(agent.Spec.ModelConfig, agent.Namespace) + + if err != nil { + reconcileLog.Error(err, "failed to parse Agent ModelConfig", + "errorDetails", err.Error(), + ) + continue + } + + if agentNamespaced == req.NamespacedName { + agents = append(agents, agent) + } + } + + return agents, nil +} + +func (a *kagentReconciler) findAgentsUsingApiKeySecret(ctx context.Context, req ctrl.Request) ([]*v1alpha1.Agent, error) { + var modelsList v1alpha1.ModelConfigList + if err := a.kube.List( + ctx, + &modelsList, + ); err != nil { + return nil, fmt.Errorf("failed to list ModelConfigs: %v", err) + } + + var models []string + for _, model := range modelsList.Items { + if model.Spec.APIKeySecretRef == "" { + continue + } + secretNamespaced, err := utils.ParseRefString(model.Spec.APIKeySecretRef, model.Namespace) + if err != nil { + reconcileLog.Error(err, "failed to parse ModelConfig APIKeySecretRef", + "errorDetails", err.Error(), + ) + continue + } + + if secretNamespaced == req.NamespacedName { + models = append(models, model.Name) + } + } + + var agents []*v1alpha1.Agent + uniqueAgents := make(map[string]bool) + + for _, modelName := range models { + agentsUsingModel, err := a.findAgentsUsingModel(ctx, ctrl.Request{ + NamespacedName: types.NamespacedName{ + Namespace: req.Namespace, + Name: modelName, + }, + }) + if err != nil { + return nil, fmt.Errorf("failed to find agents for model %s: %v", modelName, err) + } + + for _, agent := range agentsUsingModel { + key := utils.GetObjectRef(agent) + if !uniqueAgents[key] { + uniqueAgents[key] = true + agents = append(agents, agent) + } + } + } + + return agents, nil +} + +func (a *kagentReconciler) findAgentsUsingMemory(ctx context.Context, req ctrl.Request) ([]*v1alpha1.Agent, error) { + var agentsList v1alpha1.AgentList + if err := a.kube.List( + ctx, + &agentsList, + ); err != nil { + return nil, fmt.Errorf("failed to list agents: %v", err) + } + + var agents 
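// --- illustrative sketch, not part of this patch ---
// Tool discovery above talks MCP directly via github.com/mark3labs/mcp-go: build a
// transport (SSE or streamable HTTP), start and initialize a client, then call
// ListTools. The same calls can be exercised out-of-band against a tool server URL
// of your choosing (the endpoint below is a placeholder):
package main

import (
	"context"
	"fmt"
	"log"

	mcpclient "github.com/mark3labs/mcp-go/client"
	"github.com/mark3labs/mcp-go/client/transport"
	"github.com/mark3labs/mcp-go/mcp"
)

func main() {
	ctx := context.Background()

	// Placeholder endpoint; substitute the tool server you want to inspect.
	t, err := transport.NewStreamableHTTP("http://localhost:3000/mcp")
	if err != nil {
		log.Fatal(err)
	}

	c := mcpclient.NewClient(t)
	if err := c.Start(ctx); err != nil {
		log.Fatal(err)
	}
	defer c.Close()

	if _, err := c.Initialize(ctx, mcp.InitializeRequest{
		Params: mcp.InitializeParams{
			ProtocolVersion: mcp.LATEST_PROTOCOL_VERSION,
			ClientInfo:      mcp.Implementation{Name: "kagent-sketch", Version: "dev"},
		},
	}); err != nil {
		log.Fatal(err)
	}

	res, err := c.ListTools(ctx, mcp.ListToolsRequest{})
	if err != nil {
		log.Fatal(err)
	}
	for _, tool := range res.Tools {
		fmt.Println(tool.Name, "-", tool.Description)
	}
}
// --- end sketch ---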
[]*v1alpha1.Agent + for i := range agentsList.Items { + agent := &agentsList.Items[i] + for _, memory := range agent.Spec.Memory { + memoryNamespaced, err := utils.ParseRefString(memory, agent.Namespace) + + if err != nil { + reconcileLog.Error(err, "failed to parse Agent Memory", + "errorDetails", err.Error(), + ) + continue + } + + if memoryNamespaced == req.NamespacedName { + agents = append(agents, agent) + break + } + } + } + + return agents, nil +} + +func (a *kagentReconciler) findAgentsUsingToolServer(ctx context.Context, req ctrl.Request) ([]*v1alpha1.Agent, error) { + var agentsList v1alpha1.AgentList + if err := a.kube.List( + ctx, + &agentsList, + ); err != nil { + return nil, fmt.Errorf("failed to list agents: %v", err) + } + + var agents []*v1alpha1.Agent + appendAgentIfUsesToolServer := func(agent *v1alpha1.Agent) { + for _, tool := range agent.Spec.Tools { + if tool.McpServer == nil { + return + } + + toolServerNamespaced, err := utils.ParseRefString(tool.McpServer.ToolServer, agent.Namespace) + if err != nil { + reconcileLog.Error(err, "failed to parse Agent ToolServer", + "errorDetails", err.Error(), + ) + continue + } + + if toolServerNamespaced == req.NamespacedName { + agents = append(agents, agent) + return + } + } + } + + for _, agent := range agentsList.Items { + agent := agent + appendAgentIfUsesToolServer(&agent) + } + + return agents, nil + +} + +func (a *kagentReconciler) getDiscoveredMCPTools(ctx context.Context, serverRef string) ([]*v1alpha1.MCPTool, error) { + allTools, err := a.dbClient.ListToolsForServer(serverRef) + if err != nil { + return nil, err + } + + var discoveredTools []*v1alpha1.MCPTool + for _, tool := range allTools { + mcpTool, err := convertTool(&tool) + if err != nil { + return nil, fmt.Errorf("failed to convert tool: %v", err) + } + discoveredTools = append(discoveredTools, mcpTool) + } + + return discoveredTools, nil +} + +func (a *kagentReconciler) reconcileA2A( + ctx context.Context, + agent *v1alpha1.Agent, + adkConfig *adk.AgentConfig, +) error { + return a.a2aReconciler.ReconcileAgent(ctx, agent, adkConfig) +} + +func convertTool(tool *database.Tool) (*v1alpha1.MCPTool, error) { + return &v1alpha1.MCPTool{ + Name: tool.ID, + Description: tool.Description, + }, nil +} diff --git a/go/controller/translator/adk_api_translator.go b/go/controller/translator/adk_api_translator.go new file mode 100644 index 000000000..ad30244fc --- /dev/null +++ b/go/controller/translator/adk_api_translator.go @@ -0,0 +1,771 @@ +package translator + +import ( + "context" + "crypto/sha256" + "encoding/binary" + "encoding/json" + "fmt" + "maps" + "os" + "slices" + "strings" + + "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" + "github.com/kagent-dev/kagent/go/internal/adk" + "github.com/kagent-dev/kagent/go/internal/database" + "github.com/kagent-dev/kagent/go/internal/utils" + common "github.com/kagent-dev/kagent/go/internal/utils" + "github.com/kagent-dev/kagent/go/internal/version" + appsv1 "k8s.io/api/apps/v1" + corev1 "k8s.io/api/core/v1" + "k8s.io/apimachinery/pkg/api/resource" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/types" + "k8s.io/apimachinery/pkg/util/intstr" + "k8s.io/utils/ptr" + "sigs.k8s.io/controller-runtime/pkg/client" + "sigs.k8s.io/controller-runtime/pkg/controller/controllerutil" + ctrllog "sigs.k8s.io/controller-runtime/pkg/log" + "trpc.group/trpc-go/trpc-a2a-go/server" +) + +type AgentOutputs struct { + Manifest []client.Object `json:"manifest,omitempty"` + + Config *adk.AgentConfig 
`json:"config,omitempty"` + ConfigHash [sha256.Size]byte `json:"configHash"` +} + +var adkLog = ctrllog.Log.WithName("adk") + +type AdkApiTranslator interface { + TranslateAgent( + ctx context.Context, + agent *v1alpha1.Agent, + ) (*AgentOutputs, error) + TranslateToolServer(ctx context.Context, toolServer *v1alpha1.ToolServer) (*database.ToolServer, error) +} + +func NewAdkApiTranslator(kube client.Client, defaultModelConfig types.NamespacedName) AdkApiTranslator { + return &adkApiTranslator{ + kube: kube, + defaultModelConfig: defaultModelConfig, + } +} + +type adkApiTranslator struct { + kube client.Client + defaultModelConfig types.NamespacedName +} + +const MAX_DEPTH = 10 + +type tState struct { + // used to prevent infinite loops + // The recursion limit is 10 + depth uint8 + // used to enforce DAG + // The final member of the list will be the "parent" agent + visitedAgents []string +} + +func (s *tState) with(agent *v1alpha1.Agent) *tState { + s.depth++ + s.visitedAgents = append(s.visitedAgents, common.GetObjectRef(agent)) + return s +} + +func (t *tState) isVisited(agentName string) bool { + return slices.Contains(t.visitedAgents, agentName) +} + +func (a *adkApiTranslator) TranslateAgent( + ctx context.Context, + agent *v1alpha1.Agent, +) (*AgentOutputs, error) { + + adkAgent, envVars, err := a.translateDeclarativeAgent(ctx, agent, &tState{}) + if err != nil { + return nil, err + } + + agentJson, err := json.Marshal(adkAgent) + if err != nil { + return nil, err + } + + byt, err := json.Marshal(struct { + EnvVars []corev1.EnvVar + Deployment *v1alpha1.DeploymentSpec + }{ + EnvVars: envVars, + Deployment: agent.Spec.Deployment, + }) + if err != nil { + return nil, err + } + + hash := sha256.Sum256(append(byt, agentJson...)) + configHash := binary.BigEndian.Uint64(hash[:8]) + + outputs, err := a.translateOutputs(ctx, agent, configHash, agentJson, envVars...) + if err != nil { + return nil, err + } + outputs.Config = adkAgent + outputs.ConfigHash = hash + + return outputs, nil +} + +func (a *adkApiTranslator) translateOutputs(_ context.Context, agent *v1alpha1.Agent, configHash uint64, configJson []byte, envVars ...corev1.EnvVar) (*AgentOutputs, error) { + outputs := &AgentOutputs{} + + podLabels := map[string]string{ + "app": "kagent", + "kagent": agent.Name, + } + + deploymentLabels := maps.Clone(agent.Labels) + if deploymentLabels == nil { + deploymentLabels = make(map[string]string) + } + maps.Copy(deploymentLabels, podLabels) + + objMeta := metav1.ObjectMeta{ + Name: agent.Name, + Namespace: agent.Namespace, + Annotations: agent.Annotations, + Labels: deploymentLabels, + } + if agent.Spec.Deployment != nil { + envVars = append(envVars, agent.Spec.Deployment.Env...) + } + + outputs.Manifest = append(outputs.Manifest, &corev1.ServiceAccount{ + TypeMeta: metav1.TypeMeta{ + APIVersion: "v1", + Kind: "ServiceAccount", + }, + ObjectMeta: objMeta, + }) + + outputs.Manifest = append(outputs.Manifest, &corev1.ConfigMap{ + TypeMeta: metav1.TypeMeta{ + APIVersion: "v1", + Kind: "ConfigMap", + }, + ObjectMeta: objMeta, + Data: map[string]string{ + "config.json": string(configJson), + }, + }) + + spec := defaultDeploymentSpec(objMeta.Name, podLabels, configHash, envVars...) 
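// --- illustrative sketch, not part of this patch ---
// The config hash computed in TranslateAgent above serves two purposes: the full
// sha256 digest is stored in the Agent status to skip redundant applies, and its
// first 8 bytes become a pod template label ("config.kagent.dev/hash"), so any
// change to the rendered config or env vars rolls the Deployment. A minimal
// reproduction of the hashing (the JSON payloads are stand-ins):
package main

import (
	"crypto/sha256"
	"encoding/binary"
	"encoding/json"
	"fmt"
)

func main() {
	// Stand-ins for the marshalled agent config and the env/deployment overrides.
	agentJSON, _ := json.Marshal(map[string]string{"name": "default_my_agent"})
	overridesJSON, _ := json.Marshal(map[string]string{"env": "OPENAI_API_KEY from secret"})

	hash := sha256.Sum256(append(overridesJSON, agentJSON...))

	// Label value derived from the leading 8 bytes, as in defaultDeploymentSpec.
	labelValue := fmt.Sprintf("%d", binary.BigEndian.Uint64(hash[:8]))
	fmt.Println("config.kagent.dev/hash =", labelValue)

	// Full digest kept in status; comparing it cheaply detects "nothing changed".
	fmt.Printf("status.configHash = %x\n", hash)
}
// --- end sketch ---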
+ outputs.Manifest = append(outputs.Manifest, &appsv1.Deployment{ + TypeMeta: metav1.TypeMeta{ + APIVersion: "apps/v1", + Kind: "Deployment", + }, + ObjectMeta: objMeta, + Spec: spec, + }) + + outputs.Manifest = append(outputs.Manifest, &corev1.Service{ + TypeMeta: metav1.TypeMeta{ + APIVersion: "v1", + Kind: "Service", + }, + ObjectMeta: objMeta, + Spec: corev1.ServiceSpec{ + Selector: map[string]string{ + "app": "kagent", + "kagent": agent.Name, + }, + Ports: []corev1.ServicePort{ + { + Name: "http", + Port: 8080, + TargetPort: intstr.FromInt(8080), + }, + }, + Type: corev1.ServiceTypeClusterIP, + }, + }) + + for _, obj := range outputs.Manifest { + if err := controllerutil.SetControllerReference(agent, obj, a.kube.Scheme()); err != nil { + return nil, err + } + } + + return outputs, nil +} + +func defaultDeploymentSpec(name string, labels map[string]string, configHash uint64, envVars ...corev1.EnvVar) appsv1.DeploymentSpec { + // TODO: Come up with a better way to do tracing config for the agents + envVars = append(envVars, slices.Collect(utils.Map( + utils.Filter( + slices.Values(os.Environ()), + func(envVar string) bool { + return strings.HasPrefix(envVar, "OTEL_") + }, + ), + func(envVar string) corev1.EnvVar { + parts := strings.SplitN(envVar, "=", 2) + return corev1.EnvVar{ + Name: parts[0], + Value: parts[1], + } + }, + ))...) + + envVars = append(envVars, corev1.EnvVar{ + Name: "KAGENT_NAMESPACE", + ValueFrom: &corev1.EnvVarSource{ + FieldRef: &corev1.ObjectFieldSelector{ + FieldPath: "metadata.namespace", + }, + }, + }) + + podTemplateLabels := maps.Clone(labels) + podTemplateLabels["config.kagent.dev/hash"] = fmt.Sprintf("%d", configHash) + + return appsv1.DeploymentSpec{ + Replicas: ptr.To(int32(1)), + Selector: &metav1.LabelSelector{ + MatchLabels: labels, + }, + Template: corev1.PodTemplateSpec{ + ObjectMeta: metav1.ObjectMeta{ + Labels: podTemplateLabels, + }, + Spec: corev1.PodSpec{ + ServiceAccountName: name, + Containers: []corev1.Container{ + { + Name: "kagent", + Image: fmt.Sprintf("cr.kagent.dev/kagent-dev/kagent/app:%s", version.Get().Version), + ImagePullPolicy: corev1.PullIfNotPresent, + Command: []string{"kagent", "static", "--host", "0.0.0.0", "--port", "8080", "--filepath", "/config/config.json"}, + Ports: []corev1.ContainerPort{ + { + Name: "http", + ContainerPort: 8080, + }, + }, + Resources: corev1.ResourceRequirements{ + Requests: corev1.ResourceList{ + corev1.ResourceCPU: resource.MustParse("100m"), + corev1.ResourceMemory: resource.MustParse("256Mi"), + }, + Limits: corev1.ResourceList{ + corev1.ResourceCPU: resource.MustParse("1000m"), + corev1.ResourceMemory: resource.MustParse("1Gi"), + }, + }, + Env: envVars, + ReadinessProbe: &corev1.Probe{ + ProbeHandler: corev1.ProbeHandler{ + HTTPGet: &corev1.HTTPGetAction{ + Path: "/health", + Port: intstr.FromString("http"), + }, + }, + InitialDelaySeconds: 15, + PeriodSeconds: 3, + }, + VolumeMounts: []corev1.VolumeMount{ + { + Name: "config", + MountPath: "/config", + }, + }, + }, + }, + Volumes: []corev1.Volume{ + { + Name: "config", + VolumeSource: corev1.VolumeSource{ + ConfigMap: &corev1.ConfigMapVolumeSource{ + LocalObjectReference: corev1.LocalObjectReference{ + Name: name, + }, + }, + }, + }, + }, + }, + }, + } +} + +func (a *adkApiTranslator) translateDeclarativeAgent(ctx context.Context, agent *v1alpha1.Agent, state *tState) (*adk.AgentConfig, []corev1.EnvVar, error) { + + model, envVars, err := a.translateModel(ctx, agent.Namespace, agent.Spec.ModelConfig) + if err != nil { + return nil, nil, err + } 
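// --- illustrative sketch, not part of this patch ---
// defaultDeploymentSpec above forwards every OTEL_* variable from the controller's
// own environment into the agent container (plus a downward-API KAGENT_NAMESPACE
// var), so agents inherit the controller's tracing setup without per-agent config.
// Setting aside the generic Filter/Map helpers, the effect is equivalent to this
// plain loop:
package main

import (
	"fmt"
	"os"
	"strings"

	corev1 "k8s.io/api/core/v1"
)

func otelEnvVars() []corev1.EnvVar {
	var out []corev1.EnvVar
	for _, kv := range os.Environ() {
		if !strings.HasPrefix(kv, "OTEL_") {
			continue
		}
		parts := strings.SplitN(kv, "=", 2)
		out = append(out, corev1.EnvVar{Name: parts[0], Value: parts[1]})
	}
	return out
}

func main() {
	os.Setenv("OTEL_EXPORTER_OTLP_ENDPOINT", "http://otel-collector:4317")
	for _, ev := range otelEnvVars() {
		fmt.Printf("%s=%s\n", ev.Name, ev.Value)
	}
}
// --- end sketch ---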
+ + cfg := &adk.AgentConfig{ + KagentUrl: fmt.Sprintf("http://kagent-controller.%s.svc:8083", common.GetResourceNamespace()), + Name: common.ConvertToPythonIdentifier(common.GetObjectRef(agent)), + Description: agent.Spec.Description, + Instruction: agent.Spec.SystemMessage, + Model: model, + AgentCard: server.AgentCard{ + Name: agent.Name, + Description: agent.Spec.Description, + URL: fmt.Sprintf("http://%s.%s.svc:8080", agent.Name, agent.Namespace), + Capabilities: server.AgentCapabilities{ + Streaming: ptr.To(true), + PushNotifications: ptr.To(false), + StateTransitionHistory: ptr.To(true), + }, + // Can't be null for Python, so set to empty list + Skills: []server.AgentSkill{}, + DefaultInputModes: []string{"text"}, + DefaultOutputModes: []string{"text"}, + }, + } + + if agent.Spec.A2AConfig != nil { + cfg.AgentCard.Skills = slices.Collect(utils.Map(slices.Values(agent.Spec.A2AConfig.Skills), func(skill v1alpha1.AgentSkill) server.AgentSkill { + return server.AgentSkill(skill) + })) + } + + toolsByServer := make(map[string][]string) + for _, tool := range agent.Spec.Tools { + // Skip tools that are not applicable to the model provider + switch { + case tool.McpServer != nil: + for _, toolName := range tool.McpServer.ToolNames { + toolsByServer[tool.McpServer.ToolServer] = append(toolsByServer[tool.McpServer.ToolServer], toolName) + } + case tool.Agent != nil: + toolNamespacedName, err := common.ParseRefString(tool.Agent.Ref, agent.Namespace) + if err != nil { + return nil, nil, err + } + + toolRef := toolNamespacedName.String() + agentRef := common.GetObjectRef(agent) + + if toolRef == agentRef { + return nil, nil, fmt.Errorf("agent tool cannot be used to reference itself, %s", agentRef) + } + + if state.isVisited(toolRef) { + return nil, nil, fmt.Errorf("cycle detected in agent tool chain: %s -> %s", agentRef, toolRef) + } + + if state.depth > MAX_DEPTH { + return nil, nil, fmt.Errorf("recursion limit reached in agent tool chain: %s -> %s", agentRef, toolRef) + } + + // Translate a nested tool + toolAgent := &v1alpha1.Agent{} + + err = common.GetObject( + ctx, + a.kube, + toolAgent, + toolRef, + agent.Namespace, // redundant + ) + if err != nil { + return nil, nil, err + } + + var toolAgentCfg *adk.AgentConfig + toolAgentCfg, _, err = a.translateDeclarativeAgent(ctx, toolAgent, state.with(agent)) + if err != nil { + return nil, nil, err + } + + cfg.Agents = append(cfg.Agents, *toolAgentCfg) + + default: + return nil, nil, fmt.Errorf("tool must have a provider or tool server") + } + } + for server, tools := range toolsByServer { + err := a.translateToolServerTool(ctx, cfg, server, tools, agent.Namespace) + if err != nil { + return nil, nil, err + } + } + + return cfg, envVars, nil +} + +func (a *adkApiTranslator) translateModel(ctx context.Context, namespace, modelConfig string) (adk.Model, []corev1.EnvVar, error) { + model := &v1alpha1.ModelConfig{} + err := a.kube.Get(ctx, types.NamespacedName{Namespace: namespace, Name: modelConfig}, model) + if err != nil { + return nil, nil, err + } + + var envVars []corev1.EnvVar + switch model.Spec.Provider { + case v1alpha1.ModelProviderOpenAI: + if model.Spec.APIKeySecretRef != "" { + envVars = append(envVars, corev1.EnvVar{ + Name: "OPENAI_API_KEY", + ValueFrom: &corev1.EnvVarSource{ + SecretKeyRef: &corev1.SecretKeySelector{ + LocalObjectReference: corev1.LocalObjectReference{ + Name: model.Spec.APIKeySecretRef, + }, + Key: model.Spec.APIKeySecretKey, + }, + }, + }) + } + openai := &adk.OpenAI{ + BaseModel: adk.BaseModel{ + Model: 
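// --- illustrative sketch, not part of this patch ---
// Nested Agent tools are translated recursively; the tState above carries the chain
// of visited agent refs and a depth counter so the translator can reject self
// references, cycles, and chains deeper than MAX_DEPTH. The walk reduces to
// something like this (agent refs are plain strings here for illustration):
package main

import (
	"fmt"
	"slices"
)

const maxDepth = 10

// graph maps an agent ref to the agent refs it uses as tools.
type graph map[string][]string

func walk(g graph, agent string, visited []string, depth int) error {
	if slices.Contains(visited, agent) {
		return fmt.Errorf("cycle detected in agent tool chain: %v -> %s", visited, agent)
	}
	if depth > maxDepth {
		return fmt.Errorf("recursion limit reached at %s", agent)
	}
	visited = append(visited, agent)
	for _, child := range g[agent] {
		if err := walk(g, child, visited, depth+1); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	g := graph{
		"default/planner":    {"default/researcher"},
		"default/researcher": {"default/planner"}, // cycle back to the parent
	}
	fmt.Println(walk(g, "default/planner", nil, 0))
}
// --- end sketch ---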
model.Spec.Model, + }, + } + if model.Spec.OpenAI != nil { + openai.BaseUrl = model.Spec.OpenAI.BaseURL + if model.Spec.OpenAI.Organization != "" { + envVars = append(envVars, corev1.EnvVar{ + Name: "OPENAI_ORGANIZATION", + Value: model.Spec.OpenAI.Organization, + }) + } + } + return openai, envVars, nil + case v1alpha1.ModelProviderAnthropic: + if model.Spec.APIKeySecretRef != "" { + envVars = append(envVars, corev1.EnvVar{ + Name: "ANTHROPIC_API_KEY", + ValueFrom: &corev1.EnvVarSource{ + SecretKeyRef: &corev1.SecretKeySelector{ + LocalObjectReference: corev1.LocalObjectReference{ + Name: model.Spec.APIKeySecretRef, + }, + Key: model.Spec.APIKeySecretKey, + }, + }, + }) + } + anthropic := &adk.Anthropic{ + BaseModel: adk.BaseModel{ + Model: model.Spec.Model, + }, + } + if model.Spec.Anthropic != nil { + anthropic.BaseUrl = model.Spec.Anthropic.BaseURL + } + return anthropic, envVars, nil + case v1alpha1.ModelProviderAzureOpenAI: + if model.Spec.AzureOpenAI == nil { + return nil, nil, fmt.Errorf("AzureOpenAI model config is required") + } + envVars = append(envVars, corev1.EnvVar{ + Name: "AZURE_API_KEY", + ValueFrom: &corev1.EnvVarSource{ + SecretKeyRef: &corev1.SecretKeySelector{ + LocalObjectReference: corev1.LocalObjectReference{ + Name: model.Spec.APIKeySecretRef, + }, + Key: model.Spec.APIKeySecretKey, + }, + }, + }) + if model.Spec.AzureOpenAI.AzureADToken != "" { + envVars = append(envVars, corev1.EnvVar{ + Name: "AZURE_AD_TOKEN", + Value: model.Spec.AzureOpenAI.AzureADToken, + }) + } + if model.Spec.AzureOpenAI.APIVersion != "" { + envVars = append(envVars, corev1.EnvVar{ + Name: "AZURE_API_VERSION", + Value: model.Spec.AzureOpenAI.APIVersion, + }) + } + if model.Spec.AzureOpenAI.Endpoint != "" { + envVars = append(envVars, corev1.EnvVar{ + Name: "AZURE_API_BASE", + Value: model.Spec.AzureOpenAI.Endpoint, + }) + } + azureOpenAI := &adk.AzureOpenAI{ + BaseModel: adk.BaseModel{ + Model: model.Spec.AzureOpenAI.DeploymentName, + }, + } + return azureOpenAI, envVars, nil + case v1alpha1.ModelProviderGeminiVertexAI: + if model.Spec.GeminiVertexAI == nil { + return nil, nil, fmt.Errorf("GeminiVertexAI model config is required") + } + envVars = append(envVars, corev1.EnvVar{ + Name: "GOOGLE_CLOUD_PROJECT", + Value: model.Spec.GeminiVertexAI.ProjectID, + }) + envVars = append(envVars, corev1.EnvVar{ + Name: "GOOGLE_CLOUD_LOCATION", + Value: model.Spec.GeminiVertexAI.Location, + }) + if model.Spec.APIKeySecretRef != "" { + envVars = append(envVars, corev1.EnvVar{ + Name: "GOOGLE_APPLICATION_CREDENTIALS", + ValueFrom: &corev1.EnvVarSource{ + SecretKeyRef: &corev1.SecretKeySelector{ + LocalObjectReference: corev1.LocalObjectReference{ + Name: model.Spec.APIKeySecretRef, + }, + Key: model.Spec.APIKeySecretKey, + }, + }, + }) + } + gemini := &adk.GeminiVertexAI{ + BaseModel: adk.BaseModel{ + Model: model.Spec.Model, + }, + } + return gemini, envVars, nil + case v1alpha1.ModelProviderAnthropicVertexAI: + if model.Spec.AnthropicVertexAI == nil { + return nil, nil, fmt.Errorf("AnthropicVertexAI model config is required") + } + envVars = append(envVars, corev1.EnvVar{ + Name: "GOOGLE_CLOUD_PROJECT", + Value: model.Spec.AnthropicVertexAI.ProjectID, + }) + envVars = append(envVars, corev1.EnvVar{ + Name: "GOOGLE_CLOUD_LOCATION", + Value: model.Spec.AnthropicVertexAI.Location, + }) + if model.Spec.APIKeySecretRef != "" { + envVars = append(envVars, corev1.EnvVar{ + Name: "GOOGLE_APPLICATION_CREDENTIALS", + ValueFrom: &corev1.EnvVarSource{ + SecretKeyRef: &corev1.SecretKeySelector{ + 
LocalObjectReference: corev1.LocalObjectReference{ + Name: model.Spec.APIKeySecretRef, + }, + Key: model.Spec.APIKeySecretKey, + }, + }, + }) + } + anthropic := &adk.GeminiAnthropic{ + BaseModel: adk.BaseModel{ + Model: model.Spec.Model, + }, + } + return anthropic, envVars, nil + case v1alpha1.ModelProviderOllama: + if model.Spec.Ollama == nil { + return nil, nil, fmt.Errorf("Ollama model config is required") + } + envVars = append(envVars, corev1.EnvVar{ + Name: "OLLAMA_API_BASE", + Value: model.Spec.Ollama.Host, + }) + ollama := &adk.Ollama{ + BaseModel: adk.BaseModel{ + Model: model.Spec.Model, + }, + } + return ollama, envVars, nil + case v1alpha1.ModelProviderGemini: + envVars = append(envVars, corev1.EnvVar{ + Name: "GOOGLE_API_KEY", + ValueFrom: &corev1.EnvVarSource{ + SecretKeyRef: &corev1.SecretKeySelector{ + LocalObjectReference: corev1.LocalObjectReference{ + Name: model.Spec.APIKeySecretRef, + }, + Key: model.Spec.APIKeySecretKey, + }, + }, + }) + gemini := &adk.Gemini{ + BaseModel: adk.BaseModel{ + Model: model.Spec.Model, + }, + } + return gemini, envVars, nil + } + return nil, nil, fmt.Errorf("unknown model provider: %s", model.Spec.Provider) +} + +func (a *adkApiTranslator) translateStreamableHttpTool(ctx context.Context, tool *v1alpha1.StreamableHttpServerConfig, namespace string) (*adk.StreamableHTTPConnectionParams, error) { + headers := make(map[string]string) + for _, header := range tool.HeadersFrom { + if header.Value != "" { + headers[header.Name] = header.Value + } else if header.ValueFrom != nil { + value, err := resolveValueSource(ctx, a.kube, header.ValueFrom, namespace) + if err != nil { + return nil, err + } + headers[header.Name] = value + } + } + + params := &adk.StreamableHTTPConnectionParams{ + Url: tool.URL, + Headers: headers, + } + if tool.Timeout != nil { + params.Timeout = ptr.To(tool.Timeout.Seconds()) + } + if tool.SseReadTimeout != nil { + params.SseReadTimeout = ptr.To(tool.SseReadTimeout.Seconds()) + } + if tool.TerminateOnClose != nil { + params.TerminateOnClose = tool.TerminateOnClose + } + return params, nil +} + +func (a *adkApiTranslator) translateSseHttpTool(ctx context.Context, tool *v1alpha1.SseMcpServerConfig, namespace string) (*adk.SseConnectionParams, error) { + headers := make(map[string]string) + for _, header := range tool.HeadersFrom { + if header.Value != "" { + headers[header.Name] = header.Value + } else if header.ValueFrom != nil { + value, err := resolveValueSource(ctx, a.kube, header.ValueFrom, namespace) + if err != nil { + return nil, err + } + headers[header.Name] = value + } + } + params := &adk.SseConnectionParams{ + Url: tool.URL, + Headers: headers, + } + if tool.Timeout != nil { + params.Timeout = ptr.To(tool.Timeout.Seconds()) + } + if tool.SseReadTimeout != nil { + params.SseReadTimeout = ptr.To(tool.SseReadTimeout.Seconds()) + } + return params, nil +} + +func (a *adkApiTranslator) translateToolServerTool(ctx context.Context, agent *adk.AgentConfig, toolServerRef string, toolNames []string, defaultNamespace string) error { + toolServerObj := &v1alpha1.ToolServer{} + err := common.GetObject( + ctx, + a.kube, + toolServerObj, + toolServerRef, + defaultNamespace, + ) + if err != nil { + return err + } + + switch { + case toolServerObj.Spec.Config.Sse != nil: + tool, err := a.translateSseHttpTool(ctx, toolServerObj.Spec.Config.Sse, defaultNamespace) + if err != nil { + return err + } + agent.SseTools = append(agent.SseTools, adk.SseMcpServerConfig{ + Params: *tool, + Tools: toolNames, + }) + case 
toolServerObj.Spec.Config.StreamableHttp != nil: + tool, err := a.translateStreamableHttpTool(ctx, toolServerObj.Spec.Config.StreamableHttp, defaultNamespace) + if err != nil { + return err + } + agent.HttpTools = append(agent.HttpTools, adk.HttpMcpServerConfig{ + Params: *tool, + Tools: toolNames, + }) + case toolServerObj.Spec.Config.Stdio != nil: + return fmt.Errorf("stdio tool server is deprecated") + default: + return fmt.Errorf("unknown tool server type: %s", toolServerObj.Spec.Config.Type) + } + return nil +} + +func (a *adkApiTranslator) TranslateToolServer(ctx context.Context, toolServer *v1alpha1.ToolServer) (*database.ToolServer, error) { + return &database.ToolServer{ + Name: common.GetObjectRef(toolServer), + Description: toolServer.Spec.Description, + Config: toolServer.Spec.Config, + }, nil +} + +// resolveValueSource resolves a value from a ValueSource +func resolveValueSource(ctx context.Context, kube client.Client, source *v1alpha1.ValueSource, namespace string) (string, error) { + if source == nil { + return "", fmt.Errorf("source cannot be nil") + } + + switch source.Type { + case v1alpha1.ConfigMapValueSource: + return getConfigMapValue(ctx, kube, source, namespace) + case v1alpha1.SecretValueSource: + return getSecretValue(ctx, kube, source, namespace) + default: + return "", fmt.Errorf("unknown value source type: %s", source.Type) + } +} + +// getConfigMapValue fetches a value from a ConfigMap +func getConfigMapValue(ctx context.Context, kube client.Client, source *v1alpha1.ValueSource, namespace string) (string, error) { + if source == nil { + return "", fmt.Errorf("source cannot be nil") + } + + configMap := &corev1.ConfigMap{} + err := common.GetObject( + ctx, + kube, + configMap, + source.ValueRef, + namespace, + ) + if err != nil { + return "", fmt.Errorf("failed to find ConfigMap for %s: %v", source.ValueRef, err) + } + + value, exists := configMap.Data[source.Key] + if !exists { + return "", fmt.Errorf("key %s not found in ConfigMap %s/%s", source.Key, configMap.Namespace, configMap.Name) + } + return value, nil +} + +// getSecretValue fetches a value from a Secret +func getSecretValue(ctx context.Context, kube client.Client, source *v1alpha1.ValueSource, namespace string) (string, error) { + if source == nil { + return "", fmt.Errorf("source cannot be nil") + } + + secret := &corev1.Secret{} + err := common.GetObject( + ctx, + kube, + secret, + source.ValueRef, + namespace, + ) + if err != nil { + return "", fmt.Errorf("failed to find Secret for %s: %v", source.ValueRef, err) + } + + value, exists := secret.Data[source.Key] + if !exists { + return "", fmt.Errorf("key %s not found in Secret %s/%s", source.Key, secret.Namespace, secret.Name) + } + return string(value), nil +} diff --git a/go/controller/translator/autogen_translator_golden_test.go b/go/controller/translator/adk_translator_golden_test.go similarity index 90% rename from go/controller/translator/autogen_translator_golden_test.go rename to go/controller/translator/adk_translator_golden_test.go index 068d1e418..c84b899aa 100644 --- a/go/controller/translator/autogen_translator_golden_test.go +++ b/go/controller/translator/adk_translator_golden_test.go @@ -30,8 +30,8 @@ type TestInput struct { Namespace string `yaml:"namespace"` } -// TestGoldenAutogenTranslator runs golden tests for the autogen API translator -func TestGoldenAutogenTranslator(t *testing.T) { +// TestGoldenAdkTranslator runs golden tests for the ADK API translator +func TestGoldenAdkTranslator(t *testing.T) { // Skip if running in CI 
without update flag updateGolden := os.Getenv("UPDATE_GOLDEN") == "true" @@ -73,7 +73,6 @@ func runGoldenTest(t *testing.T, inputFile, outputsDir, testName string, updateG // Set up fake Kubernetes client scheme := scheme.Scheme err = v1alpha1.AddToScheme(scheme) - require.NoError(t, err) // Convert map objects to unstructured and then to typed objects clientBuilder := fake.NewClientBuilder().WithScheme(scheme) @@ -108,8 +107,6 @@ func runGoldenTest(t *testing.T, inputFile, outputsDir, testName string, updateG } } - translator := translator.NewAutogenApiTranslator(kubeClient, defaultModel) - // Execute the specified operation var result interface{} switch testInput.Operation { @@ -121,18 +118,7 @@ func runGoldenTest(t *testing.T, inputFile, outputsDir, testName string, updateG }, agent) require.NoError(t, err) - result, err = translator.TranslateGroupChatForAgent(ctx, agent) - require.NoError(t, err) - - case "translateTeam": - team := &v1alpha1.Team{} - err := kubeClient.Get(ctx, types.NamespacedName{ - Name: testInput.TargetObject, - Namespace: testInput.Namespace, - }, team) - require.NoError(t, err) - - result, err = translator.TranslateGroupChatForTeam(ctx, team) + result, err = translator.NewAdkApiTranslator(kubeClient, defaultModel).TranslateAgent(ctx, agent) require.NoError(t, err) case "translateToolServer": @@ -143,7 +129,7 @@ func runGoldenTest(t *testing.T, inputFile, outputsDir, testName string, updateG }, toolServer) require.NoError(t, err) - result, err = translator.TranslateToolServer(ctx, toolServer) + result, err = translator.NewAdkApiTranslator(kubeClient, defaultModel).TranslateToolServer(ctx, toolServer) require.NoError(t, err) default: diff --git a/go/controller/translator/autogen_api_translator.go b/go/controller/translator/autogen_api_translator.go deleted file mode 100644 index 204e08f92..000000000 --- a/go/controller/translator/autogen_api_translator.go +++ /dev/null @@ -1,1241 +0,0 @@ -package translator - -import ( - "context" - "encoding/json" - "fmt" - "slices" - "strconv" - - "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" - "github.com/kagent-dev/kagent/go/internal/autogen/api" - "github.com/kagent-dev/kagent/go/internal/database" - common "github.com/kagent-dev/kagent/go/internal/utils" - corev1 "k8s.io/api/core/v1" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/types" - "k8s.io/utils/ptr" - "sigs.k8s.io/controller-runtime/pkg/client" - ctrllog "sigs.k8s.io/controller-runtime/pkg/log" -) - -var ( - log = ctrllog.Log.WithName("autogen") -) - -type ApiTranslator interface { - TranslateGroupChatForTeam( - ctx context.Context, - team *v1alpha1.Team, - ) (*database.Agent, error) - - TranslateGroupChatForAgent( - ctx context.Context, - agent *v1alpha1.Agent, - ) (*database.Agent, error) - - TranslateToolServer(ctx context.Context, toolServer *v1alpha1.ToolServer) (*database.ToolServer, error) -} - -type apiTranslator struct { - kube client.Client - defaultModelConfig types.NamespacedName -} - -func (a *apiTranslator) TranslateToolServer(ctx context.Context, toolServer *v1alpha1.ToolServer) (*database.ToolServer, error) { - // provder = "kagent.tool_servers.StdioMcpToolServer" || "kagent.tool_servers.SseMcpToolServer" - provider, toolServerConfig, err := a.translateToolServerConfig(ctx, toolServer.Spec.Config, toolServer.Namespace) - if err != nil { - return nil, err - } - - return &database.ToolServer{ - Name: common.GetObjectRef(toolServer), - Component: api.Component{ - Provider: provider, - ComponentType: "tool_server", - 
Version: 1, - Description: toolServer.Spec.Description, - Label: common.GetObjectRef(toolServer), - Config: api.MustToConfig(toolServerConfig), - }, - }, nil -} - -// resolveValueSource resolves a value from a ValueSource -func (a *apiTranslator) resolveValueSource(ctx context.Context, source *v1alpha1.ValueSource, namespace string) (string, error) { - if source == nil { - return "", fmt.Errorf("source cannot be nil") - } - - switch source.Type { - case v1alpha1.ConfigMapValueSource: - return a.getConfigMapValue(ctx, source, namespace) - case v1alpha1.SecretValueSource: - return a.getSecretValue(ctx, source, namespace) - default: - return "", fmt.Errorf("unknown value source type: %s", source.Type) - } -} - -// getConfigMapValue fetches a value from a ConfigMap -func (a *apiTranslator) getConfigMapValue(ctx context.Context, source *v1alpha1.ValueSource, namespace string) (string, error) { - if source == nil { - return "", fmt.Errorf("source cannot be nil") - } - - configMap := &corev1.ConfigMap{} - err := common.GetObject( - ctx, - a.kube, - configMap, - source.ValueRef, - namespace, - ) - if err != nil { - return "", fmt.Errorf("failed to find ConfigMap for %s: %v", source.ValueRef, err) - } - - value, exists := configMap.Data[source.Key] - if !exists { - return "", fmt.Errorf("key %s not found in ConfigMap %s/%s", source.Key, configMap.Namespace, configMap.Name) - } - return value, nil -} - -// getSecretValue fetches a value from a Secret -func (a *apiTranslator) getSecretValue(ctx context.Context, source *v1alpha1.ValueSource, namespace string) (string, error) { - if source == nil { - return "", fmt.Errorf("source cannot be nil") - } - - secret := &corev1.Secret{} - err := common.GetObject( - ctx, - a.kube, - secret, - source.ValueRef, - namespace, - ) - if err != nil { - return "", fmt.Errorf("failed to find Secret for %s: %v", source.ValueRef, err) - } - - value, exists := secret.Data[source.Key] - if !exists { - return "", fmt.Errorf("key %s not found in Secret %s/%s", source.Key, secret.Namespace, secret.Name) - } - return string(value), nil -} - -func (a *apiTranslator) translateToolServerConfig(ctx context.Context, config v1alpha1.ToolServerConfig, namespace string) (string, api.ComponentConfig, error) { - switch { - case config.Stdio != nil: - env := make(map[string]string) - - if config.Stdio.Env != nil { - for k, v := range config.Stdio.Env { - env[k] = v - } - } - - if len(config.Stdio.EnvFrom) > 0 { - for _, envVar := range config.Stdio.EnvFrom { - if envVar.ValueFrom != nil { - value, err := a.resolveValueSource(ctx, envVar.ValueFrom, namespace) - - if err != nil { - return "", nil, fmt.Errorf("failed to resolve environment variable %s: %v", envVar.Name, err) - } - - env[envVar.Name] = value - } else if envVar.Value != "" { - env[envVar.Name] = envVar.Value - } - } - } - - return "kagent.tool_servers.StdioMcpToolServer", &api.StdioMcpServerConfig{ - Command: config.Stdio.Command, - Args: config.Stdio.Args, - Env: env, - ReadTimeoutSeconds: 30, - }, nil - case config.Sse != nil: - headers, err := convertMapFromAnytype(config.Sse.Headers) - if err != nil { - return "", nil, err - } - - if len(config.Sse.HeadersFrom) > 0 { - for _, header := range config.Sse.HeadersFrom { - if header.ValueFrom != nil { - value, err := a.resolveValueSource(ctx, header.ValueFrom, namespace) - - if err != nil { - return "", nil, fmt.Errorf("failed to resolve header %s: %v", header.Name, err) - } - - headers[header.Name] = value - } else if header.Value != "" { - headers[header.Name] = header.Value 
- } - } - } - - var timeout *float64 - if config.Sse.Timeout != nil { - timeout = ptr.To(config.Sse.Timeout.Duration.Seconds()) - } - - var sseReadTimeout *float64 - if config.Sse.SseReadTimeout != nil { - sseReadTimeout = ptr.To(config.Sse.SseReadTimeout.Duration.Seconds()) - } - - return "kagent.tool_servers.SseMcpToolServer", &api.SseMcpServerConfig{ - URL: config.Sse.URL, - Headers: headers, - Timeout: timeout, - SseReadTimeout: sseReadTimeout, - }, nil - case config.StreamableHttp != nil: - - headers, err := convertMapFromAnytype(config.StreamableHttp.Headers) - if err != nil { - return "", nil, err - } - - if len(config.StreamableHttp.HeadersFrom) > 0 { - for _, header := range config.StreamableHttp.HeadersFrom { - if header.ValueFrom != nil { - value, err := a.resolveValueSource(ctx, header.ValueFrom, namespace) - - if err != nil { - return "", nil, fmt.Errorf("failed to resolve header %s: %v", header.Name, err) - } - - headers[header.Name] = value - } else if header.Value != "" { - headers[header.Name] = header.Value - } - } - } - - var timeout *float64 - if config.StreamableHttp.Timeout != nil { - timeout = ptr.To(config.StreamableHttp.Timeout.Duration.Seconds()) - } - var sseReadTimeout *float64 - if config.StreamableHttp.SseReadTimeout != nil { - sseReadTimeout = ptr.To(config.StreamableHttp.SseReadTimeout.Duration.Seconds()) - } - - return "kagent.tool_servers.StreamableHttpMcpToolServer", &api.StreamableHttpServerConfig{ - URL: config.StreamableHttp.URL, - Headers: headers, - Timeout: timeout, - SseReadTimeout: sseReadTimeout, - TerminateOnClose: config.StreamableHttp.TerminateOnClose, - }, nil - } - - return "", nil, fmt.Errorf("unsupported tool server config") -} - -func NewAutogenApiTranslator( - kube client.Client, - defaultModelConfig types.NamespacedName, -) ApiTranslator { - return &apiTranslator{ - kube: kube, - defaultModelConfig: defaultModelConfig, - } -} - -func (a *apiTranslator) TranslateGroupChatForAgent(ctx context.Context, agent *v1alpha1.Agent) (*database.Agent, error) { - stream := true - if agent.Spec.Stream != nil { - stream = *agent.Spec.Stream - } - opts := defaultTeamOptions() - opts.stream = stream - - return a.translateGroupChatForAgent(ctx, agent, opts, &tState{}) -} - -func (a *apiTranslator) TranslateGroupChatForTeam( - ctx context.Context, - team *v1alpha1.Team, -) (*database.Agent, error) { - return a.translateGroupChatForTeam(ctx, team, defaultTeamOptions(), &tState{}) -} - -type teamOptions struct { - stream bool -} - -const MAX_DEPTH = 10 - -type tState struct { - // used to prevent infinite loops - // The recursion limit is 10 - depth uint8 - // used to enforce DAG - // The final member of the list will be the "parent" agent - visitedAgents []string -} - -func (s *tState) with(agent *v1alpha1.Agent) *tState { - s.depth++ - s.visitedAgents = append(s.visitedAgents, agent.Name) - return s -} - -func (t *tState) isVisited(agentName string) bool { - return slices.Contains(t.visitedAgents, agentName) -} - -func defaultTeamOptions() *teamOptions { - return &teamOptions{ - stream: true, - } -} - -func (a *apiTranslator) translateGroupChatForAgent( - ctx context.Context, - agent *v1alpha1.Agent, - opts *teamOptions, - state *tState, -) (*database.Agent, error) { - simpleTeam, err := a.simpleRoundRobinTeam(ctx, agent) - if err != nil { - return nil, err - } - - return a.translateGroupChatForTeam(ctx, simpleTeam, opts, state) -} - -func (a *apiTranslator) translateGroupChatForTeam( - ctx context.Context, - team *v1alpha1.Team, - opts *teamOptions, - 
state *tState, -) (*database.Agent, error) { - // get model config - roundRobinTeamConfig := team.Spec.RoundRobinTeamConfig - - modelConfigObj, err := common.GetModelConfig( - ctx, - a.kube, - team, - a.defaultModelConfig, - ) - if err != nil { - return nil, err - } - - modelClientWithStreaming, err := a.createModelClientForProvider(ctx, modelConfigObj, true) - if err != nil { - return nil, err - } - - modelClientWithoutStreaming, err := a.createModelClientForProvider(ctx, modelConfigObj, false) - if err != nil { - return nil, err - } - - modelContext := &api.Component{ - Provider: "autogen_core.model_context.UnboundedChatCompletionContext", - ComponentType: "chat_completion_context", - Version: 1, - Description: "An unbounded chat completion context that keeps a view of the all the messages.", - Label: "UnboundedChatCompletionContext", - Config: map[string]interface{}{}, - } - - var participants []*api.Component - - for _, agentRef := range team.Spec.Participants { - agentObj := &v1alpha1.Agent{} - if err := common.GetObject( - ctx, - a.kube, - agentObj, - agentRef, - team.Namespace, - ); err != nil { - return nil, err - } - - participant, err := a.translateAssistantAgent( - ctx, - agentObj, - modelConfigObj, - modelClientWithStreaming, - modelClientWithoutStreaming, - modelContext, - opts, - state, - ) - if err != nil { - return nil, err - } - - participants = append(participants, participant) - } - - terminationCondition, err := translateTerminationCondition(team.Spec.TerminationCondition) - if err != nil { - return nil, err - } - - commonTeamConfig := api.CommonTeamConfig{ - Participants: participants, - Termination: terminationCondition, - } - - var teamConfig *api.Component - if roundRobinTeamConfig != nil { - teamConfig = &api.Component{ - Provider: "autogen_agentchat.teams.RoundRobinGroupChat", - ComponentType: "team", - Version: 1, - Description: team.Spec.Description, - Config: api.MustToConfig(&api.RoundRobinGroupChatConfig{ - CommonTeamConfig: commonTeamConfig, - }), - } - - } else { - return nil, fmt.Errorf("no team config specified") - } - - teamConfig.Label = common.GetObjectRef(team) - - return &database.Agent{ - Name: common.GetObjectRef(team), - Component: *teamConfig, - }, nil -} - -func (a *apiTranslator) simpleRoundRobinTeam(ctx context.Context, agent *v1alpha1.Agent) (*v1alpha1.Team, error) { - modelConfigObj, err := common.GetModelConfig( - ctx, - a.kube, - agent, - a.defaultModelConfig, - ) - if err != nil { - return nil, err - } - modelConfigRef := common.GetObjectRef(modelConfigObj) - - // generate an internal round robin "team" for the society of mind agent - meta := agent.ObjectMeta.DeepCopy() - meta.Name = agent.GetName() - meta.Namespace = agent.GetNamespace() - agentRef := common.GetObjectRef(agent) - - team := &v1alpha1.Team{ - ObjectMeta: *meta, - TypeMeta: metav1.TypeMeta{ - Kind: "Team", - APIVersion: "kagent.dev/v1alpha1", - }, - Spec: v1alpha1.TeamSpec{ - Participants: []string{agentRef}, - Description: agent.Spec.Description, - ModelConfig: modelConfigRef, - RoundRobinTeamConfig: &v1alpha1.RoundRobinTeamConfig{}, - TerminationCondition: v1alpha1.TerminationCondition{ - FinalTextMessageTermination: &v1alpha1.FinalTextMessageTermination{ - Source: common.ConvertToPythonIdentifier(agentRef), - }, - }, - }, - } - - return team, nil -} - -func (a *apiTranslator) translateAssistantAgent( - ctx context.Context, - agent *v1alpha1.Agent, - modelConfig *v1alpha1.ModelConfig, - modelClientWithStreaming *api.Component, - modelClientWithoutStreaming *api.Component, 
- modelContext *api.Component, - opts *teamOptions, - state *tState, -) (*api.Component, error) { - - tools := []*api.Component{} - for _, tool := range agent.Spec.Tools { - // Skip tools that are not applicable to the model provider - switch { - case tool.McpServer != nil: - for _, toolName := range tool.McpServer.ToolNames { - autogenTool, err := translateToolServerTool( - ctx, - a.kube, - tool.McpServer.ToolServer, - toolName, - agent.Namespace, - ) - if err != nil { - return nil, err - } - tools = append(tools, autogenTool) - } - case tool.Agent != nil: - toolNamespacedName, err := common.ParseRefString(tool.Agent.Ref, agent.Namespace) - if err != nil { - return nil, err - } - - toolRef := toolNamespacedName.String() - agentRef := common.GetObjectRef(agent) - - if toolRef == agentRef { - return nil, fmt.Errorf("agent tool cannot be used to reference itself, %s", agentRef) - } - - if state.isVisited(toolRef) { - return nil, fmt.Errorf("cycle detected in agent tool chain: %s -> %s", agentRef, toolRef) - } - - if state.depth > MAX_DEPTH { - return nil, fmt.Errorf("recursion limit reached in agent tool chain: %s -> %s", agentRef, toolRef) - } - - // Translate a nested tool - toolAgent := &v1alpha1.Agent{} - - err = common.GetObject( - ctx, - a.kube, - toolAgent, - toolRef, - agent.Namespace, // redundant - ) - if err != nil { - return nil, err - } - - team, err := a.simpleRoundRobinTeam(ctx, toolAgent) - if err != nil { - return nil, err - } - autogenTool, err := a.translateGroupChatForTeam(ctx, team, &teamOptions{}, state.with(agent)) - if err != nil { - return nil, err - } - - toolAgentRef := common.GetObjectRef(toolAgent) - tool := &api.Component{ - Provider: "autogen_agentchat.tools.TeamTool", - ComponentType: "tool", - Version: 1, - Config: api.MustToConfig(&api.TeamToolConfig{ - Name: common.ConvertToPythonIdentifier(toolAgentRef), - Description: toolAgent.Spec.Description, - Team: &autogenTool.Component, - }), - } - - tools = append(tools, tool) - - default: - return nil, fmt.Errorf("tool must have a provider or tool server") - } - } - - sysMsg := agent.Spec.SystemMessage - - agentRef := common.GetObjectRef(agent) - - cfg := &api.AssistantAgentConfig{ - Name: common.ConvertToPythonIdentifier(agentRef), - Tools: tools, - ModelContext: modelContext, - Description: agent.Spec.Description, - // TODO(ilackarms): convert to non-ptr with omitempty? 
- SystemMessage: sysMsg, - ReflectOnToolUse: false, - ToolCallSummaryFormat: "\nTool: \n{tool_name}\n\nArguments:\n\n{arguments}\n\nResult: \n{result}\n", - } - - if opts.stream { - cfg.ModelClient = modelClientWithStreaming - cfg.ModelClientStream = true - } else { - cfg.ModelClient = modelClientWithoutStreaming - cfg.ModelClientStream = false - } - - if agent.Spec.Memory != nil { - for _, memoryRef := range agent.Spec.Memory { - autogenMemory, err := a.translateMemory(ctx, memoryRef, agent.Namespace) - if err != nil { - return nil, err - } - - cfg.Memory = append(cfg.Memory, autogenMemory) - } - } - - return &api.Component{ - Provider: "autogen_agentchat.agents.AssistantAgent", - ComponentType: "agent", - Version: 1, - Description: agent.Spec.Description, - Config: api.MustToConfig(cfg), - }, nil -} - -func (a *apiTranslator) translateMemory(ctx context.Context, memoryRef string, defaultNamespace string) (*api.Component, error) { - memoryObj := &v1alpha1.Memory{} - if err := common.GetObject(ctx, a.kube, memoryObj, memoryRef, defaultNamespace); err != nil { - return nil, err - } - - switch memoryObj.Spec.Provider { - case v1alpha1.Pinecone: - apiKey, err := a.getMemoryApiKey(ctx, memoryObj) - if err != nil { - return nil, err - } - - threshold, err := strconv.ParseFloat(memoryObj.Spec.Pinecone.ScoreThreshold, 32) - if err != nil { - return nil, fmt.Errorf("failed to parse score threshold: %v", err) - } - - return &api.Component{ - Provider: "kagent.memory.PineconeMemory", - ComponentType: "memory", - Version: 1, - Config: api.MustToConfig(&api.PineconeMemoryConfig{ - APIKey: string(apiKey), - IndexHost: memoryObj.Spec.Pinecone.IndexHost, - TopK: memoryObj.Spec.Pinecone.TopK, - Namespace: memoryObj.Spec.Pinecone.Namespace, - RecordFields: memoryObj.Spec.Pinecone.RecordFields, - ScoreThreshold: threshold, - }), - }, nil - } - - return nil, fmt.Errorf("unsupported memory provider: %s", memoryObj.Spec.Provider) -} - -func translateToolServerTool( - ctx context.Context, - kube client.Client, - toolServerRef string, - toolName string, - defaultNamespace string, -) (*api.Component, error) { - toolServerObj := &v1alpha1.ToolServer{} - err := common.GetObject( - ctx, - kube, - toolServerObj, - toolServerRef, - defaultNamespace, - ) - if err != nil { - return nil, err - } - - // requires the tool to have been discovered - for _, discoveredTool := range toolServerObj.Status.DiscoveredTools { - if discoveredTool.Name == toolName { - return convertComponent(discoveredTool.Component) - } - } - - return nil, fmt.Errorf("tool %v not found in discovered tools in ToolServer %v", toolName, toolServerObj.Namespace+"/"+toolServerObj.Name) -} - -func convertComponent(component v1alpha1.Component) (*api.Component, error) { - config, err := convertMapFromAnytype(component.Config) - if err != nil { - return nil, err - } - return &api.Component{ - Provider: component.Provider, - ComponentType: component.ComponentType, - Version: component.Version, - ComponentVersion: component.ComponentVersion, - Description: component.Description, - Label: component.Label, - Config: config, - }, nil -} - -func convertMapFromAnytype(config map[string]v1alpha1.AnyType) (map[string]interface{}, error) { - // convert to map[string]interface{} to allow kubebuilder schemaless validation - // see https://github.com/kubernetes-sigs/controller-tools/issues/636 for more info - // must unmarshal to interface{} to avoid json.RawMessage - convertedConfig := make(map[string]interface{}) - - if config == nil { - return convertedConfig, nil - } 
- - raw, err := json.Marshal(config) - if err != nil { - return nil, err - } - - err = json.Unmarshal(raw, &convertedConfig) - if err != nil { - return nil, err - } - - return convertedConfig, nil -} - -func translateTerminationCondition(terminationCondition v1alpha1.TerminationCondition) (*api.Component, error) { - // ensure only one termination condition is set - var conditionsSet int - if terminationCondition.MaxMessageTermination != nil { - conditionsSet++ - } - if terminationCondition.TextMentionTermination != nil { - conditionsSet++ - } - if terminationCondition.OrTermination != nil { - conditionsSet++ - } - if terminationCondition.StopMessageTermination != nil { - conditionsSet++ - } - if terminationCondition.TextMessageTermination != nil { - conditionsSet++ - } - if terminationCondition.FinalTextMessageTermination != nil { - conditionsSet++ - } - if conditionsSet != 1 { - return nil, fmt.Errorf("exactly one termination condition must be set, got %d", conditionsSet) - } - - switch { - case terminationCondition.MaxMessageTermination != nil: - return &api.Component{ - Provider: "autogen_agentchat.conditions.MaxMessageTermination", - ComponentType: "termination", - Version: 1, - //ComponentVersion: 1, - Config: api.MustToConfig(&api.MaxMessageTerminationConfig{ - MaxMessages: terminationCondition.MaxMessageTermination.MaxMessages, - }), - }, nil - case terminationCondition.TextMentionTermination != nil: - return &api.Component{ - Provider: "autogen_agentchat.conditions.TextMentionTermination", - ComponentType: "termination", - Version: 1, - //ComponentVersion: 1, - Config: api.MustToConfig(&api.TextMentionTerminationConfig{ - Text: terminationCondition.TextMentionTermination.Text, - }), - }, nil - case terminationCondition.TextMessageTermination != nil: - return &api.Component{ - Provider: "autogen_agentchat.conditions.TextMessageTermination", - ComponentType: "termination", - Version: 1, - //ComponentVersion: 1, - Config: api.MustToConfig(&api.TextMessageTerminationConfig{ - Source: terminationCondition.TextMessageTermination.Source, - }), - }, nil - case terminationCondition.FinalTextMessageTermination != nil: - return &api.Component{ - Provider: "kagent.conditions.FinalTextMessageTermination", - ComponentType: "termination", - Version: 1, - //ComponentVersion: 1, - Config: api.MustToConfig(&api.FinalTextMessageTerminationConfig{ - Source: terminationCondition.FinalTextMessageTermination.Source, - }), - }, nil - case terminationCondition.OrTermination != nil: - var conditions []*api.Component - for _, c := range terminationCondition.OrTermination.Conditions { - subConditon := v1alpha1.TerminationCondition{ - MaxMessageTermination: c.MaxMessageTermination, - TextMentionTermination: c.TextMentionTermination, - } - - condition, err := translateTerminationCondition(subConditon) - if err != nil { - return nil, err - } - conditions = append(conditions, condition) - } - return &api.Component{ - Provider: "autogen_agentchat.conditions.OrTerminationCondition", - ComponentType: "termination", - Version: 1, - //ComponentVersion: 1, - Config: api.MustToConfig(&api.OrTerminationConfig{ - Conditions: conditions, - }), - }, nil - case terminationCondition.StopMessageTermination != nil: - return &api.Component{ - Provider: "autogen_agentchat.conditions.StopMessageTermination", - ComponentType: "termination", - Version: 1, - //ComponentVersion: 1, - Config: api.MustToConfig(&api.StopMessageTerminationConfig{}), - Label: "StopMessageTermination", - }, nil - } - - return nil, fmt.Errorf("unsupported 
termination condition") -} - -func addModelClientToConfig( - modelClient *api.Component, - toolConfig *map[string]interface{}, -) error { - if *toolConfig == nil { - *toolConfig = make(map[string]interface{}) - } - - cfg, err := modelClient.ToConfig() - if err != nil { - return err - } - - (*toolConfig)["model_client"] = cfg - return nil -} - -func addOpenaiApiKeyToConfig( - apiKey []byte, - toolConfig *map[string]interface{}, -) error { - if *toolConfig == nil { - *toolConfig = make(map[string]interface{}) - } - - (*toolConfig)["openai_api_key"] = string(apiKey) - return nil -} - -// createModelClientForProvider creates a model client component based on the model provider -func (a *apiTranslator) createModelClientForProvider(ctx context.Context, modelConfig *v1alpha1.ModelConfig, stream bool) (*api.Component, error) { - - switch modelConfig.Spec.Provider { - case v1alpha1.Anthropic: - apiKey, err := a.getModelConfigApiKey(ctx, modelConfig) - if err != nil { - return nil, err - } - - config := &api.AnthropicClientConfiguration{ - BaseAnthropicClientConfiguration: api.BaseAnthropicClientConfiguration{ - APIKey: string(apiKey), - Model: modelConfig.Spec.Model, - ModelInfo: translateModelInfo(modelConfig.Spec.ModelInfo), - }, - } - - // Add provider-specific configurations - if modelConfig.Spec.Anthropic != nil { - anthropicConfig := modelConfig.Spec.Anthropic - - config.BaseURL = anthropicConfig.BaseURL - if anthropicConfig.MaxTokens > 0 { - config.MaxTokens = anthropicConfig.MaxTokens - } - - if anthropicConfig.Temperature != "" { - temp, err := strconv.ParseFloat(anthropicConfig.Temperature, 64) - if err == nil { - config.Temperature = temp - } - } - - if anthropicConfig.TopP != "" { - topP, err := strconv.ParseFloat(anthropicConfig.TopP, 64) - if err == nil { - config.TopP = topP - } - } - - config.TopK = anthropicConfig.TopK - } - - // Convert to map - configMap, err := config.ToConfig() - if err != nil { - return nil, fmt.Errorf("failed to convert Anthropic config: %w", err) - } - config.DefaultHeaders = modelConfig.Spec.DefaultHeaders - return &api.Component{ - Provider: "autogen_ext.models.anthropic.AnthropicChatCompletionClient", - ComponentType: "model", - Version: 1, - Config: configMap, - }, nil - - case v1alpha1.AzureOpenAI: - apiKey, err := a.getModelConfigApiKey(ctx, modelConfig) - if err != nil { - return nil, err - } - config := &api.AzureOpenAIClientConfig{ - BaseOpenAIClientConfig: api.BaseOpenAIClientConfig{ - Model: modelConfig.Spec.Model, - APIKey: string(apiKey), - ModelInfo: translateModelInfo(modelConfig.Spec.ModelInfo), - }, - } - - if stream { - config.StreamOptions = &api.StreamOptions{ - IncludeUsage: true, - } - } - - // Add provider-specific configurations - if modelConfig.Spec.AzureOpenAI != nil { - azureConfig := modelConfig.Spec.AzureOpenAI - - config.AzureEndpoint = azureConfig.Endpoint - config.APIVersion = azureConfig.APIVersion - config.AzureDeployment = azureConfig.DeploymentName - config.AzureADToken = azureConfig.AzureADToken - - if azureConfig.Temperature != "" { - temp, err := strconv.ParseFloat(azureConfig.Temperature, 64) - if err == nil { - config.Temperature = temp - } - } - - if azureConfig.TopP != "" { - topP, err := strconv.ParseFloat(azureConfig.TopP, 64) - if err == nil { - config.TopP = topP - } - } - } - config.DefaultHeaders = modelConfig.Spec.DefaultHeaders - return &api.Component{ - Provider: "autogen_ext.models.openai.AzureOpenAIChatCompletionClient", - ComponentType: "model", - Version: 1, - Config: api.MustToConfig(config), - }, nil 
- - case v1alpha1.OpenAI: - apiKey, err := a.getModelConfigApiKey(ctx, modelConfig) - if err != nil { - return nil, err - } - config := &api.OpenAIClientConfig{ - BaseOpenAIClientConfig: api.BaseOpenAIClientConfig{ - Model: modelConfig.Spec.Model, - APIKey: string(apiKey), - ModelInfo: translateModelInfo(modelConfig.Spec.ModelInfo), - }, - } - - if stream { - config.StreamOptions = &api.StreamOptions{ - IncludeUsage: true, - } - } - - // Add provider-specific configurations - if modelConfig.Spec.OpenAI != nil { - openAIConfig := modelConfig.Spec.OpenAI - - if openAIConfig.BaseURL != "" { - config.BaseURL = &openAIConfig.BaseURL - } - - if openAIConfig.Organization != "" { - config.Organization = &openAIConfig.Organization - } - - if openAIConfig.MaxTokens > 0 { - config.MaxTokens = openAIConfig.MaxTokens - } - - if openAIConfig.Temperature != "" { - temp, err := strconv.ParseFloat(openAIConfig.Temperature, 64) - if err == nil { - config.Temperature = temp - } - } - - if openAIConfig.TopP != "" { - topP, err := strconv.ParseFloat(openAIConfig.TopP, 64) - if err == nil { - config.TopP = topP - } - } - - if openAIConfig.FrequencyPenalty != "" { - freqP, err := strconv.ParseFloat(openAIConfig.FrequencyPenalty, 64) - if err == nil { - config.FrequencyPenalty = freqP - } - } - - if openAIConfig.PresencePenalty != "" { - presP, err := strconv.ParseFloat(openAIConfig.PresencePenalty, 64) - if err == nil { - config.PresencePenalty = presP - } - } - } - - config.DefaultHeaders = modelConfig.Spec.DefaultHeaders - return &api.Component{ - Provider: "autogen_ext.models.openai.OpenAIChatCompletionClient", - ComponentType: "model", - Version: 1, - Config: api.MustToConfig(config), - }, nil - - case v1alpha1.Ollama: - config := &api.OllamaClientConfiguration{ - OllamaCreateArguments: api.OllamaCreateArguments{ - Model: modelConfig.Spec.Model, - Host: modelConfig.Spec.Ollama.Host, - }, - ModelInfo: translateModelInfo(modelConfig.Spec.ModelInfo), - FollowRedirects: true, - } - - if modelConfig.Spec.Ollama != nil { - ollamaConfig := modelConfig.Spec.Ollama - - if ollamaConfig.Options != nil { - config.Options = ollamaConfig.Options - } - } - - config.Headers = modelConfig.Spec.DefaultHeaders - return &api.Component{ - Provider: "autogen_ext.models.ollama.OllamaChatCompletionClient", - ComponentType: "model", - Version: 1, - Config: api.MustToConfig(config), - }, nil - - case v1alpha1.AnthropicVertexAI: - var config *api.AnthropicVertexAIConfig - - creds, err := a.getModelConfigGoogleApplicationCredentials(ctx, modelConfig) - if err != nil { - return nil, err - } - - config = &api.AnthropicVertexAIConfig{ - BaseVertexAIConfig: api.BaseVertexAIConfig{ - Model: modelConfig.Spec.Model, - ProjectID: modelConfig.Spec.AnthropicVertexAI.ProjectID, - Location: modelConfig.Spec.AnthropicVertexAI.Location, - Credentials: creds, - }, - } - - if modelConfig.Spec.AnthropicVertexAI != nil { - anthropicVertexAIConfig := modelConfig.Spec.AnthropicVertexAI - - if anthropicVertexAIConfig.MaxTokens > 0 { - config.MaxTokens = &anthropicVertexAIConfig.MaxTokens - } - - if anthropicVertexAIConfig.Temperature != "" { - temp, err := strconv.ParseFloat(anthropicVertexAIConfig.Temperature, 64) - if err == nil { - config.Temperature = &temp - } - } - - if anthropicVertexAIConfig.TopP != "" { - topP, err := strconv.ParseFloat(anthropicVertexAIConfig.TopP, 64) - if err == nil { - config.TopP = &topP - } - } - - if anthropicVertexAIConfig.TopK != "" { - topK, err := strconv.ParseFloat(anthropicVertexAIConfig.TopK, 64) - if err == nil { - 
config.TopK = &topK - } - } - - if anthropicVertexAIConfig.StopSequences != nil { - config.StopSequences = &anthropicVertexAIConfig.StopSequences - } - } - - return &api.Component{ - Provider: "kagent.models.vertexai.AnthropicVertexAIChatCompletionClient", - ComponentType: "model", - Version: 1, - Config: api.MustToConfig(config), - }, nil - - case v1alpha1.GeminiVertexAI: - var config *api.GeminiVertexAIConfig - - creds, err := a.getModelConfigGoogleApplicationCredentials(ctx, modelConfig) - if err != nil { - return nil, err - } - - config = &api.GeminiVertexAIConfig{ - BaseVertexAIConfig: api.BaseVertexAIConfig{ - Model: modelConfig.Spec.Model, - ProjectID: modelConfig.Spec.GeminiVertexAI.ProjectID, - Location: modelConfig.Spec.GeminiVertexAI.Location, - Credentials: creds, - }, - } - - if modelConfig.Spec.GeminiVertexAI != nil { - geminiVertexAIConfig := modelConfig.Spec.GeminiVertexAI - - if geminiVertexAIConfig.MaxOutputTokens > 0 { - config.MaxOutputTokens = &geminiVertexAIConfig.MaxOutputTokens - } - - if geminiVertexAIConfig.Temperature != "" { - temp, err := strconv.ParseFloat(geminiVertexAIConfig.Temperature, 64) - if err == nil { - config.Temperature = &temp - } - } - - if geminiVertexAIConfig.TopP != "" { - topP, err := strconv.ParseFloat(geminiVertexAIConfig.TopP, 64) - if err == nil { - config.TopP = &topP - } - } - - if geminiVertexAIConfig.TopK != "" { - topK, err := strconv.ParseFloat(geminiVertexAIConfig.TopK, 64) - if err == nil { - config.TopK = &topK - } - } - - if geminiVertexAIConfig.StopSequences != nil { - config.StopSequences = &geminiVertexAIConfig.StopSequences - } - - if geminiVertexAIConfig.CandidateCount > 0 { - config.CandidateCount = &geminiVertexAIConfig.CandidateCount - } - - if geminiVertexAIConfig.ResponseMimeType != "" { - config.ResponseMimeType = &geminiVertexAIConfig.ResponseMimeType - } - } - - return &api.Component{ - Provider: "kagent.models.vertexai.GeminiVertexAIChatCompletionClient", - ComponentType: "model", - Version: 1, - Config: api.MustToConfig(config), - }, nil - - default: - return nil, fmt.Errorf("unsupported model provider: %s", modelConfig.Spec.Provider) - } -} - -func translateModelInfo(modelInfo *v1alpha1.ModelInfo) *api.ModelInfo { - if modelInfo == nil { - return nil - } - - return &api.ModelInfo{ - Vision: modelInfo.Vision, - FunctionCalling: modelInfo.FunctionCalling, - JSONOutput: modelInfo.JSONOutput, - Family: modelInfo.Family, - StructuredOutput: modelInfo.StructuredOutput, - MultipleSystemMessages: modelInfo.MultipleSystemMessages, - } -} - -func (a *apiTranslator) getSecretKey(ctx context.Context, secretRef string, secretKey string, namespace string) ([]byte, error) { - secret := &corev1.Secret{} - if err := common.GetObject( - ctx, - a.kube, - secret, - secretRef, - namespace, - ); err != nil { - return nil, fmt.Errorf("failed to fetch secret %s/%s: %w", namespace, secretRef, err) - } - - if secret.Data == nil { - return nil, fmt.Errorf("secret data not found in %s/%s", namespace, secretRef) - } - - value, ok := secret.Data[secretKey] - if !ok { - return nil, fmt.Errorf("key %s not found in secret %s/%s", secretKey, namespace, secretRef) - } - - return value, nil -} - -func (a *apiTranslator) getMemoryApiKey(ctx context.Context, memory *v1alpha1.Memory) ([]byte, error) { - return a.getSecretKey(ctx, memory.Spec.APIKeySecretRef, memory.Spec.APIKeySecretKey, memory.Namespace) -} - -func (a *apiTranslator) getModelConfigGoogleApplicationCredentials(ctx context.Context, modelConfig *v1alpha1.ModelConfig) 
(map[string]interface{}, error) { - googleApplicationCredentialsSecret := &corev1.Secret{} - err := common.GetObject( - ctx, - a.kube, - googleApplicationCredentialsSecret, - modelConfig.Spec.APIKeySecretRef, - modelConfig.Namespace, - ) - if err != nil { - return nil, err - } - - if googleApplicationCredentialsSecret.Data == nil { - return nil, fmt.Errorf("google application credentials secret data not found") - } - - googleApplicationCredentialsBytes, ok := googleApplicationCredentialsSecret.Data[modelConfig.Spec.APIKeySecretKey] - if !ok { - return nil, fmt.Errorf("google application credentials not found") - } - - var credsMap map[string]interface{} - err = json.Unmarshal(googleApplicationCredentialsBytes, &credsMap) - if err != nil { - return nil, fmt.Errorf("failed to unmarshal google application credentials into map: %w", err) - } - - return credsMap, nil -} - -func (a *apiTranslator) getModelConfigApiKey(ctx context.Context, modelConfig *v1alpha1.ModelConfig) ([]byte, error) { - return a.getSecretKey(ctx, modelConfig.Spec.APIKeySecretRef, modelConfig.Spec.APIKeySecretKey, modelConfig.Namespace) -} diff --git a/go/controller/translator/testdata/README.md b/go/controller/translator/testdata/README.md index e29179668..10e0236b6 100644 --- a/go/controller/translator/testdata/README.md +++ b/go/controller/translator/testdata/README.md @@ -1,6 +1,6 @@ -# Autogen API Translator Golden Tests +# API Translator Golden Tests -This directory contains golden tests for the autogen API translator. Golden tests are a type of test where the expected output is stored in files and compared against the actual output. +This directory contains golden tests for the API translator. Golden tests are a type of test where the expected output is stored in files and compared against the actual output. 
 ## Structure
 
@@ -57,17 +57,17 @@ To add a new test case:
 
 ### Run all golden tests:
 ```bash
-go test -run TestGoldenAutogenTranslator ./go/controller/internal/autogen/
+go test -run TestGoldenAdkTranslator ./go/controller/translator/
 ```
 
 ### Update golden files (regenerate expected outputs):
 ```bash
-UPDATE_GOLDEN=true go test -run TestGoldenAutogenTranslator ./go/controller/internal/autogen/
+UPDATE_GOLDEN=true go test -run TestGoldenAdkTranslator ./go/controller/translator/
 ```
 
 ### Run specific test:
 ```bash
-go test -run TestGoldenAutogenTranslator/basic_agent ./go/controller/internal/autogen/
+go test -run TestGoldenAdkTranslator/basic_agent ./go/controller/translator/
 ```
 
 ## Test Coverage
diff --git a/go/controller/translator/testdata/inputs/agent_with_cross_ns_agent.yaml b/go/controller/translator/testdata/inputs/agent_with_cross_ns_agent.yaml
index 0579e44b5..8c5e9bd31 100644
--- a/go/controller/translator/testdata/inputs/agent_with_cross_ns_agent.yaml
+++ b/go/controller/translator/testdata/inputs/agent_with_cross_ns_agent.yaml
@@ -36,7 +36,7 @@ objects:
     spec:
       provider: OpenAI
       model: gpt-3.5-turbo
-      apiKeySecretRef: test-provider/openai-secret
+      apiKeySecretRef: openai-secret
       apiKeySecretKey: api-key
   - apiVersion: kagent.dev/v1alpha1
     kind: Agent
diff --git a/go/controller/translator/testdata/inputs/agent_with_cross_ns_resources.yaml b/go/controller/translator/testdata/inputs/agent_with_cross_ns_resources.yaml
deleted file mode 100644
index 7c1f0c054..000000000
--- a/go/controller/translator/testdata/inputs/agent_with_cross_ns_resources.yaml
+++ /dev/null
@@ -1,55 +0,0 @@
-operation: translateAgent
-targetObject: agent-with-cross-ns-resources
-namespace: test-agent
-objects:
-  - apiVersion: v1
-    kind: Secret
-    metadata:
-      name: openai-secret
-      namespace: test-model
-    data:
-      api-key: c2stdGVzdC1hcGkta2V5 # base64 encoded "sk-test-api-key"
-  - apiVersion: v1
-    kind: Secret
-    metadata:
-      name: pinecone-secret
-      namespace: test-memory
-    data:
-      api-key: cGluZWNvbmUtYXBpLWtleQ== # base64 encoded "pinecone-api-key"
-  - apiVersion: kagent.dev/v1alpha1
-    kind: ModelConfig
-    metadata:
-      name: cross-ns-model
-      namespace: test-model
-    spec:
-      provider: OpenAI
-      model: gpt-4o
-      apiKeySecretRef: openai-secret
-      apiKeySecretKey: api-key
-  - apiVersion: kagent.dev/v1alpha1
-    kind: Memory
-    metadata:
-      name: cross-ns-memory
-      namespace: test-memory
-    spec:
-      provider: Pinecone
-      apiKeySecretRef: pinecone-secret
-      apiKeySecretKey: api-key
-      pinecone:
-        indexHost: "https://test-index.pinecone.io"
-        topK: 5
-        namespace: "test-namespace"
-        recordFields: ["content", "metadata"]
-        scoreThreshold: "0.7"
-  - apiVersion: kagent.dev/v1alpha1
-    kind: Agent
-    metadata:
-      name: agent-with-cross-ns-resources
-      namespace: test-agent
-    spec:
-      description: An agent with vector memory and model from different namespaces
-      systemMessage: You are an assistant with access to cross-namespace resources.
- modelConfig: test-model/cross-ns-model - memory: - - test-memory/cross-ns-memory - tools: [] diff --git a/go/controller/translator/testdata/inputs/agent_with_http_toolserver.yaml b/go/controller/translator/testdata/inputs/agent_with_http_toolserver.yaml new file mode 100644 index 000000000..8660a5a03 --- /dev/null +++ b/go/controller/translator/testdata/inputs/agent_with_http_toolserver.yaml @@ -0,0 +1,55 @@ +operation: translateAgent +targetObject: agent +namespace: test +objects: + - apiVersion: v1 + kind: Secret + metadata: + name: openai-secret + namespace: test + data: + api-key: c2stdGVzdC1hcGkta2V5 # base64 encoded "sk-test-api-key" + - apiVersion: v1 + kind: Secret + metadata: + name: math-secret + namespace: test + data: + math: c2stdGVzdC1hcGkta2V5 # base64 encoded "sk-test-api-key" + - apiVersion: kagent.dev/v1alpha1 + kind: ModelConfig + metadata: + name: nested-model + namespace: test + spec: + provider: OpenAI + model: gpt-4o + apiKeySecretRef: openai-secret + apiKeySecretKey: api-key + - apiVersion: kagent.dev/v1alpha1 + kind: Agent + metadata: + name: agent + namespace: test + spec: + description: A toolserver for math problems + systemMessage: You are a math toolserver. Focus on solving mathematical problems step by step. + modelConfig: nested-model + tools: + - type: MCPServer + mcpServer: + toolServer: toolserver + toolNames: + - k8s_get_resources + - apiVersion: kagent.dev/v1alpha1 + kind: ToolServer + metadata: + name: toolserver + namespace: test + spec: + config: + streamableHttp: + sseReadTimeout: 5m0s + timeout: 30s + url: http://localhost:8084/mcp + description: "KAgent Tool Server" diff --git a/go/controller/translator/testdata/inputs/agent_with_memory.yaml b/go/controller/translator/testdata/inputs/agent_with_memory.yaml deleted file mode 100644 index fc4049c67..000000000 --- a/go/controller/translator/testdata/inputs/agent_with_memory.yaml +++ /dev/null @@ -1,55 +0,0 @@ -operation: translateAgent -targetObject: agent-with-memory -namespace: test -objects: - - apiVersion: v1 - kind: Secret - metadata: - name: openai-secret - namespace: test - data: - api-key: c2stdGVzdC1hcGkta2V5 # base64 encoded "sk-test-api-key" - - apiVersion: v1 - kind: Secret - metadata: - name: pinecone-secret - namespace: test - data: - api-key: cGluZWNvbmUtYXBpLWtleQ== # base64 encoded "pinecone-api-key" - - apiVersion: kagent.dev/v1alpha1 - kind: ModelConfig - metadata: - name: memory-model - namespace: test - spec: - provider: OpenAI - model: gpt-4o - apiKeySecretRef: openai-secret - apiKeySecretKey: api-key - - apiVersion: kagent.dev/v1alpha1 - kind: Memory - metadata: - name: vector-memory - namespace: test - spec: - provider: Pinecone - apiKeySecretRef: pinecone-secret - apiKeySecretKey: api-key - pinecone: - indexHost: "https://test-index.pinecone.io" - topK: 5 - namespace: "test-namespace" - recordFields: ["content", "metadata"] - scoreThreshold: "0.7" - - apiVersion: kagent.dev/v1alpha1 - kind: Agent - metadata: - name: agent-with-memory - namespace: test - spec: - description: An agent with vector memory - systemMessage: You are an assistant with access to long-term memory. 
- modelConfig: memory-model - memory: - - vector-memory - tools: [] \ No newline at end of file diff --git a/go/controller/translator/testdata/outputs/agent_with_cross_ns_agent.json b/go/controller/translator/testdata/outputs/agent_with_cross_ns_agent.json index ba3c0e44f..fb804f76a 100644 --- a/go/controller/translator/testdata/outputs/agent_with_cross_ns_agent.json +++ b/go/controller/translator/testdata/outputs/agent_with_cross_ns_agent.json @@ -1,145 +1,313 @@ { - "CreatedAt": "0001-01-01T00:00:00Z", - "DeletedAt": null, - "ID": 0, - "UpdatedAt": "0001-01-01T00:00:00Z", - "component": { - "component_type": "team", - "component_version": 0, - "config": { - "participants": [ - { - "component_type": "agent", - "component_version": 0, - "config": { - "description": "An agent that uses another agent from a different namespace", - "model_client": { - "component_type": "model", - "component_version": 0, - "config": { - "api_key": "sk-test-api-key", - "model": "gpt-3.5-turbo", - "stream_options": { - "include_usage": true - } - }, - "description": "", - "label": "", - "provider": "autogen_ext.models.openai.OpenAIChatCompletionClient", - "version": 1 - }, - "model_client_stream": true, - "model_context": { - "component_type": "chat_completion_context", - "component_version": 0, - "config": {}, - "description": "An unbounded chat completion context that keeps a view of the all the messages.", - "label": "UnboundedChatCompletionContext", - "provider": "autogen_core.model_context.UnboundedChatCompletionContext", - "version": 1 - }, - "name": "test_consumer__NS__agent_with_cross_ns_agent", - "reflect_on_tool_use": false, - "system_message": "You are an assistant that can delegate to other agents.", - "tool_call_summary_format": "\nTool: \n{tool_name}\n\nArguments:\n\n{arguments}\n\nResult: \n{result}\n", - "tools": [ + "config": { + "agent_card": { + "capabilities": { + "pushNotifications": false, + "stateTransitionHistory": true, + "streaming": true + }, + "defaultInputModes": [ + "text" + ], + "defaultOutputModes": [ + "text" + ], + "description": "An agent that uses another agent from a different namespace", + "name": "agent-with-cross-ns-agent", + "skills": null, + "url": "http://agent-with-cross-ns-agent.test-consumer.svc:8080", + "version": "" + }, + "agents": [ + { + "agent_card": { + "capabilities": { + "pushNotifications": false, + "stateTransitionHistory": true, + "streaming": true + }, + "defaultInputModes": [ + "text" + ], + "defaultOutputModes": [ + "text" + ], + "description": "A provider agent with vector memory", + "name": "provider-agent", + "skills": null, + "url": "http://provider-agent.test-provider.svc:8080", + "version": "" + }, + "agents": null, + "description": "A provider agent with vector memory", + "http_tools": null, + "instruction": "You are a provider assistant with access to memory.", + "kagent_url": "http://kagent-controller.kagent.svc:8083", + "model": { + "base_url": "", + "model": "gpt-4o", + "type": "openai" + }, + "name": "test_provider__NS__provider_agent", + "sse_tools": null + } + ], + "description": "An agent that uses another agent from a different namespace", + "http_tools": null, + "instruction": "You are an assistant that can delegate to other agents.", + "kagent_url": "http://kagent-controller.kagent.svc:8083", + "model": { + "base_url": "", + "model": "gpt-3.5-turbo", + "type": "openai" + }, + "name": "test_consumer__NS__agent_with_cross_ns_agent", + "sse_tools": null + }, + "configHash": [ + 191, + 192, + 25, + 252, + 4, + 170, + 63, + 220, + 195, + 
140, + 194, + 249, + 133, + 102, + 241, + 161, + 117, + 152, + 64, + 230, + 184, + 200, + 89, + 0, + 122, + 87, + 186, + 123, + 57, + 117, + 214, + 108 + ], + "manifest": [ + { + "apiVersion": "v1", + "kind": "ServiceAccount", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "agent-with-cross-ns-agent" + }, + "name": "agent-with-cross-ns-agent", + "namespace": "test-consumer", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "agent-with-cross-ns-agent", + "uid": "" + } + ] + } + }, + { + "apiVersion": "v1", + "data": { + "config.json": "{\"kagent_url\":\"http://kagent-controller.kagent.svc:8083\",\"agent_card\":{\"name\":\"agent-with-cross-ns-agent\",\"description\":\"An agent that uses another agent from a different namespace\",\"url\":\"http://agent-with-cross-ns-agent.test-consumer.svc:8080\",\"version\":\"\",\"capabilities\":{\"streaming\":true,\"pushNotifications\":false,\"stateTransitionHistory\":true},\"defaultInputModes\":[\"text\"],\"defaultOutputModes\":[\"text\"],\"skills\":[]},\"name\":\"test_consumer__NS__agent_with_cross_ns_agent\",\"model\":{\"base_url\":\"\",\"model\":\"gpt-3.5-turbo\",\"type\":\"openai\"},\"description\":\"An agent that uses another agent from a different namespace\",\"instruction\":\"You are an assistant that can delegate to other agents.\",\"http_tools\":null,\"sse_tools\":null,\"agents\":[{\"kagent_url\":\"http://kagent-controller.kagent.svc:8083\",\"agent_card\":{\"name\":\"provider-agent\",\"description\":\"A provider agent with vector memory\",\"url\":\"http://provider-agent.test-provider.svc:8080\",\"version\":\"\",\"capabilities\":{\"streaming\":true,\"pushNotifications\":false,\"stateTransitionHistory\":true},\"defaultInputModes\":[\"text\"],\"defaultOutputModes\":[\"text\"],\"skills\":[]},\"name\":\"test_provider__NS__provider_agent\",\"model\":{\"base_url\":\"\",\"model\":\"gpt-4o\",\"type\":\"openai\"},\"description\":\"A provider agent with vector memory\",\"instruction\":\"You are a provider assistant with access to memory.\",\"http_tools\":null,\"sse_tools\":null,\"agents\":null}]}" + }, + "kind": "ConfigMap", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "agent-with-cross-ns-agent" + }, + "name": "agent-with-cross-ns-agent", + "namespace": "test-consumer", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "agent-with-cross-ns-agent", + "uid": "" + } + ] + } + }, + { + "apiVersion": "apps/v1", + "kind": "Deployment", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "agent-with-cross-ns-agent" + }, + "name": "agent-with-cross-ns-agent", + "namespace": "test-consumer", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "agent-with-cross-ns-agent", + "uid": "" + } + ] + }, + "spec": { + "replicas": 1, + "selector": { + "matchLabels": { + "app": "kagent", + "kagent": "agent-with-cross-ns-agent" + } + }, + "strategy": {}, + "template": { + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "config.kagent.dev/hash": "13817072226973401052", + "kagent": "agent-with-cross-ns-agent" + } + }, + "spec": { + "containers": [ { - "component_type": "tool", - "component_version": 0, - "config": { - "description": "A 
provider agent with vector memory", - "name": "test_provider__NS__provider_agent", - "team": { - "component_type": "team", - "component_version": 0, - "config": { - "participants": [ - { - "component_type": "agent", - "component_version": 0, - "config": { - "description": "A provider agent with vector memory", - "model_client": { - "component_type": "model", - "component_version": 0, - "config": { - "api_key": "sk-test-api-key", - "model": "gpt-4o" - }, - "description": "", - "label": "", - "provider": "autogen_ext.models.openai.OpenAIChatCompletionClient", - "version": 1 - }, - "model_client_stream": false, - "model_context": { - "component_type": "chat_completion_context", - "component_version": 0, - "config": {}, - "description": "An unbounded chat completion context that keeps a view of the all the messages.", - "label": "UnboundedChatCompletionContext", - "provider": "autogen_core.model_context.UnboundedChatCompletionContext", - "version": 1 - }, - "name": "test_provider__NS__provider_agent", - "reflect_on_tool_use": false, - "system_message": "You are a provider assistant with access to memory.", - "tool_call_summary_format": "\nTool: \n{tool_name}\n\nArguments:\n\n{arguments}\n\nResult: \n{result}\n", - "tools": null - }, - "description": "A provider agent with vector memory", - "label": "", - "provider": "autogen_agentchat.agents.AssistantAgent", - "version": 1 - } - ], - "termination_condition": { - "component_type": "termination", - "component_version": 0, - "config": { - "source": "test_provider__NS__provider_agent" - }, - "description": "", - "label": "", - "provider": "kagent.conditions.FinalTextMessageTermination", - "version": 1 + "command": [ + "kagent", + "static", + "--host", + "0.0.0.0", + "--port", + "8080", + "--filepath", + "/config/config.json" + ], + "env": [ + { + "name": "OPENAI_API_KEY", + "valueFrom": { + "secretKeyRef": { + "key": "api-key", + "name": "openai-secret" + } + } + }, + { + "name": "KAGENT_NAMESPACE", + "valueFrom": { + "fieldRef": { + "fieldPath": "metadata.namespace" } - }, - "description": "A provider agent with vector memory", - "label": "test-provider/provider-agent", - "provider": "autogen_agentchat.teams.RoundRobinGroupChat", - "version": 1 + } } + ], + "image": "cr.kagent.dev/kagent-dev/kagent/app:dev", + "imagePullPolicy": "IfNotPresent", + "name": "kagent", + "ports": [ + { + "containerPort": 8080, + "name": "http" + } + ], + "readinessProbe": { + "httpGet": { + "path": "/health", + "port": "http" + }, + "initialDelaySeconds": 15, + "periodSeconds": 3 }, - "description": "", - "label": "", - "provider": "autogen_agentchat.tools.TeamTool", - "version": 1 + "resources": { + "limits": { + "cpu": "1", + "memory": "1Gi" + }, + "requests": { + "cpu": "100m", + "memory": "256Mi" + } + }, + "volumeMounts": [ + { + "mountPath": "/config", + "name": "config" + } + ] + } + ], + "serviceAccountName": "agent-with-cross-ns-agent", + "volumes": [ + { + "configMap": { + "name": "agent-with-cross-ns-agent" + }, + "name": "config" } ] - }, - "description": "An agent that uses another agent from a different namespace", - "label": "", - "provider": "autogen_agentchat.agents.AssistantAgent", - "version": 1 + } } - ], - "termination_condition": { - "component_type": "termination", - "component_version": 0, - "config": { - "source": "test_consumer__NS__agent_with_cross_ns_agent" + }, + "status": {} + }, + { + "apiVersion": "v1", + "kind": "Service", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": 
"agent-with-cross-ns-agent" }, - "description": "", - "label": "", - "provider": "kagent.conditions.FinalTextMessageTermination", - "version": 1 + "name": "agent-with-cross-ns-agent", + "namespace": "test-consumer", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "agent-with-cross-ns-agent", + "uid": "" + } + ] + }, + "spec": { + "ports": [ + { + "name": "http", + "port": 8080, + "targetPort": 8080 + } + ], + "selector": { + "app": "kagent", + "kagent": "agent-with-cross-ns-agent" + }, + "type": "ClusterIP" + }, + "status": { + "loadBalancer": {} } - }, - "description": "An agent that uses another agent from a different namespace", - "label": "test-consumer/agent-with-cross-ns-agent", - "provider": "autogen_agentchat.teams.RoundRobinGroupChat", - "version": 1 - }, - "name": "test-consumer/agent-with-cross-ns-agent" + } + ] } \ No newline at end of file diff --git a/go/controller/translator/testdata/outputs/agent_with_cross_ns_resources.json b/go/controller/translator/testdata/outputs/agent_with_cross_ns_resources.json deleted file mode 100644 index 19ab312ce..000000000 --- a/go/controller/translator/testdata/outputs/agent_with_cross_ns_resources.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "CreatedAt": "0001-01-01T00:00:00Z", - "DeletedAt": null, - "ID": 0, - "UpdatedAt": "0001-01-01T00:00:00Z", - "component": { - "component_type": "team", - "component_version": 0, - "config": { - "participants": [ - { - "component_type": "agent", - "component_version": 0, - "config": { - "description": "An agent with vector memory and model from different namespaces", - "memory": [ - { - "component_type": "memory", - "component_version": 0, - "config": { - "api_key": "pinecone-api-key", - "index_host": "https://test-index.pinecone.io", - "namespace": "test-namespace", - "record_fields": [ - "content", - "metadata" - ], - "score_threshold": 0.699999988079071, - "top_k": 5 - }, - "description": "", - "label": "", - "provider": "kagent.memory.PineconeMemory", - "version": 1 - } - ], - "model_client": { - "component_type": "model", - "component_version": 0, - "config": { - "api_key": "sk-test-api-key", - "model": "gpt-4o", - "stream_options": { - "include_usage": true - } - }, - "description": "", - "label": "", - "provider": "autogen_ext.models.openai.OpenAIChatCompletionClient", - "version": 1 - }, - "model_client_stream": true, - "model_context": { - "component_type": "chat_completion_context", - "component_version": 0, - "config": {}, - "description": "An unbounded chat completion context that keeps a view of the all the messages.", - "label": "UnboundedChatCompletionContext", - "provider": "autogen_core.model_context.UnboundedChatCompletionContext", - "version": 1 - }, - "name": "test_agent__NS__agent_with_cross_ns_resources", - "reflect_on_tool_use": false, - "system_message": "You are an assistant with access to cross-namespace resources.", - "tool_call_summary_format": "\nTool: \n{tool_name}\n\nArguments:\n\n{arguments}\n\nResult: \n{result}\n", - "tools": null - }, - "description": "An agent with vector memory and model from different namespaces", - "label": "", - "provider": "autogen_agentchat.agents.AssistantAgent", - "version": 1 - } - ], - "termination_condition": { - "component_type": "termination", - "component_version": 0, - "config": { - "source": "test_agent__NS__agent_with_cross_ns_resources" - }, - "description": "", - "label": "", - "provider": "kagent.conditions.FinalTextMessageTermination", - 
"version": 1 - } - }, - "description": "An agent with vector memory and model from different namespaces", - "label": "test-agent/agent-with-cross-ns-resources", - "provider": "autogen_agentchat.teams.RoundRobinGroupChat", - "version": 1 - }, - "name": "test-agent/agent-with-cross-ns-resources" -} \ No newline at end of file diff --git a/go/controller/translator/testdata/outputs/agent_with_http_toolserver.json b/go/controller/translator/testdata/outputs/agent_with_http_toolserver.json new file mode 100644 index 000000000..717895588 --- /dev/null +++ b/go/controller/translator/testdata/outputs/agent_with_http_toolserver.json @@ -0,0 +1,292 @@ +{ + "config": { + "agent_card": { + "capabilities": { + "pushNotifications": false, + "stateTransitionHistory": true, + "streaming": true + }, + "defaultInputModes": [ + "text" + ], + "defaultOutputModes": [ + "text" + ], + "description": "A toolserver for math problems", + "name": "agent", + "skills": null, + "url": "http://agent.test.svc:8080", + "version": "" + }, + "agents": null, + "description": "A toolserver for math problems", + "http_tools": [ + { + "params": { + "headers": {}, + "sse_read_timeout": 300, + "timeout": 30, + "url": "http://localhost:8084/mcp" + }, + "tools": [ + "k8s_get_resources" + ] + } + ], + "instruction": "You are a math toolserver. Focus on solving mathematical problems step by step.", + "kagent_url": "http://kagent-controller.kagent.svc:8083", + "model": { + "base_url": "", + "model": "gpt-4o", + "type": "openai" + }, + "name": "test__NS__agent", + "sse_tools": null + }, + "configHash": [ + 6, + 37, + 107, + 15, + 35, + 154, + 38, + 177, + 198, + 193, + 243, + 63, + 108, + 228, + 230, + 94, + 60, + 23, + 180, + 218, + 11, + 108, + 238, + 39, + 144, + 13, + 221, + 4, + 86, + 223, + 90, + 159 + ], + "manifest": [ + { + "apiVersion": "v1", + "kind": "ServiceAccount", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "agent" + }, + "name": "agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "agent", + "uid": "" + } + ] + } + }, + { + "apiVersion": "v1", + "data": { + "config.json": "{\"kagent_url\":\"http://kagent-controller.kagent.svc:8083\",\"agent_card\":{\"name\":\"agent\",\"description\":\"A toolserver for math problems\",\"url\":\"http://agent.test.svc:8080\",\"version\":\"\",\"capabilities\":{\"streaming\":true,\"pushNotifications\":false,\"stateTransitionHistory\":true},\"defaultInputModes\":[\"text\"],\"defaultOutputModes\":[\"text\"],\"skills\":[]},\"name\":\"test__NS__agent\",\"model\":{\"base_url\":\"\",\"model\":\"gpt-4o\",\"type\":\"openai\"},\"description\":\"A toolserver for math problems\",\"instruction\":\"You are a math toolserver. 
Focus on solving mathematical problems step by step.\",\"http_tools\":[{\"params\":{\"url\":\"http://localhost:8084/mcp\",\"headers\":{},\"timeout\":30,\"sse_read_timeout\":300},\"tools\":[\"k8s_get_resources\"]}],\"sse_tools\":null,\"agents\":null}" + }, + "kind": "ConfigMap", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "agent" + }, + "name": "agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "agent", + "uid": "" + } + ] + } + }, + { + "apiVersion": "apps/v1", + "kind": "Deployment", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "agent" + }, + "name": "agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "agent", + "uid": "" + } + ] + }, + "spec": { + "replicas": 1, + "selector": { + "matchLabels": { + "app": "kagent", + "kagent": "agent" + } + }, + "strategy": {}, + "template": { + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "config.kagent.dev/hash": "442877851131848369", + "kagent": "agent" + } + }, + "spec": { + "containers": [ + { + "command": [ + "kagent", + "static", + "--host", + "0.0.0.0", + "--port", + "8080", + "--filepath", + "/config/config.json" + ], + "env": [ + { + "name": "OPENAI_API_KEY", + "valueFrom": { + "secretKeyRef": { + "key": "api-key", + "name": "openai-secret" + } + } + }, + { + "name": "KAGENT_NAMESPACE", + "valueFrom": { + "fieldRef": { + "fieldPath": "metadata.namespace" + } + } + } + ], + "image": "cr.kagent.dev/kagent-dev/kagent/app:dev", + "imagePullPolicy": "IfNotPresent", + "name": "kagent", + "ports": [ + { + "containerPort": 8080, + "name": "http" + } + ], + "readinessProbe": { + "httpGet": { + "path": "/health", + "port": "http" + }, + "initialDelaySeconds": 15, + "periodSeconds": 3 + }, + "resources": { + "limits": { + "cpu": "1", + "memory": "1Gi" + }, + "requests": { + "cpu": "100m", + "memory": "256Mi" + } + }, + "volumeMounts": [ + { + "mountPath": "/config", + "name": "config" + } + ] + } + ], + "serviceAccountName": "agent", + "volumes": [ + { + "configMap": { + "name": "agent" + }, + "name": "config" + } + ] + } + } + }, + "status": {} + }, + { + "apiVersion": "v1", + "kind": "Service", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "agent" + }, + "name": "agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "agent", + "uid": "" + } + ] + }, + "spec": { + "ports": [ + { + "name": "http", + "port": 8080, + "targetPort": 8080 + } + ], + "selector": { + "app": "kagent", + "kagent": "agent" + }, + "type": "ClusterIP" + }, + "status": { + "loadBalancer": {} + } + } + ] +} \ No newline at end of file diff --git a/go/controller/translator/testdata/outputs/agent_with_memory.json b/go/controller/translator/testdata/outputs/agent_with_memory.json deleted file mode 100644 index d1cc11358..000000000 --- a/go/controller/translator/testdata/outputs/agent_with_memory.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "CreatedAt": "0001-01-01T00:00:00Z", - "DeletedAt": null, - "ID": 0, - "UpdatedAt": "0001-01-01T00:00:00Z", - "component": { - "component_type": "team", - "component_version": 0, - "config": { - "participants": [ - { - 
"component_type": "agent", - "component_version": 0, - "config": { - "description": "An agent with vector memory", - "memory": [ - { - "component_type": "memory", - "component_version": 0, - "config": { - "api_key": "pinecone-api-key", - "index_host": "https://test-index.pinecone.io", - "namespace": "test-namespace", - "record_fields": [ - "content", - "metadata" - ], - "score_threshold": 0.699999988079071, - "top_k": 5 - }, - "description": "", - "label": "", - "provider": "kagent.memory.PineconeMemory", - "version": 1 - } - ], - "model_client": { - "component_type": "model", - "component_version": 0, - "config": { - "api_key": "sk-test-api-key", - "model": "gpt-4o", - "stream_options": { - "include_usage": true - } - }, - "description": "", - "label": "", - "provider": "autogen_ext.models.openai.OpenAIChatCompletionClient", - "version": 1 - }, - "model_client_stream": true, - "model_context": { - "component_type": "chat_completion_context", - "component_version": 0, - "config": {}, - "description": "An unbounded chat completion context that keeps a view of the all the messages.", - "label": "UnboundedChatCompletionContext", - "provider": "autogen_core.model_context.UnboundedChatCompletionContext", - "version": 1 - }, - "name": "test__NS__agent_with_memory", - "reflect_on_tool_use": false, - "system_message": "You are an assistant with access to long-term memory.", - "tool_call_summary_format": "\nTool: \n{tool_name}\n\nArguments:\n\n{arguments}\n\nResult: \n{result}\n", - "tools": null - }, - "description": "An agent with vector memory", - "label": "", - "provider": "autogen_agentchat.agents.AssistantAgent", - "version": 1 - } - ], - "termination_condition": { - "component_type": "termination", - "component_version": 0, - "config": { - "source": "test__NS__agent_with_memory" - }, - "description": "", - "label": "", - "provider": "kagent.conditions.FinalTextMessageTermination", - "version": 1 - } - }, - "description": "An agent with vector memory", - "label": "test/agent-with-memory", - "provider": "autogen_agentchat.teams.RoundRobinGroupChat", - "version": 1 - }, - "name": "test/agent-with-memory" -} \ No newline at end of file diff --git a/go/controller/translator/testdata/outputs/agent_with_nested_agent.json b/go/controller/translator/testdata/outputs/agent_with_nested_agent.json index ee7882a10..ed06ab395 100644 --- a/go/controller/translator/testdata/outputs/agent_with_nested_agent.json +++ b/go/controller/translator/testdata/outputs/agent_with_nested_agent.json @@ -1,145 +1,313 @@ { - "CreatedAt": "0001-01-01T00:00:00Z", - "DeletedAt": null, - "ID": 0, - "UpdatedAt": "0001-01-01T00:00:00Z", - "component": { - "component_type": "team", - "component_version": 0, - "config": { - "participants": [ - { - "component_type": "agent", - "component_version": 0, - "config": { - "description": "A parent agent that can delegate to specialists", - "model_client": { - "component_type": "model", - "component_version": 0, - "config": { - "api_key": "sk-test-api-key", - "model": "gpt-4o", - "stream_options": { - "include_usage": true - } - }, - "description": "", - "label": "", - "provider": "autogen_ext.models.openai.OpenAIChatCompletionClient", - "version": 1 - }, - "model_client_stream": true, - "model_context": { - "component_type": "chat_completion_context", - "component_version": 0, - "config": {}, - "description": "An unbounded chat completion context that keeps a view of the all the messages.", - "label": "UnboundedChatCompletionContext", - "provider": 
"autogen_core.model_context.UnboundedChatCompletionContext", - "version": 1 - }, - "name": "test__NS__parent_agent", - "reflect_on_tool_use": false, - "system_message": "You are a coordinating agent that can delegate tasks to specialists.", - "tool_call_summary_format": "\nTool: \n{tool_name}\n\nArguments:\n\n{arguments}\n\nResult: \n{result}\n", - "tools": [ + "config": { + "agent_card": { + "capabilities": { + "pushNotifications": false, + "stateTransitionHistory": true, + "streaming": true + }, + "defaultInputModes": [ + "text" + ], + "defaultOutputModes": [ + "text" + ], + "description": "A parent agent that can delegate to specialists", + "name": "parent-agent", + "skills": null, + "url": "http://parent-agent.test.svc:8080", + "version": "" + }, + "agents": [ + { + "agent_card": { + "capabilities": { + "pushNotifications": false, + "stateTransitionHistory": true, + "streaming": true + }, + "defaultInputModes": [ + "text" + ], + "defaultOutputModes": [ + "text" + ], + "description": "A specialist agent for math problems", + "name": "specialist-agent", + "skills": null, + "url": "http://specialist-agent.test.svc:8080", + "version": "" + }, + "agents": null, + "description": "A specialist agent for math problems", + "http_tools": null, + "instruction": "You are a math specialist. Focus on solving mathematical problems step by step.", + "kagent_url": "http://kagent-controller.kagent.svc:8083", + "model": { + "base_url": "", + "model": "gpt-4o", + "type": "openai" + }, + "name": "test__NS__specialist_agent", + "sse_tools": null + } + ], + "description": "A parent agent that can delegate to specialists", + "http_tools": null, + "instruction": "You are a coordinating agent that can delegate tasks to specialists.", + "kagent_url": "http://kagent-controller.kagent.svc:8083", + "model": { + "base_url": "", + "model": "gpt-4o", + "type": "openai" + }, + "name": "test__NS__parent_agent", + "sse_tools": null + }, + "configHash": [ + 36, + 159, + 178, + 63, + 62, + 190, + 1, + 88, + 140, + 52, + 10, + 201, + 94, + 231, + 216, + 76, + 9, + 49, + 29, + 99, + 77, + 75, + 116, + 25, + 236, + 67, + 150, + 208, + 11, + 5, + 28, + 49 + ], + "manifest": [ + { + "apiVersion": "v1", + "kind": "ServiceAccount", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "parent-agent" + }, + "name": "parent-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "parent-agent", + "uid": "" + } + ] + } + }, + { + "apiVersion": "v1", + "data": { + "config.json": "{\"kagent_url\":\"http://kagent-controller.kagent.svc:8083\",\"agent_card\":{\"name\":\"parent-agent\",\"description\":\"A parent agent that can delegate to specialists\",\"url\":\"http://parent-agent.test.svc:8080\",\"version\":\"\",\"capabilities\":{\"streaming\":true,\"pushNotifications\":false,\"stateTransitionHistory\":true},\"defaultInputModes\":[\"text\"],\"defaultOutputModes\":[\"text\"],\"skills\":[]},\"name\":\"test__NS__parent_agent\",\"model\":{\"base_url\":\"\",\"model\":\"gpt-4o\",\"type\":\"openai\"},\"description\":\"A parent agent that can delegate to specialists\",\"instruction\":\"You are a coordinating agent that can delegate tasks to specialists.\",\"http_tools\":null,\"sse_tools\":null,\"agents\":[{\"kagent_url\":\"http://kagent-controller.kagent.svc:8083\",\"agent_card\":{\"name\":\"specialist-agent\",\"description\":\"A specialist agent for math 
problems\",\"url\":\"http://specialist-agent.test.svc:8080\",\"version\":\"\",\"capabilities\":{\"streaming\":true,\"pushNotifications\":false,\"stateTransitionHistory\":true},\"defaultInputModes\":[\"text\"],\"defaultOutputModes\":[\"text\"],\"skills\":[]},\"name\":\"test__NS__specialist_agent\",\"model\":{\"base_url\":\"\",\"model\":\"gpt-4o\",\"type\":\"openai\"},\"description\":\"A specialist agent for math problems\",\"instruction\":\"You are a math specialist. Focus on solving mathematical problems step by step.\",\"http_tools\":null,\"sse_tools\":null,\"agents\":null}]}" + }, + "kind": "ConfigMap", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "parent-agent" + }, + "name": "parent-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "parent-agent", + "uid": "" + } + ] + } + }, + { + "apiVersion": "apps/v1", + "kind": "Deployment", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "parent-agent" + }, + "name": "parent-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "parent-agent", + "uid": "" + } + ] + }, + "spec": { + "replicas": 1, + "selector": { + "matchLabels": { + "app": "kagent", + "kagent": "parent-agent" + } + }, + "strategy": {}, + "template": { + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "config.kagent.dev/hash": "2639023891367723352", + "kagent": "parent-agent" + } + }, + "spec": { + "containers": [ { - "component_type": "tool", - "component_version": 0, - "config": { - "description": "A specialist agent for math problems", - "name": "test__NS__specialist_agent", - "team": { - "component_type": "team", - "component_version": 0, - "config": { - "participants": [ - { - "component_type": "agent", - "component_version": 0, - "config": { - "description": "A specialist agent for math problems", - "model_client": { - "component_type": "model", - "component_version": 0, - "config": { - "api_key": "sk-test-api-key", - "model": "gpt-4o" - }, - "description": "", - "label": "", - "provider": "autogen_ext.models.openai.OpenAIChatCompletionClient", - "version": 1 - }, - "model_client_stream": false, - "model_context": { - "component_type": "chat_completion_context", - "component_version": 0, - "config": {}, - "description": "An unbounded chat completion context that keeps a view of the all the messages.", - "label": "UnboundedChatCompletionContext", - "provider": "autogen_core.model_context.UnboundedChatCompletionContext", - "version": 1 - }, - "name": "test__NS__specialist_agent", - "reflect_on_tool_use": false, - "system_message": "You are a math specialist. 
Focus on solving mathematical problems step by step.", - "tool_call_summary_format": "\nTool: \n{tool_name}\n\nArguments:\n\n{arguments}\n\nResult: \n{result}\n", - "tools": null - }, - "description": "A specialist agent for math problems", - "label": "", - "provider": "autogen_agentchat.agents.AssistantAgent", - "version": 1 - } - ], - "termination_condition": { - "component_type": "termination", - "component_version": 0, - "config": { - "source": "test__NS__specialist_agent" - }, - "description": "", - "label": "", - "provider": "kagent.conditions.FinalTextMessageTermination", - "version": 1 + "command": [ + "kagent", + "static", + "--host", + "0.0.0.0", + "--port", + "8080", + "--filepath", + "/config/config.json" + ], + "env": [ + { + "name": "OPENAI_API_KEY", + "valueFrom": { + "secretKeyRef": { + "key": "api-key", + "name": "openai-secret" + } + } + }, + { + "name": "KAGENT_NAMESPACE", + "valueFrom": { + "fieldRef": { + "fieldPath": "metadata.namespace" } - }, - "description": "A specialist agent for math problems", - "label": "test/specialist-agent", - "provider": "autogen_agentchat.teams.RoundRobinGroupChat", - "version": 1 + } } + ], + "image": "cr.kagent.dev/kagent-dev/kagent/app:dev", + "imagePullPolicy": "IfNotPresent", + "name": "kagent", + "ports": [ + { + "containerPort": 8080, + "name": "http" + } + ], + "readinessProbe": { + "httpGet": { + "path": "/health", + "port": "http" + }, + "initialDelaySeconds": 15, + "periodSeconds": 3 }, - "description": "", - "label": "", - "provider": "autogen_agentchat.tools.TeamTool", - "version": 1 + "resources": { + "limits": { + "cpu": "1", + "memory": "1Gi" + }, + "requests": { + "cpu": "100m", + "memory": "256Mi" + } + }, + "volumeMounts": [ + { + "mountPath": "/config", + "name": "config" + } + ] + } + ], + "serviceAccountName": "parent-agent", + "volumes": [ + { + "configMap": { + "name": "parent-agent" + }, + "name": "config" } ] - }, - "description": "A parent agent that can delegate to specialists", - "label": "", - "provider": "autogen_agentchat.agents.AssistantAgent", - "version": 1 + } } - ], - "termination_condition": { - "component_type": "termination", - "component_version": 0, - "config": { - "source": "test__NS__parent_agent" + }, + "status": {} + }, + { + "apiVersion": "v1", + "kind": "Service", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "parent-agent" }, - "description": "", - "label": "", - "provider": "kagent.conditions.FinalTextMessageTermination", - "version": 1 + "name": "parent-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "parent-agent", + "uid": "" + } + ] + }, + "spec": { + "ports": [ + { + "name": "http", + "port": 8080, + "targetPort": 8080 + } + ], + "selector": { + "app": "kagent", + "kagent": "parent-agent" + }, + "type": "ClusterIP" + }, + "status": { + "loadBalancer": {} } - }, - "description": "A parent agent that can delegate to specialists", - "label": "test/parent-agent", - "provider": "autogen_agentchat.teams.RoundRobinGroupChat", - "version": 1 - }, - "name": "test/parent-agent" + } + ] } \ No newline at end of file diff --git a/go/controller/translator/testdata/outputs/anthropic_agent.json b/go/controller/translator/testdata/outputs/anthropic_agent.json index c03cc02f1..f83da1f0b 100644 --- a/go/controller/translator/testdata/outputs/anthropic_agent.json +++ b/go/controller/translator/testdata/outputs/anthropic_agent.json 
@@ -1,72 +1,280 @@ { - "CreatedAt": "0001-01-01T00:00:00Z", - "DeletedAt": null, - "ID": 0, - "UpdatedAt": "0001-01-01T00:00:00Z", - "component": { - "component_type": "team", - "component_version": 0, - "config": { - "participants": [ - { - "component_type": "agent", - "component_version": 0, - "config": { - "description": "An agent using Anthropic Claude", - "model_client": { - "component_type": "model", - "component_version": 0, - "config": { - "api_key": "anthropic-api-key", - "max_tokens": 4096, - "model": "claude-3-sonnet-20240229", - "temperature": 0.3, - "top_k": 40, - "top_p": 0.9 - }, - "description": "", - "label": "", - "provider": "autogen_ext.models.anthropic.AnthropicChatCompletionClient", - "version": 1 - }, - "model_client_stream": true, - "model_context": { - "component_type": "chat_completion_context", - "component_version": 0, - "config": {}, - "description": "An unbounded chat completion context that keeps a view of the all the messages.", - "label": "UnboundedChatCompletionContext", - "provider": "autogen_core.model_context.UnboundedChatCompletionContext", - "version": 1 - }, - "name": "test__NS__anthropic_agent", - "reflect_on_tool_use": false, - "system_message": "You are Claude, an AI assistant created by Anthropic.", - "tool_call_summary_format": "\nTool: \n{tool_name}\n\nArguments:\n\n{arguments}\n\nResult: \n{result}\n", - "tools": null - }, - "description": "An agent using Anthropic Claude", - "label": "", - "provider": "autogen_agentchat.agents.AssistantAgent", - "version": 1 - } + "config": { + "agent_card": { + "capabilities": { + "pushNotifications": false, + "stateTransitionHistory": true, + "streaming": true + }, + "defaultInputModes": [ + "text" ], - "termination_condition": { - "component_type": "termination", - "component_version": 0, - "config": { - "source": "test__NS__anthropic_agent" - }, - "description": "", - "label": "", - "provider": "kagent.conditions.FinalTextMessageTermination", - "version": 1 - } + "defaultOutputModes": [ + "text" + ], + "description": "An agent using Anthropic Claude", + "name": "anthropic-agent", + "skills": null, + "url": "http://anthropic-agent.test.svc:8080", + "version": "" }, + "agents": null, "description": "An agent using Anthropic Claude", - "label": "test/anthropic-agent", - "provider": "autogen_agentchat.teams.RoundRobinGroupChat", - "version": 1 + "http_tools": null, + "instruction": "You are Claude, an AI assistant created by Anthropic.", + "kagent_url": "http://kagent-controller.kagent.svc:8083", + "model": { + "base_url": "", + "model": "claude-3-sonnet-20240229", + "type": "anthropic" + }, + "name": "test__NS__anthropic_agent", + "sse_tools": null }, - "name": "test/anthropic-agent" + "configHash": [ + 195, + 189, + 233, + 59, + 60, + 72, + 46, + 191, + 148, + 32, + 249, + 148, + 77, + 175, + 182, + 168, + 109, + 228, + 220, + 20, + 29, + 103, + 96, + 243, + 43, + 51, + 162, + 101, + 230, + 163, + 248, + 166 + ], + "manifest": [ + { + "apiVersion": "v1", + "kind": "ServiceAccount", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "anthropic-agent" + }, + "name": "anthropic-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "anthropic-agent", + "uid": "" + } + ] + } + }, + { + "apiVersion": "v1", + "data": { + "config.json": 
"{\"kagent_url\":\"http://kagent-controller.kagent.svc:8083\",\"agent_card\":{\"name\":\"anthropic-agent\",\"description\":\"An agent using Anthropic Claude\",\"url\":\"http://anthropic-agent.test.svc:8080\",\"version\":\"\",\"capabilities\":{\"streaming\":true,\"pushNotifications\":false,\"stateTransitionHistory\":true},\"defaultInputModes\":[\"text\"],\"defaultOutputModes\":[\"text\"],\"skills\":[]},\"name\":\"test__NS__anthropic_agent\",\"model\":{\"base_url\":\"\",\"model\":\"claude-3-sonnet-20240229\",\"type\":\"anthropic\"},\"description\":\"An agent using Anthropic Claude\",\"instruction\":\"You are Claude, an AI assistant created by Anthropic.\",\"http_tools\":null,\"sse_tools\":null,\"agents\":null}" + }, + "kind": "ConfigMap", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "anthropic-agent" + }, + "name": "anthropic-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "anthropic-agent", + "uid": "" + } + ] + } + }, + { + "apiVersion": "apps/v1", + "kind": "Deployment", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "anthropic-agent" + }, + "name": "anthropic-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "anthropic-agent", + "uid": "" + } + ] + }, + "spec": { + "replicas": 1, + "selector": { + "matchLabels": { + "app": "kagent", + "kagent": "anthropic-agent" + } + }, + "strategy": {}, + "template": { + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "config.kagent.dev/hash": "14104686048617967295", + "kagent": "anthropic-agent" + } + }, + "spec": { + "containers": [ + { + "command": [ + "kagent", + "static", + "--host", + "0.0.0.0", + "--port", + "8080", + "--filepath", + "/config/config.json" + ], + "env": [ + { + "name": "ANTHROPIC_API_KEY", + "valueFrom": { + "secretKeyRef": { + "key": "api-key", + "name": "anthropic-secret" + } + } + }, + { + "name": "KAGENT_NAMESPACE", + "valueFrom": { + "fieldRef": { + "fieldPath": "metadata.namespace" + } + } + } + ], + "image": "cr.kagent.dev/kagent-dev/kagent/app:dev", + "imagePullPolicy": "IfNotPresent", + "name": "kagent", + "ports": [ + { + "containerPort": 8080, + "name": "http" + } + ], + "readinessProbe": { + "httpGet": { + "path": "/health", + "port": "http" + }, + "initialDelaySeconds": 15, + "periodSeconds": 3 + }, + "resources": { + "limits": { + "cpu": "1", + "memory": "1Gi" + }, + "requests": { + "cpu": "100m", + "memory": "256Mi" + } + }, + "volumeMounts": [ + { + "mountPath": "/config", + "name": "config" + } + ] + } + ], + "serviceAccountName": "anthropic-agent", + "volumes": [ + { + "configMap": { + "name": "anthropic-agent" + }, + "name": "config" + } + ] + } + } + }, + "status": {} + }, + { + "apiVersion": "v1", + "kind": "Service", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "anthropic-agent" + }, + "name": "anthropic-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "anthropic-agent", + "uid": "" + } + ] + }, + "spec": { + "ports": [ + { + "name": "http", + "port": 8080, + "targetPort": 8080 + } + ], + "selector": { + "app": "kagent", + "kagent": "anthropic-agent" + }, + "type": 
"ClusterIP" + }, + "status": { + "loadBalancer": {} + } + } + ] } \ No newline at end of file diff --git a/go/controller/translator/testdata/outputs/basic_agent.json b/go/controller/translator/testdata/outputs/basic_agent.json index 694c4f1a6..b60a036af 100644 --- a/go/controller/translator/testdata/outputs/basic_agent.json +++ b/go/controller/translator/testdata/outputs/basic_agent.json @@ -1,74 +1,280 @@ { - "CreatedAt": "0001-01-01T00:00:00Z", - "DeletedAt": null, - "ID": 0, - "UpdatedAt": "0001-01-01T00:00:00Z", - "component": { - "component_type": "team", - "component_version": 0, - "config": { - "participants": [ - { - "component_type": "agent", - "component_version": 0, - "config": { - "description": "A basic test agent", - "model_client": { - "component_type": "model", - "component_version": 0, - "config": { - "api_key": "sk-test-api-key", - "max_tokens": 1024, - "model": "gpt-4o", - "stream_options": { - "include_usage": true - }, - "temperature": 0.7, - "top_p": 0.95 - }, - "description": "", - "label": "", - "provider": "autogen_ext.models.openai.OpenAIChatCompletionClient", - "version": 1 - }, - "model_client_stream": true, - "model_context": { - "component_type": "chat_completion_context", - "component_version": 0, - "config": {}, - "description": "An unbounded chat completion context that keeps a view of the all the messages.", - "label": "UnboundedChatCompletionContext", - "provider": "autogen_core.model_context.UnboundedChatCompletionContext", - "version": 1 - }, - "name": "test__NS__basic_agent", - "reflect_on_tool_use": false, - "system_message": "You are a helpful assistant.", - "tool_call_summary_format": "\nTool: \n{tool_name}\n\nArguments:\n\n{arguments}\n\nResult: \n{result}\n", - "tools": null - }, - "description": "A basic test agent", - "label": "", - "provider": "autogen_agentchat.agents.AssistantAgent", - "version": 1 - } + "config": { + "agent_card": { + "capabilities": { + "pushNotifications": false, + "stateTransitionHistory": true, + "streaming": true + }, + "defaultInputModes": [ + "text" ], - "termination_condition": { - "component_type": "termination", - "component_version": 0, - "config": { - "source": "test__NS__basic_agent" - }, - "description": "", - "label": "", - "provider": "kagent.conditions.FinalTextMessageTermination", - "version": 1 - } + "defaultOutputModes": [ + "text" + ], + "description": "A basic test agent", + "name": "basic-agent", + "skills": null, + "url": "http://basic-agent.test.svc:8080", + "version": "" }, + "agents": null, "description": "A basic test agent", - "label": "test/basic-agent", - "provider": "autogen_agentchat.teams.RoundRobinGroupChat", - "version": 1 + "http_tools": null, + "instruction": "You are a helpful assistant.", + "kagent_url": "http://kagent-controller.kagent.svc:8083", + "model": { + "base_url": "", + "model": "gpt-4o", + "type": "openai" + }, + "name": "test__NS__basic_agent", + "sse_tools": null }, - "name": "test/basic-agent" + "configHash": [ + 246, + 144, + 96, + 168, + 86, + 173, + 114, + 171, + 33, + 166, + 132, + 169, + 232, + 79, + 105, + 242, + 221, + 68, + 177, + 46, + 63, + 132, + 114, + 85, + 205, + 53, + 96, + 206, + 215, + 67, + 174, + 12 + ], + "manifest": [ + { + "apiVersion": "v1", + "kind": "ServiceAccount", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "basic-agent" + }, + "name": "basic-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": 
"Agent", + "name": "basic-agent", + "uid": "" + } + ] + } + }, + { + "apiVersion": "v1", + "data": { + "config.json": "{\"kagent_url\":\"http://kagent-controller.kagent.svc:8083\",\"agent_card\":{\"name\":\"basic-agent\",\"description\":\"A basic test agent\",\"url\":\"http://basic-agent.test.svc:8080\",\"version\":\"\",\"capabilities\":{\"streaming\":true,\"pushNotifications\":false,\"stateTransitionHistory\":true},\"defaultInputModes\":[\"text\"],\"defaultOutputModes\":[\"text\"],\"skills\":[]},\"name\":\"test__NS__basic_agent\",\"model\":{\"base_url\":\"\",\"model\":\"gpt-4o\",\"type\":\"openai\"},\"description\":\"A basic test agent\",\"instruction\":\"You are a helpful assistant.\",\"http_tools\":null,\"sse_tools\":null,\"agents\":null}" + }, + "kind": "ConfigMap", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "basic-agent" + }, + "name": "basic-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "basic-agent", + "uid": "" + } + ] + } + }, + { + "apiVersion": "apps/v1", + "kind": "Deployment", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "basic-agent" + }, + "name": "basic-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "basic-agent", + "uid": "" + } + ] + }, + "spec": { + "replicas": 1, + "selector": { + "matchLabels": { + "app": "kagent", + "kagent": "basic-agent" + } + }, + "strategy": {}, + "template": { + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "config.kagent.dev/hash": "17766806806101586603", + "kagent": "basic-agent" + } + }, + "spec": { + "containers": [ + { + "command": [ + "kagent", + "static", + "--host", + "0.0.0.0", + "--port", + "8080", + "--filepath", + "/config/config.json" + ], + "env": [ + { + "name": "OPENAI_API_KEY", + "valueFrom": { + "secretKeyRef": { + "key": "api-key", + "name": "openai-secret" + } + } + }, + { + "name": "KAGENT_NAMESPACE", + "valueFrom": { + "fieldRef": { + "fieldPath": "metadata.namespace" + } + } + } + ], + "image": "cr.kagent.dev/kagent-dev/kagent/app:dev", + "imagePullPolicy": "IfNotPresent", + "name": "kagent", + "ports": [ + { + "containerPort": 8080, + "name": "http" + } + ], + "readinessProbe": { + "httpGet": { + "path": "/health", + "port": "http" + }, + "initialDelaySeconds": 15, + "periodSeconds": 3 + }, + "resources": { + "limits": { + "cpu": "1", + "memory": "1Gi" + }, + "requests": { + "cpu": "100m", + "memory": "256Mi" + } + }, + "volumeMounts": [ + { + "mountPath": "/config", + "name": "config" + } + ] + } + ], + "serviceAccountName": "basic-agent", + "volumes": [ + { + "configMap": { + "name": "basic-agent" + }, + "name": "config" + } + ] + } + } + }, + "status": {} + }, + { + "apiVersion": "v1", + "kind": "Service", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "basic-agent" + }, + "name": "basic-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "basic-agent", + "uid": "" + } + ] + }, + "spec": { + "ports": [ + { + "name": "http", + "port": 8080, + "targetPort": 8080 + } + ], + "selector": { + "app": "kagent", + "kagent": "basic-agent" + }, + "type": "ClusterIP" + }, + "status": { 
+ "loadBalancer": {} + } + } + ] } \ No newline at end of file diff --git a/go/controller/translator/testdata/outputs/ollama_agent.json b/go/controller/translator/testdata/outputs/ollama_agent.json index 9ecf63a1b..6f5a92567 100644 --- a/go/controller/translator/testdata/outputs/ollama_agent.json +++ b/go/controller/translator/testdata/outputs/ollama_agent.json @@ -1,86 +1,274 @@ { - "CreatedAt": "0001-01-01T00:00:00Z", - "DeletedAt": null, - "ID": 0, - "UpdatedAt": "0001-01-01T00:00:00Z", - "component": { - "component_type": "team", - "component_version": 0, - "config": { - "participants": [ - { - "component_type": "agent", - "component_version": 0, - "config": { - "description": "An agent using Ollama local model", - "model_client": { - "component_type": "model", - "component_version": 0, - "config": { - "follow_redirects": true, - "headers": { - "User-Agent": "kagent/1.0" + "config": { + "agent_card": { + "capabilities": { + "pushNotifications": false, + "stateTransitionHistory": true, + "streaming": true + }, + "defaultInputModes": [ + "text" + ], + "defaultOutputModes": [ + "text" + ], + "description": "An agent using Ollama local model", + "name": "ollama-agent", + "skills": null, + "url": "http://ollama-agent.test.svc:8080", + "version": "" + }, + "agents": null, + "description": "An agent using Ollama local model", + "http_tools": null, + "instruction": "You are a helpful AI assistant running locally via Ollama.", + "kagent_url": "http://kagent-controller.kagent.svc:8083", + "model": { + "model": "llama3.2:latest", + "type": "ollama" + }, + "name": "test__NS__ollama_agent", + "sse_tools": null + }, + "configHash": [ + 137, + 32, + 99, + 134, + 69, + 152, + 16, + 158, + 20, + 212, + 135, + 3, + 193, + 235, + 58, + 56, + 81, + 7, + 166, + 155, + 173, + 156, + 242, + 106, + 239, + 99, + 178, + 14, + 110, + 202, + 202, + 183 + ], + "manifest": [ + { + "apiVersion": "v1", + "kind": "ServiceAccount", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "ollama-agent" + }, + "name": "ollama-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "ollama-agent", + "uid": "" + } + ] + } + }, + { + "apiVersion": "v1", + "data": { + "config.json": "{\"kagent_url\":\"http://kagent-controller.kagent.svc:8083\",\"agent_card\":{\"name\":\"ollama-agent\",\"description\":\"An agent using Ollama local model\",\"url\":\"http://ollama-agent.test.svc:8080\",\"version\":\"\",\"capabilities\":{\"streaming\":true,\"pushNotifications\":false,\"stateTransitionHistory\":true},\"defaultInputModes\":[\"text\"],\"defaultOutputModes\":[\"text\"],\"skills\":[]},\"name\":\"test__NS__ollama_agent\",\"model\":{\"model\":\"llama3.2:latest\",\"type\":\"ollama\"},\"description\":\"An agent using Ollama local model\",\"instruction\":\"You are a helpful AI assistant running locally via Ollama.\",\"http_tools\":null,\"sse_tools\":null,\"agents\":null}" + }, + "kind": "ConfigMap", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "ollama-agent" + }, + "name": "ollama-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "ollama-agent", + "uid": "" + } + ] + } + }, + { + "apiVersion": "apps/v1", + "kind": "Deployment", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": 
"ollama-agent" + }, + "name": "ollama-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "ollama-agent", + "uid": "" + } + ] + }, + "spec": { + "replicas": 1, + "selector": { + "matchLabels": { + "app": "kagent", + "kagent": "ollama-agent" + } + }, + "strategy": {}, + "template": { + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "config.kagent.dev/hash": "9881007010795229342", + "kagent": "ollama-agent" + } + }, + "spec": { + "containers": [ + { + "command": [ + "kagent", + "static", + "--host", + "0.0.0.0", + "--port", + "8080", + "--filepath", + "/config/config.json" + ], + "env": [ + { + "name": "OLLAMA_API_BASE", + "value": "http://localhost:11434" + }, + { + "name": "KAGENT_NAMESPACE", + "valueFrom": { + "fieldRef": { + "fieldPath": "metadata.namespace" + } + } + } + ], + "image": "cr.kagent.dev/kagent-dev/kagent/app:dev", + "imagePullPolicy": "IfNotPresent", + "name": "kagent", + "ports": [ + { + "containerPort": 8080, + "name": "http" + } + ], + "readinessProbe": { + "httpGet": { + "path": "/health", + "port": "http" + }, + "initialDelaySeconds": 15, + "periodSeconds": 3 }, - "host": "http://localhost:11434", - "model": "llama3.2:latest", - "model_info": { - "family": "llama", - "function_calling": false, - "json_output": false, - "multiple_system_messages": false, - "structured_output": false, - "vision": false + "resources": { + "limits": { + "cpu": "1", + "memory": "1Gi" + }, + "requests": { + "cpu": "100m", + "memory": "256Mi" + } }, - "options": { - "num_ctx": "2048", - "temperature": "0.8", - "top_p": "0.9" + "volumeMounts": [ + { + "mountPath": "/config", + "name": "config" + } + ] + } + ], + "serviceAccountName": "ollama-agent", + "volumes": [ + { + "configMap": { + "name": "ollama-agent" }, - "timeout": 0 - }, - "description": "", - "label": "", - "provider": "autogen_ext.models.ollama.OllamaChatCompletionClient", - "version": 1 - }, - "model_client_stream": true, - "model_context": { - "component_type": "chat_completion_context", - "component_version": 0, - "config": {}, - "description": "An unbounded chat completion context that keeps a view of the all the messages.", - "label": "UnboundedChatCompletionContext", - "provider": "autogen_core.model_context.UnboundedChatCompletionContext", - "version": 1 - }, - "name": "test__NS__ollama_agent", - "reflect_on_tool_use": false, - "system_message": "You are a helpful AI assistant running locally via Ollama.", - "tool_call_summary_format": "\nTool: \n{tool_name}\n\nArguments:\n\n{arguments}\n\nResult: \n{result}\n", - "tools": null - }, - "description": "An agent using Ollama local model", - "label": "", - "provider": "autogen_agentchat.agents.AssistantAgent", - "version": 1 + "name": "config" + } + ] + } } - ], - "termination_condition": { - "component_type": "termination", - "component_version": 0, - "config": { - "source": "test__NS__ollama_agent" + }, + "status": {} + }, + { + "apiVersion": "v1", + "kind": "Service", + "metadata": { + "creationTimestamp": null, + "labels": { + "app": "kagent", + "kagent": "ollama-agent" + }, + "name": "ollama-agent", + "namespace": "test", + "ownerReferences": [ + { + "apiVersion": "kagent.dev/v1alpha1", + "blockOwnerDeletion": true, + "controller": true, + "kind": "Agent", + "name": "ollama-agent", + "uid": "" + } + ] + }, + "spec": { + "ports": [ + { + "name": "http", + "port": 8080, + "targetPort": 8080 + } + ], + "selector": { + 
"app": "kagent", + "kagent": "ollama-agent" }, - "description": "", - "label": "", - "provider": "kagent.conditions.FinalTextMessageTermination", - "version": 1 + "type": "ClusterIP" + }, + "status": { + "loadBalancer": {} } - }, - "description": "An agent using Ollama local model", - "label": "test/ollama-agent", - "provider": "autogen_agentchat.teams.RoundRobinGroupChat", - "version": 1 - }, - "name": "test/ollama-agent" + } + ] } \ No newline at end of file diff --git a/go/go.mod b/go/go.mod index 522724f39..2dcedbf52 100644 --- a/go/go.mod +++ b/go/go.mod @@ -81,6 +81,7 @@ require ( github.com/lestrrat-go/jwx/v2 v2.1.6 // indirect github.com/lestrrat-go/option v1.0.1 // indirect github.com/mailru/easyjson v0.9.0 // indirect + github.com/mark3labs/mcp-go v0.34.0 // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-runewidth v0.0.16 // indirect @@ -105,6 +106,7 @@ require ( github.com/stoewer/go-strcase v1.3.1 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/x448/float16 v0.8.4 // indirect + github.com/yosida95/uritemplate/v3 v3.0.2 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect go.opentelemetry.io/otel v1.36.0 // indirect diff --git a/go/go.sum b/go/go.sum index 8e722a373..e4811c2be 100644 --- a/go/go.sum +++ b/go/go.sum @@ -150,6 +150,8 @@ github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNB github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I= github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4= github.com/mailru/easyjson v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU= +github.com/mark3labs/mcp-go v0.34.0 h1:eWy7WBGvhk6EyAAyVzivTCprE52iXJwNtvHV6Cv3bR0= +github.com/mark3labs/mcp-go v0.34.0/go.mod h1:rXqOudj/djTORU/ThxYx8fqEVj/5pvTuuebQ2RC7uk4= github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= @@ -230,6 +232,8 @@ github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8 github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= +github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4= +github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= diff --git a/go/internal/a2a/a2a_handler_mux.go b/go/internal/a2a/a2a_handler_mux.go index 6dde21a2a..be8b9f6dd 100644 --- a/go/internal/a2a/a2a_handler_mux.go +++ b/go/internal/a2a/a2a_handler_mux.go @@ -6,21 +6,17 @@ import ( "strings" "sync" - "github.com/kagent-dev/kagent/go/internal/a2a/manager" common "github.com/kagent-dev/kagent/go/internal/utils" + "trpc.group/trpc-go/trpc-a2a-go/client" "trpc.group/trpc-go/trpc-a2a-go/server" ) -type A2AHandlerParams struct { - AgentCard server.AgentCard - 
TaskHandler MessageHandler -} - // A2AHandlerMux is an interface that defines methods for adding, getting, and removing agentic task handlers. type A2AHandlerMux interface { SetAgentHandler( agentRef string, - params *A2AHandlerParams, + client *client.A2AClient, + card server.AgentCard, ) error RemoveAgentHandler( agentRef string, @@ -32,31 +28,23 @@ type handlerMux struct { handlers map[string]http.Handler lock sync.RWMutex basePathPrefix string - storage manager.Storage } var _ A2AHandlerMux = &handlerMux{} -func NewA2AHttpMux(pathPrefix string, storage manager.Storage) *handlerMux { +func NewA2AHttpMux(pathPrefix string) *handlerMux { return &handlerMux{ handlers: make(map[string]http.Handler), basePathPrefix: pathPrefix, - storage: storage, } } func (a *handlerMux) SetAgentHandler( agentRef string, - params *A2AHandlerParams, + client *client.A2AClient, + card server.AgentCard, ) error { - processor := newA2AMessageProcessor(params.TaskHandler) - - // Create task manager and inject processor. - taskManager, err := manager.NewTaskManager(processor, a.storage) - if err != nil { - return fmt.Errorf("failed to create task manager: %w", err) - } - srv, err := server.NewA2AServer(params.AgentCard, taskManager) + srv, err := server.NewA2AServer(card, NewPassthroughManager(client)) if err != nil { return fmt.Errorf("failed to create A2A server: %w", err) } diff --git a/go/internal/a2a/a2a_task_processor.go b/go/internal/a2a/a2a_task_processor.go deleted file mode 100644 index 851a041c3..000000000 --- a/go/internal/a2a/a2a_task_processor.go +++ /dev/null @@ -1,151 +0,0 @@ -package a2a - -import ( - "context" - "fmt" - - "github.com/kagent-dev/kagent/go/internal/autogen/client" - "github.com/kagent-dev/kagent/go/internal/utils" - "k8s.io/utils/ptr" - ctrl "sigs.k8s.io/controller-runtime" - "trpc.group/trpc-go/trpc-a2a-go/protocol" - "trpc.group/trpc-go/trpc-a2a-go/taskmanager" -) - -var ( - processorLog = ctrl.Log.WithName("a2a_task_processor") -) - -type MessageHandler interface { - HandleMessage(ctx context.Context, task string, contextID *string) ([]client.Event, error) - HandleMessageStream(ctx context.Context, task string, contextID *string) (<-chan client.Event, error) -} - -type a2aMessageProcessor struct { - // msgHandler is a function that processes the input text. - // in production this is done by handing off the input text by a call to - // the underlying agentic framework (e.g.: autogen) - msgHandler MessageHandler -} - -var _ taskmanager.MessageProcessor = &a2aMessageProcessor{} - -// newA2AMessageProcessor creates a new A2A message processor. -func newA2AMessageProcessor(taskHandler MessageHandler) taskmanager.MessageProcessor { - return &a2aMessageProcessor{ - msgHandler: taskHandler, - } -} - -func (a *a2aMessageProcessor) ProcessMessage( - ctx context.Context, - message protocol.Message, - options taskmanager.ProcessOptions, - handle taskmanager.TaskHandler, -) (*taskmanager.MessageProcessingResult, error) { - - // Extract text from the incoming message. 
- text := ExtractText(message) - if text == "" { - err := fmt.Errorf("input message must contain text") - message := protocol.NewMessage( - protocol.MessageRoleAgent, - []protocol.Part{protocol.NewTextPart(err.Error())}, - ) - return &taskmanager.MessageProcessingResult{ - Result: &message, - }, nil - } - - taskID, err := handle.BuildTask(message.TaskID, message.ContextID) - if err != nil { - return nil, err - } - - processorLog.Info("Processing task", "taskID", taskID, "contextID", message.ContextID, "text", text) - - if !options.Streaming { - defer handle.CleanTask(&taskID) - - if err := handle.UpdateTaskState(&taskID, protocol.TaskStateWorking, &message); err != nil { - processorLog.Error(err, "Failed to update task state to working") - } - - // Process the input text (in this simple example, we'll just reverse it). - result, err := a.msgHandler.HandleMessage(ctx, text, message.ContextID) - if err != nil { - if err := handle.UpdateTaskState(&taskID, protocol.TaskStateFailed, &message); err != nil { - processorLog.Error(err, "Failed to update task state to failed") - } - - return &taskmanager.MessageProcessingResult{ - Result: buildError(err), - }, nil - } - - if err := handle.UpdateTaskState(&taskID, protocol.TaskStateCompleted, &message); err != nil { - processorLog.Error(err, "Failed to update task state to completed") - } - - textResult := client.GetLastStringMessage(result) - - // Create response message. - responseMessage := protocol.NewMessage( - protocol.MessageRoleAgent, - []protocol.Part{protocol.NewTextPart(textResult)}, - ) - - return &taskmanager.MessageProcessingResult{ - Result: &responseMessage, - }, nil - } - - taskSubscriber, err := handle.SubscribeTask(ptr.To(taskID)) - if err != nil { - return nil, err - } - - events, err := a.msgHandler.HandleMessageStream(ctx, text, message.ContextID) - if err != nil { - if err := handle.UpdateTaskState(&taskID, protocol.TaskStateFailed, &message); err != nil { - processorLog.Error(err, "Failed to update task state to failed") - } - - return nil, err - } - - go func() { - defer handle.CleanTask(&taskID) - - if err := handle.UpdateTaskState(&taskID, protocol.TaskStateWorking, &message); err != nil { - processorLog.Error(err, "Failed to update task state to working") - } - - for event := range events { - events := utils.ConvertAutogenEventsToMessages(&taskID, message.ContextID, event) - for _, event := range events { - event := protocol.StreamingMessageEvent{ - Result: event, - } - err := taskSubscriber.Send(event) - if err != nil { - processorLog.Error(err, "Failed to send event to task subscriber") - } - } - } - - if err := handle.UpdateTaskState(&taskID, protocol.TaskStateCompleted, &message); err != nil { - processorLog.Error(err, "Failed to update task state to completed") - } - }() - - return &taskmanager.MessageProcessingResult{ - StreamingEvents: taskSubscriber, - }, nil -} - -func buildError(err error) *protocol.Message { - return &protocol.Message{ - Parts: []protocol.Part{protocol.NewTextPart(err.Error())}, - } -} diff --git a/go/internal/a2a/manager.go b/go/internal/a2a/manager.go new file mode 100644 index 000000000..da092ad8f --- /dev/null +++ b/go/internal/a2a/manager.go @@ -0,0 +1,71 @@ +package a2a + +import ( + "context" + "errors" + + "trpc.group/trpc-go/trpc-a2a-go/client" + "trpc.group/trpc-go/trpc-a2a-go/protocol" + "trpc.group/trpc-go/trpc-a2a-go/taskmanager" +) + +type PassthroughManager struct { + client *client.A2AClient +} + +func NewPassthroughManager(client *client.A2AClient) taskmanager.TaskManager { + 
return &PassthroughManager{ + client: client, + } +} + +func (m *PassthroughManager) OnSendMessage(ctx context.Context, request protocol.SendMessageParams) (*protocol.MessageResult, error) { + if request.Message.MessageID == "" { + request.Message.MessageID = protocol.GenerateMessageID() + } + if request.Message.Kind == "" { + request.Message.Kind = protocol.KindMessage + } + return m.client.SendMessage(ctx, request) +} + +func (m *PassthroughManager) OnSendMessageStream(ctx context.Context, request protocol.SendMessageParams) (<-chan protocol.StreamingMessageEvent, error) { + if request.Message.MessageID == "" { + request.Message.MessageID = protocol.GenerateMessageID() + } + if request.Message.Kind == "" { + request.Message.Kind = protocol.KindMessage + } + return m.client.StreamMessage(ctx, request) +} + +func (m *PassthroughManager) OnGetTask(ctx context.Context, params protocol.TaskQueryParams) (*protocol.Task, error) { + return m.client.GetTasks(ctx, params) +} + +func (m *PassthroughManager) OnCancelTask(ctx context.Context, params protocol.TaskIDParams) (*protocol.Task, error) { + return m.client.CancelTasks(ctx, params) +} + +func (m *PassthroughManager) OnPushNotificationSet(ctx context.Context, params protocol.TaskPushNotificationConfig) (*protocol.TaskPushNotificationConfig, error) { + return m.client.SetPushNotification(ctx, params) +} + +func (m *PassthroughManager) OnPushNotificationGet(ctx context.Context, params protocol.TaskIDParams) (*protocol.TaskPushNotificationConfig, error) { + return m.client.GetPushNotification(ctx, params) +} + +func (m *PassthroughManager) OnResubscribe(ctx context.Context, params protocol.TaskIDParams) (<-chan protocol.StreamingMessageEvent, error) { + // TODO: Implement + return nil, nil +} + +// Deprecated: OnSendTask is deprecated and will be removed in the future. +func (m *PassthroughManager) OnSendTask(ctx context.Context, request protocol.SendTaskParams) (*protocol.Task, error) { + return nil, errors.New("OnSendTask is deprecated and will be removed in the future") +} + +// Deprecated: OnSendTaskSubscribe is deprecated and will be removed in the future. +func (m *PassthroughManager) OnSendTaskSubscribe(ctx context.Context, request protocol.SendTaskParams) (<-chan protocol.TaskEvent, error) { + return nil, errors.New("OnSendTaskSubscribe is deprecated and will be removed in the future") +} diff --git a/go/internal/a2a/manager/README.md b/go/internal/a2a/manager/README.md deleted file mode 100644 index 9471c6b50..000000000 --- a/go/internal/a2a/manager/README.md +++ /dev/null @@ -1,114 +0,0 @@ -# TaskManager Storage Refactoring - -This package contains the refactored TaskManager that uses a Storage interface for persistence instead of in-memory maps. - -## Overview - -The TaskManager has been refactored to support multiple storage backends through the `Storage` interface. This allows you to choose between in-memory storage for development/testing and persistent database storage for production. - -## Storage Interface - -The `Storage` interface defines methods for: -- **Messages**: Store, retrieve, and delete protocol messages -- **Conversations**: Manage conversation history and access tracking -- **Tasks**: Store and manage cancellable tasks -- **Push Notifications**: Handle push notification configurations -- **Cleanup**: Manage expired conversation cleanup - -## Storage Implementations - -### 1. 
MemoryStorage - -An in-memory implementation suitable for: -- Development and testing -- Single-instance deployments -- Scenarios where persistence is not required - -```go -storageOpts := DefaultStorageOptions() -manager, err := NewTaskManagerWithMemoryStorage(processor, storageOpts) -``` - -### 2. GormStorage - -A GORM-based implementation that supports: -- SQLite, PostgreSQL, MySQL, and other GORM-supported databases -- Persistent storage across restarts -- Concurrent access from multiple instances -- Proper transaction handling - -```go -db, err := gorm.Open(sqlite.Open("taskmanager.db"), &gorm.Config{}) -if err != nil { - return err -} - -storageOpts := DefaultStorageOptions() -manager, err := NewTaskManagerWithGormStorage(processor, db, storageOpts) -``` - -## Key Features - -### Automatic Migration -The GormStorage implementation automatically creates the required database tables: -- `a2a_messages`: Stores protocol messages -- `a2a_conversations`: Tracks conversation history and access times -- `a2a_tasks`: Stores task information (simplified, without context.CancelFunc) -- `a2a_push_notifications`: Stores push notification configurations - -### History Management -Both storage implementations respect the `MaxHistoryLength` setting to limit conversation history size and automatically clean up old messages. - -### Concurrent Access -- MemoryStorage uses read-write mutexes for thread safety -- GormStorage leverages database transactions for consistency - -### Error Handling -All storage operations return errors that are properly propagated through the TaskManager methods. - -## Configuration Options - -### StorageOptions -```go -type StorageOptions struct { - MaxHistoryLength int // Maximum number of messages per conversation -} -``` - -### ManagerOptions -```go -type ManagerOptions struct { - EnableCleanup bool // Enable automatic cleanup of expired conversations - CleanupInterval time.Duration // How often to run cleanup - ConversationTTL time.Duration // Time after which conversations expire -} -``` - -## Usage Examples - -See `example.go` for complete examples of using both storage implementations. - -## Migration Notes - -### From Original Implementation -The original TaskManager used in-memory maps directly. When migrating: - -1. Replace `taskmanager.NewMemoryTaskManager()` calls with `NewTaskManagerWithMemoryStorage()` -2. Add storage configuration options -3. Handle storage-related errors in your code - -### Database Schema -The GORM implementation stores tasks in a simplified format since `context.CancelFunc` cannot be serialized. When tasks are retrieved, new cancellation contexts are created. - -## Performance Considerations - -- **MemoryStorage**: Fast read/write operations, but limited by available RAM -- **GormStorage**: Slightly slower due to database I/O, but supports much larger datasets and persistence - -## Future Enhancements - -Potential improvements could include: -- Redis-based storage implementation -- Distributed storage with consensus mechanisms -- Configurable serialization formats -- Optimized batch operations for high-throughput scenarios \ No newline at end of file diff --git a/go/internal/a2a/manager/handler.go b/go/internal/a2a/manager/handler.go deleted file mode 100644 index 479cf9b05..000000000 --- a/go/internal/a2a/manager/handler.go +++ /dev/null @@ -1,243 +0,0 @@ -// Tencent is pleased to support the open source community by making trpc-a2a-go available. -// -// Copyright (C) 2025 THL A29 Limited, a Tencent company. All rights reserved. 
-// -// trpc-a2a-go is licensed under the Apache License Version 2.0. - -package manager - -import ( - "context" - "fmt" - "time" - - "trpc.group/trpc-go/trpc-a2a-go/log" - "trpc.group/trpc-go/trpc-a2a-go/protocol" - "trpc.group/trpc-go/trpc-a2a-go/taskmanager" -) - -// ============================================================================= -// MessageHandle Implementation -// ============================================================================= - -// taskHandler implements TaskHandler interface -type taskHandler struct { - manager *TaskManager - messageID string - ctx context.Context -} - -var _ taskmanager.TaskHandler = (*taskHandler)(nil) - -// UpdateTaskState updates task state -func (h *taskHandler) UpdateTaskState( - taskID *string, - state protocol.TaskState, - message *protocol.Message, -) error { - if taskID == nil || *taskID == "" { - return fmt.Errorf("taskID cannot be nil or empty") - } - - task, err := h.manager.Storage.GetTask(*taskID) - if err != nil { - log.Warnf("UpdateTaskState called for non-existent task %s", *taskID) - return fmt.Errorf("task not found: %s", *taskID) - } - - originalTask := task.Task() - originalTask.Status = protocol.TaskStatus{ - State: state, - Message: message, - Timestamp: time.Now().UTC().Format(time.RFC3339), - } - - // Update task in storage - if err := h.manager.Storage.StoreTask(*taskID, task); err != nil { - return fmt.Errorf("failed to update task: %w", err) - } - - log.Debugf("Updated task %s state to %s", *taskID, state) - - // notify subscribers - finalState := isFinalState(state) - event := &protocol.TaskStatusUpdateEvent{ - TaskID: *taskID, - ContextID: originalTask.ContextID, - Status: originalTask.Status, - Kind: protocol.KindTaskStatusUpdate, - Final: finalState, - } - streamEvent := protocol.StreamingMessageEvent{Result: event} - h.manager.notifySubscribers(*taskID, streamEvent) - return nil -} - -// SubscribeTask subscribes to the task -func (h *taskHandler) SubscribeTask(taskID *string) (taskmanager.TaskSubscriber, error) { - if taskID == nil || *taskID == "" { - return nil, fmt.Errorf("taskID cannot be nil or empty") - } - if !h.manager.Storage.TaskExists(*taskID) { - return nil, fmt.Errorf("task not found: %s", *taskID) - } - subscriber := NewTaskSubscriber(*taskID, defaultTaskSubscriberBufferSize) - h.manager.addSubscriber(*taskID, subscriber) - return subscriber, nil -} - -// AddArtifact adds artifact to specified task -func (h *taskHandler) AddArtifact( - taskID *string, - artifact protocol.Artifact, - isFinal bool, - needMoreData bool, -) error { - if taskID == nil || *taskID == "" { - return fmt.Errorf("taskID cannot be nil or empty") - } - - task, err := h.manager.Storage.GetTask(*taskID) - if err != nil { - return fmt.Errorf("task not found: %s", *taskID) - } - - task.Task().Artifacts = append(task.Task().Artifacts, artifact) - - // Update task in storage - if err := h.manager.Storage.StoreTask(*taskID, task); err != nil { - return fmt.Errorf("failed to update task: %w", err) - } - - log.Debugf("Added artifact %s to task %s", artifact.ArtifactID, *taskID) - - // notify subscribers - event := &protocol.TaskArtifactUpdateEvent{ - TaskID: *taskID, - ContextID: task.Task().ContextID, - Artifact: artifact, - Kind: protocol.KindTaskArtifactUpdate, - LastChunk: &isFinal, - Append: &needMoreData, - } - streamEvent := protocol.StreamingMessageEvent{Result: event} - h.manager.notifySubscribers(*taskID, streamEvent) - - return nil -} - -// GetTask gets task -func (h *taskHandler) GetTask(taskID *string) 
(taskmanager.CancellableTask, error) { - if taskID == nil || *taskID == "" { - return nil, fmt.Errorf("taskID cannot be nil or empty") - } - - task, err := h.manager.getTask(*taskID) - if err != nil { - return nil, err - } - - // return task copy to avoid external modification - taskCopy := *task.Task() - if taskCopy.Artifacts != nil { - taskCopy.Artifacts = make([]protocol.Artifact, len(task.Task().Artifacts)) - copy(taskCopy.Artifacts, task.Task().Artifacts) - } - if taskCopy.History != nil { - taskCopy.History = make([]protocol.Message, len(task.Task().History)) - copy(taskCopy.History, task.Task().History) - } - - return &MemoryCancellableTask{ - task: taskCopy, - cancelFunc: task.cancelFunc, - ctx: task.ctx, - }, nil -} - -// GetContextID gets context ID -func (h *taskHandler) GetContextID() string { - message, err := h.manager.Storage.GetMessage(h.messageID) - if err == nil && message.ContextID != nil { - return *message.ContextID - } - return "" -} - -// GetMessageHistory gets message history -func (h *taskHandler) GetMessageHistory() []protocol.Message { - message, err := h.manager.Storage.GetMessage(h.messageID) - if err == nil && message.ContextID != nil { - return h.manager.getMessageHistory(*message.ContextID) - } - return []protocol.Message{} -} - -// BuildTask creates a new task and returns task object -func (h *taskHandler) BuildTask(specificTaskID *string, contextID *string) (string, error) { - // if no taskID provided, generate one - var actualTaskID string - if specificTaskID == nil || *specificTaskID == "" { - actualTaskID = protocol.GenerateTaskID() - } else { - actualTaskID = *specificTaskID - } - - // Check if task already exists to avoid duplicate WithCancel calls - if _, err := h.manager.Storage.GetTask(actualTaskID); err == nil { - log.Warnf("Task %s already exists, returning existing task", actualTaskID) - return "", fmt.Errorf("task already exists: %s", actualTaskID) - } - - var actualContextID string - if contextID == nil || *contextID == "" { - actualContextID = "" - } else { - actualContextID = *contextID - } - - // create new task - task := protocol.Task{ - ID: actualTaskID, - ContextID: actualContextID, - Kind: protocol.KindTask, - Status: protocol.TaskStatus{ - State: protocol.TaskStateSubmitted, - Timestamp: time.Now().UTC().Format(time.RFC3339), - }, - Artifacts: make([]protocol.Artifact, 0), - History: make([]protocol.Message, 0), - Metadata: make(map[string]interface{}), - } - - cancellableTask := NewCancellableTask(task) - - // store task - if err := h.manager.Storage.StoreTask(actualTaskID, cancellableTask); err != nil { - return "", fmt.Errorf("failed to store task: %w", err) - } - - log.Debugf("Created new task %s with context %s", actualTaskID, actualContextID) - - return actualTaskID, nil -} - -// CleanTask cancels and cleans up the task. 
-func (h *taskHandler) CleanTask(taskID *string) error { - if taskID == nil || *taskID == "" { - return fmt.Errorf("taskID cannot be nil or empty") - } - - task, err := h.manager.Storage.GetTask(*taskID) - if err != nil { - return fmt.Errorf("task not found: %s", *taskID) - } - - // Cancel the task - task.Cancel() - - // Clean up subscribers - h.manager.cleanSubscribers(*taskID) - - return nil -} diff --git a/go/internal/a2a/manager/manager.go b/go/internal/a2a/manager/manager.go deleted file mode 100644 index 8501f63e9..000000000 --- a/go/internal/a2a/manager/manager.go +++ /dev/null @@ -1,565 +0,0 @@ -package manager - -import ( - "context" - "fmt" - "sync" - "sync/atomic" - "time" - - "trpc.group/trpc-go/trpc-a2a-go/log" - "trpc.group/trpc-go/trpc-a2a-go/protocol" - "trpc.group/trpc-go/trpc-a2a-go/taskmanager" -) - -const defaultMaxHistoryLength = 100 -const defaultCleanupInterval = 30 * time.Second -const defaultConversationTTL = 1 * time.Hour -const defaultTaskSubscriberBufferSize = 10 - -// // ConversationHistory stores conversation history information -// type ConversationHistory struct { -// // MessageIDs is the list of message IDs, ordered by time -// MessageIDs []string -// // LastAccessTime is the last access time -// LastAccessTime time.Time -// } - -// MemoryCancellableTask is a task that can be cancelled -type MemoryCancellableTask struct { - task protocol.Task - cancelFunc context.CancelFunc - ctx context.Context -} - -// NewCancellableTask creates a new cancellable task -func NewCancellableTask(task protocol.Task) *MemoryCancellableTask { - cancelCtx, cancel := context.WithCancel(context.Background()) - return &MemoryCancellableTask{ - task: task, - cancelFunc: cancel, - ctx: cancelCtx, - } -} - -// Cancel cancels the task -func (t *MemoryCancellableTask) Cancel() { - t.cancelFunc() -} - -// Task returns the task -func (t *MemoryCancellableTask) Task() *protocol.Task { - return &t.task -} - -// TaskSubscriber is a subscriber for a task -type TaskSubscriber struct { - taskID string - eventQueue chan protocol.StreamingMessageEvent - lastAccessTime time.Time - closed atomic.Bool - mu sync.RWMutex -} - -// NewTaskSubscriber creates a new task subscriber with specified buffer length -func NewTaskSubscriber(taskID string, length int) *TaskSubscriber { - if length <= 0 { - length = defaultTaskSubscriberBufferSize // default buffer size - } - - eventQueue := make(chan protocol.StreamingMessageEvent, length) - - return &TaskSubscriber{ - taskID: taskID, - eventQueue: eventQueue, - lastAccessTime: time.Now(), - closed: atomic.Bool{}, - } -} - -// Close closes the task subscriber -func (s *TaskSubscriber) Close() { - s.mu.Lock() - defer s.mu.Unlock() - if !s.closed.Load() { - s.closed.Store(true) - close(s.eventQueue) - } -} - -// Channel returns the channel of the task subscriber -func (s *TaskSubscriber) Channel() <-chan protocol.StreamingMessageEvent { - return s.eventQueue -} - -// Closed returns true if the task subscriber is closed -func (s *TaskSubscriber) Closed() bool { - return s.closed.Load() -} - -// Send sends an event to the task subscriber -func (s *TaskSubscriber) Send(event protocol.StreamingMessageEvent) error { - if s.Closed() { - return fmt.Errorf("task subscriber is closed") - } - - s.mu.RLock() - defer s.mu.RUnlock() - if s.Closed() { - return fmt.Errorf("task subscriber is closed") - } - - s.lastAccessTime = time.Now() - - // Use select with default to avoid blocking - select { - case s.eventQueue <- event: - return nil - default: - return fmt.Errorf("event 
queue is full or closed") - } -} - -// GetLastAccessTime returns the last access time -func (s *TaskSubscriber) GetLastAccessTime() time.Time { - s.mu.RLock() - defer s.mu.RUnlock() - return s.lastAccessTime -} - -// TaskManager is the implementation of the TaskManager interface -type TaskManager struct { - // mu protects the following fields - mu sync.RWMutex - - // Processor is the user-provided message Processor - Processor taskmanager.MessageProcessor - - // Storage handles data persistence - Storage Storage - - // taskMu protects the Tasks field - taskMu sync.RWMutex - - // Subscribers stores the task subscribers - // key: taskID, value: TaskSubscriber list - // supports all event types: Message, Task, TaskStatusUpdateEvent, TaskArtifactUpdateEvent - Subscribers map[string][]*TaskSubscriber -} - -// NewTaskManager creates a new TaskManager instance -func NewTaskManager(processor taskmanager.MessageProcessor, storage Storage) (*TaskManager, error) { - if processor == nil { - return nil, fmt.Errorf("processor cannot be nil") - } - if storage == nil { - return nil, fmt.Errorf("storage cannot be nil") - } - - manager := &TaskManager{ - Processor: processor, - Storage: storage, - Subscribers: make(map[string][]*TaskSubscriber), - } - - return manager, nil -} - -// ============================================================================= -// TaskManager interface implementation -// ============================================================================= - -// OnSendMessage handles the message/tasks request -func (m *TaskManager) OnSendMessage( - ctx context.Context, - request protocol.SendMessageParams, -) (*protocol.MessageResult, error) { - log.Debugf("TaskManager: OnSendMessage for message %s", request.Message.MessageID) - - // process the request message - if err := m.processRequestMessage(&request.Message); err != nil { - return nil, fmt.Errorf("failed to process request message: %w", err) - } - - // process Configuration - options := m.processConfiguration(request.Configuration, request.Metadata) - options.Streaming = false // non-streaming processing - - // create MessageHandle - handle := &taskHandler{ - manager: m, - messageID: request.Message.MessageID, - ctx: ctx, - } - - // call the user's message processor - result, err := m.Processor.ProcessMessage(ctx, request.Message, options, handle) - if err != nil { - return nil, fmt.Errorf("message processing failed: %w", err) - } - - if result == nil { - return nil, fmt.Errorf("processor returned nil result") - } - - // check if the user returned StreamingEvents for non-streaming request - if result.StreamingEvents != nil { - log.Infof("User returned StreamingEvents for non-streaming request, ignoring") - } - - if result.Result == nil { - return nil, fmt.Errorf("processor returned nil result for non-streaming request") - } - - switch result.Result.(type) { - case *protocol.Task: - case *protocol.Message: - default: - return nil, fmt.Errorf("processor returned unsupported result type %T for SendMessage request", result.Result) - } - - if message, ok := result.Result.(*protocol.Message); ok { - var contextID string - if request.Message.ContextID != nil { - contextID = *request.Message.ContextID - } - if err := m.processReplyMessage(contextID, message); err != nil { - return nil, fmt.Errorf("failed to process reply message: %w", err) - } - } - - return &protocol.MessageResult{Result: result.Result}, nil -} - -// OnSendMessageStream handles message/stream requests -func (m *TaskManager) OnSendMessageStream( - ctx context.Context, - 
request protocol.SendMessageParams, -) (<-chan protocol.StreamingMessageEvent, error) { - log.Debugf("TaskManager: OnSendMessageStream for message %s", request.Message.MessageID) - - if err := m.processRequestMessage(&request.Message); err != nil { - return nil, fmt.Errorf("failed to process request message: %w", err) - } - - // Process Configuration - options := m.processConfiguration(request.Configuration, request.Metadata) - options.Streaming = true // streaming mode - - // Create streaming MessageHandle - handle := &taskHandler{ - manager: m, - messageID: request.Message.MessageID, - ctx: ctx, - } - - // Call user's message processor - result, err := m.Processor.ProcessMessage(ctx, request.Message, options, handle) - if err != nil { - return nil, fmt.Errorf("message processing failed: %w", err) - } - - if result == nil || result.StreamingEvents == nil { - return nil, fmt.Errorf("processor returned nil result") - } - - return result.StreamingEvents.Channel(), nil -} - -// OnGetTask handles the tasks/get request -func (m *TaskManager) OnGetTask(ctx context.Context, params protocol.TaskQueryParams) (*protocol.Task, error) { - task, err := m.Storage.GetTask(params.ID) - if err != nil { - return nil, err - } - - // return a copy of the task - taskCopy := *task.Task() - - // if the request contains history length, fill the message history - if params.HistoryLength != nil && *params.HistoryLength > 0 { - if taskCopy.ContextID != "" { - history := m.getConversationHistory(taskCopy.ContextID, *params.HistoryLength) - taskCopy.History = history - } - } - - return &taskCopy, nil -} - -// OnCancelTask handles the tasks/cancel request -func (m *TaskManager) OnCancelTask(ctx context.Context, params protocol.TaskIDParams) (*protocol.Task, error) { - task, err := m.Storage.GetTask(params.ID) - if err != nil { - return nil, err - } - - taskCopy := *task.Task() - - handle := &taskHandler{ - manager: m, - ctx: ctx, - } - handle.CleanTask(¶ms.ID) - taskCopy.Status.State = protocol.TaskStateCanceled - taskCopy.Status.Timestamp = time.Now().UTC().Format(time.RFC3339) - - return &taskCopy, nil -} - -// OnPushNotificationSet handles tasks/pushNotificationConfig/set requests -func (m *TaskManager) OnPushNotificationSet( - ctx context.Context, - params protocol.TaskPushNotificationConfig, -) (*protocol.TaskPushNotificationConfig, error) { - err := m.Storage.StorePushNotification(params.TaskID, params) - if err != nil { - return nil, fmt.Errorf("failed to store push notification config: %w", err) - } - log.Debugf("TaskManager: Push notification config set for task %s", params.TaskID) - return ¶ms, nil -} - -// OnPushNotificationGet handles tasks/pushNotificationConfig/get requests -func (m *TaskManager) OnPushNotificationGet( - ctx context.Context, - params protocol.TaskIDParams, -) (*protocol.TaskPushNotificationConfig, error) { - config, err := m.Storage.GetPushNotification(params.ID) - if err != nil { - return nil, err - } - - return &config, nil -} - -// OnResubscribe handles tasks/resubscribe requests -func (m *TaskManager) OnResubscribe( - ctx context.Context, - params protocol.TaskIDParams, -) (<-chan protocol.StreamingMessageEvent, error) { - // Check if task exists - if _, err := m.Storage.GetTask(params.ID); err != nil { - return nil, fmt.Errorf("task not found: %s", params.ID) - } - - m.taskMu.Lock() - defer m.taskMu.Unlock() - - subscriber := NewTaskSubscriber(params.ID, defaultTaskSubscriberBufferSize) - - // Add to subscribers list - if _, exists := m.Subscribers[params.ID]; !exists { - 
m.Subscribers[params.ID] = make([]*TaskSubscriber, 0) - } - m.Subscribers[params.ID] = append(m.Subscribers[params.ID], subscriber) - - return subscriber.eventQueue, nil -} - -// OnSendTask deprecated method empty implementation -func (m *TaskManager) OnSendTask(ctx context.Context, request protocol.SendTaskParams) (*protocol.Task, error) { - return nil, fmt.Errorf("OnSendTask is deprecated, use OnSendMessage instead") -} - -// OnSendTaskSubscribe deprecated method empty implementation -func (m *TaskManager) OnSendTaskSubscribe(ctx context.Context, request protocol.SendTaskParams) (<-chan protocol.TaskEvent, error) { - return nil, fmt.Errorf("OnSendTaskSubscribe is deprecated, use OnSendMessageStream instead") -} - -// ============================================================================= -// Internal helper methods -// ============================================================================= - -// storeMessage stores messages -func (m *TaskManager) storeMessage(message protocol.Message) error { - log.Infof("Storing message %s", message.MessageID) - // Store the message using the storage interface - return m.Storage.StoreMessage(message) -} - -// getMessageHistory gets message history -func (m *TaskManager) getMessageHistory(contextID string) []protocol.Message { - if contextID == "" { - return []protocol.Message{} - } - - messages, err := m.Storage.ListMessagesByContextID(contextID, defaultMaxHistoryLength) - if err != nil { - return []protocol.Message{} - } - - return messages -} - -// getConversationHistory gets conversation history of specified length -func (m *TaskManager) getConversationHistory(contextID string, length int) []protocol.Message { - if contextID == "" { - return []protocol.Message{} - } - - messages, err := m.Storage.ListMessagesByContextID(contextID, length) - if err != nil { - return []protocol.Message{} - } - - return messages -} - -// isFinalState checks if it's a final state -func isFinalState(state protocol.TaskState) bool { - return state == protocol.TaskStateCompleted || - state == protocol.TaskStateFailed || - state == protocol.TaskStateCanceled || - state == protocol.TaskStateRejected -} - -// ============================================================================= -// Configuration related types and helper methods -// ============================================================================= - -// processConfiguration processes and normalizes Configuration -func (m *TaskManager) processConfiguration(config *protocol.SendMessageConfiguration, metadata map[string]interface{}) taskmanager.ProcessOptions { - result := taskmanager.ProcessOptions{ - Blocking: false, - HistoryLength: 0, - } - - if config == nil { - return result - } - - // Process Blocking configuration - if config.Blocking != nil { - result.Blocking = *config.Blocking - } - - // Process HistoryLength configuration - if config.HistoryLength != nil && *config.HistoryLength > 0 { - result.HistoryLength = *config.HistoryLength - } - - // Process PushNotificationConfig - if config.PushNotificationConfig != nil { - result.PushNotificationConfig = config.PushNotificationConfig - } - - return result -} - -func (m *TaskManager) processRequestMessage(message *protocol.Message) error { - if message.MessageID == "" { - message.MessageID = protocol.GenerateMessageID() - } - return m.storeMessage(*message) -} - -func (m *TaskManager) processReplyMessage(ctxID string, message *protocol.Message) error { - message.ContextID = &ctxID - message.Role = protocol.MessageRoleAgent - if 
message.MessageID == "" { - message.MessageID = protocol.GenerateMessageID() - } - return m.storeMessage(*message) -} - -func (m *TaskManager) getTask(taskID string) (*MemoryCancellableTask, error) { - return m.Storage.GetTask(taskID) -} - -// notifySubscribers notifies all subscribers of the task -func (m *TaskManager) notifySubscribers(taskID string, event protocol.StreamingMessageEvent) { - m.taskMu.RLock() - subs, exists := m.Subscribers[taskID] - if !exists || len(subs) == 0 { - m.taskMu.RUnlock() - return - } - - subsCopy := make([]*TaskSubscriber, len(subs)) - copy(subsCopy, subs) - m.taskMu.RUnlock() - - log.Debugf("Notifying %d subscribers for task %s (Event Type: %T)", len(subsCopy), taskID, event.Result) - - var failedSubscribers []*TaskSubscriber - - for _, sub := range subsCopy { - if sub.Closed() { - log.Debugf("Subscriber for task %s is already closed, marking for removal", taskID) - failedSubscribers = append(failedSubscribers, sub) - continue - } - - err := sub.Send(event) - if err != nil { - log.Warnf("Failed to send event to subscriber for task %s: %v", taskID, err) - failedSubscribers = append(failedSubscribers, sub) - } - } - - // Clean up failed or closed subscribers - if len(failedSubscribers) > 0 { - m.cleanupFailedSubscribers(taskID, failedSubscribers) - } -} - -// cleanupFailedSubscribers cleans up failed or closed subscribers -func (m *TaskManager) cleanupFailedSubscribers(taskID string, failedSubscribers []*TaskSubscriber) { - m.taskMu.Lock() - defer m.taskMu.Unlock() - - subs, exists := m.Subscribers[taskID] - if !exists { - return - } - - // Filter out failed subscribers - filteredSubs := make([]*TaskSubscriber, 0, len(subs)) - removedCount := 0 - - for _, sub := range subs { - shouldRemove := false - for _, failedSub := range failedSubscribers { - if sub == failedSub { - shouldRemove = true - removedCount++ - break - } - } - if !shouldRemove { - filteredSubs = append(filteredSubs, sub) - } - } - - if removedCount > 0 { - m.Subscribers[taskID] = filteredSubs - log.Debugf("Removed %d failed subscribers for task %s", removedCount, taskID) - - // If there are no subscribers left, delete the entire entry - if len(filteredSubs) == 0 { - delete(m.Subscribers, taskID) - } - } -} - -// addSubscriber adds a subscriber -func (m *TaskManager) addSubscriber(taskID string, sub *TaskSubscriber) { - m.taskMu.Lock() - defer m.taskMu.Unlock() - - if _, exists := m.Subscribers[taskID]; !exists { - m.Subscribers[taskID] = make([]*TaskSubscriber, 0) - } - m.Subscribers[taskID] = append(m.Subscribers[taskID], sub) -} - -// cleanSubscribers cleans up subscribers -func (m *TaskManager) cleanSubscribers(taskID string) { - m.taskMu.Lock() - defer m.taskMu.Unlock() - for _, sub := range m.Subscribers[taskID] { - sub.Close() - } - delete(m.Subscribers, taskID) -} diff --git a/go/internal/a2a/manager/memory.go b/go/internal/a2a/manager/memory.go deleted file mode 100644 index 2c5e9c933..000000000 --- a/go/internal/a2a/manager/memory.go +++ /dev/null @@ -1,288 +0,0 @@ -package manager - -import ( - "fmt" - "sync" - "time" - - "trpc.group/trpc-go/trpc-a2a-go/protocol" -) - -// MemoryStorage is an in-memory implementation of the Storage interface - -type ConversationHistory struct { - MessageIDs []string - LastAccessTime time.Time -} - -type MemoryStorage struct { - mu sync.RWMutex - messages map[string]protocol.Message - conversations map[string]*ConversationHistory - tasks map[string]*MemoryCancellableTask - pushNotifications map[string]protocol.TaskPushNotificationConfig - 
maxHistoryLength int -} - -// NewMemoryStorage creates a new in-memory storage implementation -func NewMemoryStorage(options StorageOptions) *MemoryStorage { - maxHistoryLength := options.MaxHistoryLength - if maxHistoryLength <= 0 { - maxHistoryLength = defaultMaxHistoryLength - } - - return &MemoryStorage{ - messages: make(map[string]protocol.Message), - conversations: make(map[string]*ConversationHistory), - tasks: make(map[string]*MemoryCancellableTask), - pushNotifications: make(map[string]protocol.TaskPushNotificationConfig), - maxHistoryLength: maxHistoryLength, - } -} - -// Message operations -func (s *MemoryStorage) StoreMessage(message protocol.Message) error { - s.mu.Lock() - defer s.mu.Unlock() - - s.messages[message.MessageID] = message - - // If the message has a contextID, add it to conversation history - if message.ContextID != nil { - contextID := *message.ContextID - if _, exists := s.conversations[contextID]; !exists { - s.conversations[contextID] = &ConversationHistory{ - MessageIDs: make([]string, 0), - LastAccessTime: time.Now(), - } - } - - // Add message ID to conversation history - s.conversations[contextID].MessageIDs = append(s.conversations[contextID].MessageIDs, message.MessageID) - // Update last access time - s.conversations[contextID].LastAccessTime = time.Now() - - // Limit history length - if len(s.conversations[contextID].MessageIDs) > s.maxHistoryLength { - // Remove the oldest message - removedMsgID := s.conversations[contextID].MessageIDs[0] - s.conversations[contextID].MessageIDs = s.conversations[contextID].MessageIDs[1:] - // Delete old message from message storage - delete(s.messages, removedMsgID) - } - } - - return nil -} - -func (s *MemoryStorage) GetMessage(messageID string) (protocol.Message, error) { - s.mu.RLock() - defer s.mu.RUnlock() - - message, exists := s.messages[messageID] - if !exists { - return protocol.Message{}, fmt.Errorf("message not found: %s", messageID) - } - return message, nil -} - -func (s *MemoryStorage) DeleteMessage(messageID string) error { - s.mu.Lock() - defer s.mu.Unlock() - - delete(s.messages, messageID) - return nil -} - -func (s *MemoryStorage) GetMessages(messageIDs []string) ([]protocol.Message, error) { - s.mu.RLock() - defer s.mu.RUnlock() - - messages := make([]protocol.Message, 0, len(messageIDs)) - for _, msgID := range messageIDs { - if msg, exists := s.messages[msgID]; exists { - messages = append(messages, msg) - } - } - return messages, nil -} - -// Conversation operations -func (s *MemoryStorage) StoreConversation(contextID string, history *ConversationHistory) error { - s.mu.Lock() - defer s.mu.Unlock() - - s.conversations[contextID] = history - return nil -} - -func (s *MemoryStorage) GetConversation(contextID string) (*ConversationHistory, error) { - s.mu.RLock() - defer s.mu.RUnlock() - - conversation, exists := s.conversations[contextID] - if !exists { - return nil, fmt.Errorf("conversation not found: %s", contextID) - } - return conversation, nil -} - -func (s *MemoryStorage) UpdateConversationAccess(contextID string, timestamp time.Time) error { - s.mu.Lock() - defer s.mu.Unlock() - - if conversation, exists := s.conversations[contextID]; exists { - conversation.LastAccessTime = timestamp - } - return nil -} - -func (s *MemoryStorage) DeleteConversation(contextID string) error { - s.mu.Lock() - defer s.mu.Unlock() - - delete(s.conversations, contextID) - return nil -} - -func (s *MemoryStorage) GetExpiredConversations(maxAge time.Duration) ([]string, error) { - s.mu.RLock() - defer 
s.mu.RUnlock() - - now := time.Now() - expiredContexts := make([]string, 0) - - for contextID, conversation := range s.conversations { - if now.Sub(conversation.LastAccessTime) > maxAge { - expiredContexts = append(expiredContexts, contextID) - } - } - - return expiredContexts, nil -} - -func (s *MemoryStorage) GetConversationStats() (map[string]interface{}, error) { - s.mu.RLock() - defer s.mu.RUnlock() - - totalConversations := len(s.conversations) - totalMessages := len(s.messages) - - oldestAccess := time.Now() - newestAccess := time.Time{} - - for _, conversation := range s.conversations { - if conversation.LastAccessTime.Before(oldestAccess) { - oldestAccess = conversation.LastAccessTime - } - if conversation.LastAccessTime.After(newestAccess) { - newestAccess = conversation.LastAccessTime - } - } - - stats := map[string]interface{}{ - "total_conversations": totalConversations, - "total_messages": totalMessages, - } - - if totalConversations > 0 { - stats["oldest_access"] = oldestAccess - stats["newest_access"] = newestAccess - } - - return stats, nil -} - -// Task operations -func (s *MemoryStorage) StoreTask(taskID string, task *MemoryCancellableTask) error { - s.mu.Lock() - defer s.mu.Unlock() - - s.tasks[taskID] = task - return nil -} - -func (s *MemoryStorage) GetTask(taskID string) (*MemoryCancellableTask, error) { - s.mu.RLock() - defer s.mu.RUnlock() - - task, exists := s.tasks[taskID] - if !exists { - return nil, fmt.Errorf("task not found: %s", taskID) - } - return task, nil -} - -func (s *MemoryStorage) DeleteTask(taskID string) error { - s.mu.Lock() - defer s.mu.Unlock() - - delete(s.tasks, taskID) - return nil -} - -func (s *MemoryStorage) TaskExists(taskID string) bool { - s.mu.RLock() - defer s.mu.RUnlock() - - _, exists := s.tasks[taskID] - return exists -} - -// Push notification operations -func (s *MemoryStorage) StorePushNotification(taskID string, config protocol.TaskPushNotificationConfig) error { - s.mu.Lock() - defer s.mu.Unlock() - - s.pushNotifications[taskID] = config - return nil -} - -func (s *MemoryStorage) GetPushNotification(taskID string) (protocol.TaskPushNotificationConfig, error) { - s.mu.RLock() - defer s.mu.RUnlock() - - config, exists := s.pushNotifications[taskID] - if !exists { - return protocol.TaskPushNotificationConfig{}, fmt.Errorf("push notification config not found for task: %s", taskID) - } - return config, nil -} - -func (s *MemoryStorage) DeletePushNotification(taskID string) error { - s.mu.Lock() - defer s.mu.Unlock() - - delete(s.pushNotifications, taskID) - return nil -} - -// Cleanup operations -func (s *MemoryStorage) CleanupExpiredConversations(maxAge time.Duration) (int, error) { - s.mu.Lock() - defer s.mu.Unlock() - - now := time.Now() - expiredContexts := make([]string, 0) - expiredMessageIDs := make([]string, 0) - - // Find expired conversations - for contextID, conversation := range s.conversations { - if now.Sub(conversation.LastAccessTime) > maxAge { - expiredContexts = append(expiredContexts, contextID) - expiredMessageIDs = append(expiredMessageIDs, conversation.MessageIDs...) 
- } - } - - // Delete expired conversations - for _, contextID := range expiredContexts { - delete(s.conversations, contextID) - } - - // Delete messages from expired conversations - for _, messageID := range expiredMessageIDs { - delete(s.messages, messageID) - } - - return len(expiredContexts), nil -} diff --git a/go/internal/a2a/manager/storage.go b/go/internal/a2a/manager/storage.go deleted file mode 100644 index 35bae7f0c..000000000 --- a/go/internal/a2a/manager/storage.go +++ /dev/null @@ -1,101 +0,0 @@ -package manager - -import ( - "github.com/kagent-dev/kagent/go/internal/database" - "github.com/kagent-dev/kagent/go/internal/utils" - "trpc.group/trpc-go/trpc-a2a-go/protocol" -) - -// Storage defines the interface for persisting task manager data -type Storage interface { - // Message operations - StoreMessage(message protocol.Message) error - GetMessage(messageID string) (protocol.Message, error) - // List messages by context ID, if limit is -1, return all messages - ListMessagesByContextID(contextID string, limit int) ([]protocol.Message, error) - - // Task operations - StoreTask(taskID string, task *MemoryCancellableTask) error - GetTask(taskID string) (*MemoryCancellableTask, error) - TaskExists(taskID string) bool - - // Push notification operations - StorePushNotification(taskID string, config protocol.TaskPushNotificationConfig) error - GetPushNotification(taskID string) (protocol.TaskPushNotificationConfig, error) -} - -type storageImpl struct { - dbClient database.Client -} - -func NewStorage(dbClient database.Client) Storage { - return &storageImpl{ - dbClient: dbClient, - } -} - -func (s *storageImpl) GetTask(taskID string) (*MemoryCancellableTask, error) { - task, err := s.dbClient.GetTask(taskID) - if err != nil { - return nil, err - } - parsedTask, err := task.Parse() - if err != nil { - return nil, err - } - return NewCancellableTask(parsedTask), nil -} - -func (s *storageImpl) TaskExists(taskID string) bool { - _, err := s.dbClient.GetTask(taskID) - return err == nil -} - -func (s *storageImpl) StoreMessage(message protocol.Message) error { - return s.dbClient.CreateMessages(&message) -} - -func (s *storageImpl) GetMessage(messageID string) (protocol.Message, error) { - message, err := s.dbClient.GetMessage(messageID) - if err != nil { - return protocol.Message{}, err - } - return message.Parse() -} - -func (s *storageImpl) ListMessagesByContextID(contextID string, limit int) ([]protocol.Message, error) { - messages, err := s.dbClient.ListMessagesForSession(contextID, utils.GetGlobalUserID()) - if err != nil { - return nil, err - } - protocolMessages := make([]protocol.Message, len(messages)) - for i, message := range messages { - parsedMessage, err := message.Parse() - if err != nil { - return nil, err - } - protocolMessages[i] = parsedMessage - } - return protocolMessages, nil -} - -func (s *storageImpl) StoreTask(taskID string, task *MemoryCancellableTask) error { - return s.dbClient.CreateTask(task.Task()) -} - -func (s *storageImpl) StorePushNotification(taskID string, config protocol.TaskPushNotificationConfig) error { - return s.dbClient.CreatePushNotification(taskID, &config) -} - -func (s *storageImpl) GetPushNotification(taskID string) (protocol.TaskPushNotificationConfig, error) { - pushNotification, err := s.dbClient.GetPushNotification(taskID) - if err != nil { - return protocol.TaskPushNotificationConfig{}, err - } - return *pushNotification, nil -} - -// StorageOptions contains configuration options for storage implementations -type StorageOptions 
struct { - MaxHistoryLength int -} diff --git a/go/internal/adk/types.go b/go/internal/adk/types.go new file mode 100644 index 000000000..b1fad2938 --- /dev/null +++ b/go/internal/adk/types.go @@ -0,0 +1,257 @@ +package adk + +import ( + "database/sql" + "database/sql/driver" + "encoding/json" + "fmt" + + "trpc.group/trpc-go/trpc-a2a-go/server" +) + +type StreamableHTTPConnectionParams struct { + Url string `json:"url"` + Headers map[string]string `json:"headers"` + Timeout *float64 `json:"timeout,omitempty"` + SseReadTimeout *float64 `json:"sse_read_timeout,omitempty"` + TerminateOnClose *bool `json:"terminate_on_close,omitempty"` +} + +type HttpMcpServerConfig struct { + Params StreamableHTTPConnectionParams `json:"params"` + Tools []string `json:"tools"` +} + +type SseConnectionParams struct { + Url string `json:"url"` + Headers map[string]string `json:"headers"` + Timeout *float64 `json:"timeout,omitempty"` + SseReadTimeout *float64 `json:"sse_read_timeout,omitempty"` +} + +type SseMcpServerConfig struct { + Params SseConnectionParams `json:"params"` + Tools []string `json:"tools"` +} + +type Model interface { + GetType() string +} + +type BaseModel struct { + Type string `json:"type"` + Model string `json:"model"` +} + +type OpenAI struct { + BaseModel + BaseUrl string `json:"base_url"` +} + +func (o *OpenAI) MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]interface{}{ + "type": "openai", + "model": o.Model, + "base_url": o.BaseUrl, + }) +} + +func (o *OpenAI) GetType() string { + return "openai" +} + +type AzureOpenAI struct { + BaseModel +} + +func (a *AzureOpenAI) GetType() string { + return "azure_openai" +} + +func (a *AzureOpenAI) MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]interface{}{ + "type": "azure_openai", + "model": a.Model, + }) +} + +type Anthropic struct { + BaseModel + BaseUrl string `json:"base_url"` +} + +func (a *Anthropic) MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]interface{}{ + "type": "anthropic", + "model": a.Model, + "base_url": a.BaseUrl, + }) +} + +func (a *Anthropic) GetType() string { + return "anthropic" +} + +type GeminiVertexAI struct { + BaseModel +} + +func (g *GeminiVertexAI) MarshalJSON() ([]byte, error) { + + return json.Marshal(map[string]interface{}{ + "type": "gemini_vertex_ai", + "model": g.Model, + }) +} + +func (g *GeminiVertexAI) GetType() string { + return "gemini_vertex_ai" +} + +type GeminiAnthropic struct { + BaseModel +} + +func (g *GeminiAnthropic) MarshalJSON() ([]byte, error) { + + return json.Marshal(map[string]interface{}{ + "type": "gemini_anthropic", + "model": g.Model, + }) +} + +func (g *GeminiAnthropic) GetType() string { + return "gemini_anthropic" +} + +type Ollama struct { + BaseModel +} + +func (o *Ollama) MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]interface{}{ + "type": "ollama", + "model": o.Model, + }) +} + +func (o *Ollama) GetType() string { + return "ollama" +} + +type Gemini struct { + BaseModel +} + +func (g *Gemini) MarshalJSON() ([]byte, error) { + + return json.Marshal(map[string]interface{}{ + "type": "gemini", + "model": g.Model, + }) +} + +func (g *Gemini) GetType() string { + return "gemini" +} + +func ParseModel(bytes []byte) (Model, error) { + var model BaseModel + if err := json.Unmarshal(bytes, &model); err != nil { + return nil, err + } + switch model.Type { + case "openai": + var openai OpenAI + if err := json.Unmarshal(bytes, &openai); err != nil { + return nil, err + } + return &openai, nil + case "anthropic": + var 
anthropic Anthropic + if err := json.Unmarshal(bytes, &anthropic); err != nil { + return nil, err + } + return &anthropic, nil + case "gemini_vertex_ai": + var geminiVertexAI GeminiVertexAI + if err := json.Unmarshal(bytes, &geminiVertexAI); err != nil { + return nil, err + } + return &geminiVertexAI, nil + case "gemini_anthropic": + var geminiAnthropic GeminiAnthropic + if err := json.Unmarshal(bytes, &geminiAnthropic); err != nil { + return nil, err + } + return &geminiAnthropic, nil + case "ollama": + var ollama Ollama + if err := json.Unmarshal(bytes, &ollama); err != nil { + return nil, err + } + return &ollama, nil + case "gemini": + var gemini Gemini + if err := json.Unmarshal(bytes, &gemini); err != nil { + return nil, err + } + return &gemini, nil + } + return nil, fmt.Errorf("unknown model type: %s", model.Type) +} + +type AgentConfig struct { + KagentUrl string `json:"kagent_url"` + AgentCard server.AgentCard `json:"agent_card"` + Name string `json:"name"` + Model Model `json:"model"` + Description string `json:"description"` + Instruction string `json:"instruction"` + HttpTools []HttpMcpServerConfig `json:"http_tools"` + SseTools []SseMcpServerConfig `json:"sse_tools"` + Agents []AgentConfig `json:"agents"` +} + +func (a *AgentConfig) UnmarshalJSON(data []byte) error { + var tmp struct { + KagentUrl string `json:"kagent_url"` + AgentCard server.AgentCard `json:"agent_card"` + Name string `json:"name"` + Model json.RawMessage `json:"model"` + Description string `json:"description"` + Instruction string `json:"instruction"` + HttpTools []HttpMcpServerConfig `json:"http_tools"` + SseTools []SseMcpServerConfig `json:"sse_tools"` + Agents []AgentConfig `json:"agents"` + } + if err := json.Unmarshal(data, &tmp); err != nil { + return err + } + a.KagentUrl = tmp.KagentUrl + a.AgentCard = tmp.AgentCard + a.Name = tmp.Name + model, err := ParseModel(tmp.Model) + if err != nil { + return err + } + a.Model = model + a.Description = tmp.Description + a.Instruction = tmp.Instruction + a.HttpTools = tmp.HttpTools + a.SseTools = tmp.SseTools + a.Agents = tmp.Agents + return nil +} + +var _ sql.Scanner = &AgentConfig{} + +func (a *AgentConfig) Scan(value interface{}) error { + return json.Unmarshal(value.([]byte), a) +} + +var _ driver.Valuer = &AgentConfig{} + +func (a AgentConfig) Value() (driver.Value, error) { + return json.Marshal(a) +} diff --git a/go/internal/autogen/api/agents.go b/go/internal/autogen/api/agents.go deleted file mode 100644 index f731baee3..000000000 --- a/go/internal/autogen/api/agents.go +++ /dev/null @@ -1,69 +0,0 @@ -package api - -type Handoff struct { - Target string `json:"target"` - Description string `json:"description"` - Name string `json:"name"` - Message string `json:"message"` -} - -type AssistantAgentConfig struct { - Name string `json:"name"` - Description string `json:"description"` - ModelClient *Component `json:"model_client,omitempty"` - Tools []*Component `json:"tools"` - ModelContext *Component `json:"model_context,omitempty"` - SystemMessage string `json:"system_message,omitempty"` - ReflectOnToolUse bool `json:"reflect_on_tool_use"` - ModelClientStream bool `json:"model_client_stream"` - ToolCallSummaryFormat string `json:"tool_call_summary_format,omitempty"` - Handoffs []Handoff `json:"handoffs,omitempty"` - Memory []*Component `json:"memory,omitempty"` -} - -func (c *AssistantAgentConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *AssistantAgentConfig) FromConfig(config map[string]interface{}) error { - 
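
A short sketch of how the polymorphic model field in the new adk.AgentConfig above is resolved: UnmarshalJSON defers to ParseModel, which switches on the type discriminator, so a config document like the one below (all field values are illustrative) ends up with a concrete *OpenAI in cfg.Model.

```go
package adk

import (
	"encoding/json"
	"fmt"
)

// Example input; the field values are illustrative only.
const exampleConfig = `{
  "name": "example-agent",
  "description": "demo agent",
  "instruction": "be helpful",
  "model": {"type": "openai", "model": "gpt-4o", "base_url": "https://api.openai.com/v1"}
}`

func parseExampleConfig() error {
	var cfg AgentConfig
	if err := json.Unmarshal([]byte(exampleConfig), &cfg); err != nil {
		return err
	}
	// cfg.Model is now the concrete *OpenAI type selected by ParseModel.
	fmt.Println(cfg.Model.GetType()) // prints "openai"
	return nil
}
```
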
return fromConfig(c, config) -} - -type MultiModalWebSurferConfig struct { - Name string `json:"name"` - ModelClient *Component `json:"model_client,omitempty"` - DownloadsFolder *string `json:"downloads_folder,omitempty"` - Description string `json:"description"` - DebugDir *string `json:"debug_dir,omitempty"` - Headless *bool `json:"headless,omitempty"` - StartPage *string `json:"start_page,omitempty"` - AnimateActions *bool `json:"animate_actions,omitempty"` - ToSaveScreenshots *bool `json:"to_save_screenshots,omitempty"` - UseOCR *bool `json:"use_ocr,omitempty"` - BrowserChannel *string `json:"browser_channel,omitempty"` - BrowserDataDir *string `json:"browser_data_dir,omitempty"` - ToResizeViewport *bool `json:"to_resize_viewport,omitempty"` -} - -func (c *MultiModalWebSurferConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *MultiModalWebSurferConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type TaskAgentConfig struct { - Name string `json:"name"` - Team *Component `json:"team,omitempty"` - ModelContext *Component `json:"model_context,omitempty"` - Description *string `json:"description,omitempty"` -} - -func (c *TaskAgentConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *TaskAgentConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} diff --git a/go/internal/autogen/api/memory.go b/go/internal/autogen/api/memory.go deleted file mode 100644 index b3b43ca33..000000000 --- a/go/internal/autogen/api/memory.go +++ /dev/null @@ -1,18 +0,0 @@ -package api - -type PineconeMemoryConfig struct { - APIKey string `json:"api_key"` - IndexHost string `json:"index_host"` - TopK int `json:"top_k"` - Namespace string `json:"namespace"` - RecordFields []string `json:"record_fields"` - ScoreThreshold float64 `json:"score_threshold"` -} - -func (c *PineconeMemoryConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *PineconeMemoryConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} diff --git a/go/internal/autogen/api/models.go b/go/internal/autogen/api/models.go deleted file mode 100644 index 394983e86..000000000 --- a/go/internal/autogen/api/models.go +++ /dev/null @@ -1,178 +0,0 @@ -package api - -// Model Configurations -type ModelInfo struct { - Vision bool `json:"vision"` - FunctionCalling bool `json:"function_calling"` - JSONOutput bool `json:"json_output"` - Family string `json:"family"` - StructuredOutput bool `json:"structured_output"` - MultipleSystemMessages bool `json:"multiple_system_messages"` -} - -type OpenAICreateArgumentsConfig struct { - FrequencyPenalty float64 `json:"frequency_penalty,omitempty"` - LogitBias map[string]float64 `json:"logit_bias,omitempty"` - MaxTokens int `json:"max_tokens,omitempty"` - N int `json:"n,omitempty"` - PresencePenalty float64 `json:"presence_penalty,omitempty"` - Seed int `json:"seed,omitempty"` - Temperature float64 `json:"temperature,omitempty"` - TopP float64 `json:"top_p,omitempty"` - User string `json:"user,omitempty"` -} - -type StreamOptions struct { - IncludeUsage bool `json:"include_usage,omitempty"` -} - -type BaseClientConfig struct { - // Base OpenAI fields - DefaultHeaders map[string]string `json:"default_headers,omitempty"` -} - -type BaseOpenAIClientConfig struct { - BaseClientConfig - // Base OpenAI fields - Model string `json:"model"` - APIKey string `json:"api_key,omitempty"` - Timeout int 
`json:"timeout,omitempty"` - MaxRetries int `json:"max_retries,omitempty"` - ModelCapabilities interface{} `json:"model_capabilities,omitempty"` - ModelInfo *ModelInfo `json:"model_info,omitempty"` - StreamOptions *StreamOptions `json:"stream_options,omitempty"` - OpenAICreateArgumentsConfig -} - -type OpenAIClientConfig struct { - BaseOpenAIClientConfig - - // OpenAIClientConfig specific fields - Organization *string `json:"organization,omitempty"` - BaseURL *string `json:"base_url,omitempty"` -} - -func (c *OpenAIClientConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *OpenAIClientConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type AzureOpenAIClientConfig struct { - BaseOpenAIClientConfig - - // AzureOpenAIClientConfig specific fields - AzureEndpoint string `json:"azure_endpoint,omitempty"` - AzureDeployment string `json:"azure_deployment,omitempty"` - APIVersion string `json:"api_version,omitempty"` - AzureADToken string `json:"azure_ad_token,omitempty"` -} - -func (c *AzureOpenAIClientConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *AzureOpenAIClientConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type AnthropicCreateArguments struct { - MaxTokens int `json:"max_tokens,omitempty"` - Temperature float64 `json:"temperature,omitempty"` - TopP float64 `json:"top_p,omitempty"` - TopK int `json:"top_k,omitempty"` - StopSequences []string `json:"stop_sequences,omitempty"` - Metadata map[string]string `json:"metadata,omitempty"` -} - -type BaseAnthropicClientConfiguration struct { - APIKey string `json:"api_key,omitempty"` - BaseURL string `json:"base_url,omitempty"` - Model string `json:"model"` - ModelCapabilities *ModelInfo `json:"model_capabilities,omitempty"` - ModelInfo *ModelInfo `json:"model_info,omitempty"` - Timeout float64 `json:"timeout,omitempty"` - MaxRetries int `json:"max_retries,omitempty"` - BaseClientConfig - AnthropicCreateArguments -} - -type AnthropicClientConfiguration struct { - BaseAnthropicClientConfiguration -} - -func (c *AnthropicClientConfiguration) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *AnthropicClientConfiguration) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type OllamaCreateArguments struct { - Model string `json:"model"` - Host string `json:"host"` -} - -type OllamaClientConfiguration struct { - FollowRedirects bool `json:"follow_redirects"` - Timeout int `json:"timeout"` - Headers map[string]string `json:"headers"` - ModelCapabilities interface{} `json:"model_capabilities,omitempty"` - ModelInfo *ModelInfo `json:"model_info"` - Options map[string]string `json:"options"` - OllamaCreateArguments -} - -func (c *OllamaClientConfiguration) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *OllamaClientConfiguration) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type BaseVertexAIConfig struct { - Model string `json:"model"` - ProjectID string `json:"project"` - Location string `json:"location"` - Credentials map[string]interface{} `json:"credentials,omitempty"` - ModelInfo *ModelInfo `json:"model_info,omitempty"` - - Temperature *float64 `json:"temperature,omitempty"` - TopP *float64 `json:"topP,omitempty"` - TopK *float64 `json:"topK,omitempty"` - StopSequences *[]string `json:"stopSequences,omitempty"` -} - -type GeminiVertexAIConfig struct 
{ - BaseVertexAIConfig - - MaxOutputTokens *int `json:"max_output_tokens,omitempty"` - CandidateCount *int `json:"candidate_count,omitempty"` - ResponseMimeType *string `json:"response_mime_type,omitempty"` -} - -type AnthropicVertexAIConfig struct { - BaseVertexAIConfig - - MaxTokens *int `json:"max_tokens,omitempty"` -} - -func (c *GeminiVertexAIConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *GeminiVertexAIConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -func (c *AnthropicVertexAIConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *AnthropicVertexAIConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} diff --git a/go/internal/autogen/api/teams.go b/go/internal/autogen/api/teams.go deleted file mode 100644 index f6166a17f..000000000 --- a/go/internal/autogen/api/teams.go +++ /dev/null @@ -1,67 +0,0 @@ -package api - -type CommonTeamConfig struct { - Participants []*Component `json:"participants"` - Termination *Component `json:"termination_condition,omitempty"` - MaxTurns *int `json:"max_turns,omitempty"` - ModelConfig *Component `json:"model_config,omitempty"` -} - -func (c *CommonTeamConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *CommonTeamConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type RoundRobinGroupChatConfig struct { - CommonTeamConfig -} - -func (c *RoundRobinGroupChatConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *RoundRobinGroupChatConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type SelectorGroupChatConfig struct { - CommonTeamConfig - SelectorPrompt string `json:"selector_prompt,omitempty"` -} - -func (c *SelectorGroupChatConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *SelectorGroupChatConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type MagenticOneGroupChatConfig struct { - CommonTeamConfig - FinalAnswerPrompt string `json:"final_answer_prompt,omitempty"` - MaxStalls int `json:"max_stalls,omitempty"` -} - -func (c *MagenticOneGroupChatConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *MagenticOneGroupChatConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type SwarmTeamConfig struct { - CommonTeamConfig -} - -func (c *SwarmTeamConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *SwarmTeamConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} diff --git a/go/internal/autogen/api/terminations.go b/go/internal/autogen/api/terminations.go deleted file mode 100644 index 0e7d928d1..000000000 --- a/go/internal/autogen/api/terminations.go +++ /dev/null @@ -1,83 +0,0 @@ -package api - -type OrTerminationConfig struct { - Conditions []*Component `json:"conditions"` -} - -func (c *OrTerminationConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *OrTerminationConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type AndTerminationConfig struct { - Conditions []*Component `json:"conditions"` -} - -func (c *AndTerminationConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *AndTerminationConfig) 
FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type TextMentionTerminationConfig struct { - Text string `json:"text,omitempty"` -} - -func (c *TextMentionTerminationConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *TextMentionTerminationConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type TextMessageTerminationConfig struct { - Source string `json:"source,omitempty"` -} - -func (c *TextMessageTerminationConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *TextMessageTerminationConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type FinalTextMessageTerminationConfig struct { - Source string `json:"source,omitempty"` -} - -func (c *FinalTextMessageTerminationConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *FinalTextMessageTerminationConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type MaxMessageTerminationConfig struct { - MaxMessages int `json:"max_messages"` -} - -func (c *MaxMessageTerminationConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *MaxMessageTerminationConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type StopMessageTerminationConfig struct{} - -func (c *StopMessageTerminationConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *StopMessageTerminationConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} diff --git a/go/internal/autogen/api/tool_servers.go b/go/internal/autogen/api/tool_servers.go deleted file mode 100644 index ef1f7e9cf..000000000 --- a/go/internal/autogen/api/tool_servers.go +++ /dev/null @@ -1,59 +0,0 @@ -package api - -type StdioMcpServerConfig struct { - Command string `json:"command"` - Args []string `json:"args,omitempty"` - Env map[string]string `json:"env,omitempty"` - ReadTimeoutSeconds uint8 `json:"read_timeout_seconds,omitempty"` -} - -func (c *StdioMcpServerConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *StdioMcpServerConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type SseMcpServerConfig struct { - URL string `json:"url"` - Headers map[string]interface{} `json:"headers,omitempty"` - Timeout *float64 `json:"timeout,omitempty"` - SseReadTimeout *float64 `json:"sse_read_timeout,omitempty"` -} - -func (c *SseMcpServerConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *SseMcpServerConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type StreamableHttpServerConfig struct { - URL string `json:"url"` - Headers map[string]interface{} `json:"headers,omitempty"` - Timeout *float64 `json:"timeout,omitempty"` - SseReadTimeout *float64 `json:"sse_read_timeout,omitempty"` - TerminateOnClose bool `json:"terminate_on_close,omitempty"` -} - -func (c *StreamableHttpServerConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *StreamableHttpServerConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} - -type MCPToolConfig struct { - // can be StdioMcpServerConfig | SseMcpServerConfig - ServerParams any `json:"server_params"` - Tool MCPTool `json:"tool"` -} - -type MCPTool struct { - Name 
string `json:"name"` - Description string `json:"description"` - InputSchema any `json:"input_schema"` -} diff --git a/go/internal/autogen/api/tools.go b/go/internal/autogen/api/tools.go deleted file mode 100644 index d22e350f0..000000000 --- a/go/internal/autogen/api/tools.go +++ /dev/null @@ -1,15 +0,0 @@ -package api - -type TeamToolConfig struct { - Name string `json:"name"` - Description string `json:"description"` - Team *Component `json:"team"` -} - -func (c *TeamToolConfig) ToConfig() (map[string]interface{}, error) { - return toConfig(c) -} - -func (c *TeamToolConfig) FromConfig(config map[string]interface{}) error { - return fromConfig(c, config) -} diff --git a/go/internal/autogen/api/types.go b/go/internal/autogen/api/types.go deleted file mode 100644 index 7ed6d59d6..000000000 --- a/go/internal/autogen/api/types.go +++ /dev/null @@ -1,114 +0,0 @@ -package api - -import ( - "database/sql/driver" - "encoding/json" - "errors" -) - -// JSONMap is a custom type for handling JSON columns in GORM -type JSONMap map[string]interface{} - -// Scan implements the sql.Scanner interface -func (j *JSONMap) Scan(value interface{}) error { - if value == nil { - *j = make(JSONMap) - return nil - } - - bytes, ok := value.([]byte) - if !ok { - return errors.New("failed to scan JSONMap: value is not []byte") - } - - return json.Unmarshal(bytes, j) -} - -// Value implements the driver.Valuer interface -func (j JSONMap) Value() (driver.Value, error) { - if j == nil { - return nil, nil - } - return json.Marshal(j) -} - -type Component struct { - Provider string `json:"provider"` - ComponentType string `json:"component_type"` - Version int `json:"version"` - ComponentVersion int `json:"component_version"` - Description string `json:"description"` - Label string `json:"label"` - Config JSONMap `gorm:"type:json" json:"config"` -} - -// Scan implements the sql.Scanner interface -func (c *Component) Scan(value interface{}) error { - if value == nil { - return nil - } - - bytes, ok := value.([]byte) - if !ok { - return errors.New("failed to scan Component: value is not []byte") - } - - return json.Unmarshal(bytes, c) -} - -// Value implements the driver.Valuer interface -func (c Component) Value() (driver.Value, error) { - return json.Marshal(c) -} - -func (c *Component) ToConfig() (map[string]interface{}, error) { - if c == nil { - return nil, nil - } - - return toConfig(c) -} - -func MustToConfig(c ComponentConfig) map[string]interface{} { - config, err := c.ToConfig() - if err != nil { - panic(err) - } - return config -} - -func MustFromConfig(c ComponentConfig, config map[string]interface{}) { - err := c.FromConfig(config) - if err != nil { - panic(err) - } -} - -type ComponentConfig interface { - ToConfig() (map[string]interface{}, error) - FromConfig(map[string]interface{}) error -} - -func toConfig(c any) (map[string]interface{}, error) { - byt, err := json.Marshal(c) - if err != nil { - return nil, err - } - - result := make(map[string]interface{}) - err = json.Unmarshal(byt, &result) - if err != nil { - return nil, err - } - - return result, nil -} - -func fromConfig(c any, config map[string]interface{}) error { - byt, err := json.Marshal(config) - if err != nil { - return err - } - - return json.Unmarshal(byt, c) -} diff --git a/go/internal/autogen/client/client.go b/go/internal/autogen/client/client.go deleted file mode 100644 index f5bb44692..000000000 --- a/go/internal/autogen/client/client.go +++ /dev/null @@ -1,236 +0,0 @@ -package client - -import ( - "bytes" - "context" - "encoding/json" - 
"fmt" - "io" - "net/http" - "strings" - "time" - - "github.com/kagent-dev/kagent/go/internal/autogen/api" -) - -type client struct { - BaseURL string - HTTPClient *http.Client -} - -type Client interface { - GetVersion(ctx context.Context) (string, error) - InvokeTask(ctx context.Context, req *InvokeTaskRequest) (*InvokeTaskResult, error) - InvokeTaskStream(ctx context.Context, req *InvokeTaskRequest) (<-chan *SseEvent, error) - FetchTools(ctx context.Context, req *ToolServerRequest) (*ToolServerResponse, error) - Validate(ctx context.Context, req *ValidationRequest) (*ValidationResponse, error) - ListSupportedModels(ctx context.Context) (*ProviderModels, error) -} - -func New(baseURL string) Client { - // Ensure baseURL doesn't end with a slash - baseURL = strings.TrimRight(baseURL, "/") - - return &client{ - BaseURL: baseURL, - HTTPClient: &http.Client{ - Timeout: time.Minute * 30, - }, - } -} - -func (c *client) GetVersion(ctx context.Context) (string, error) { - var result struct { - Version string `json:"version"` - } - - err := c.doRequest(context.Background(), "GET", "/version", nil, &result) - if err != nil { - return "", err - } - - return result.Version, nil -} - -func (c *client) startRequest(ctx context.Context, method, path string, body interface{}) (*http.Response, error) { - var bodyReader *bytes.Reader - if body != nil { - bodyBytes, err := json.Marshal(body) - if err != nil { - return nil, fmt.Errorf("error marshaling request body: %w", err) - } - bodyReader = bytes.NewReader(bodyBytes) - } - - // Ensure path starts with a slash - if !strings.HasPrefix(path, "/") { - path = "/" + path - } - - url := c.BaseURL + path - - var req *http.Request - var err error - if bodyReader != nil { - req, err = http.NewRequestWithContext(ctx, method, url, bodyReader) - } else { - req, err = http.NewRequestWithContext(ctx, method, url, nil) - } - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - - req.Header.Set("Content-Type", "application/json") - - return c.HTTPClient.Do(req) -} - -func (c *client) doRequest(ctx context.Context, method, path string, body interface{}, result interface{}) error { - resp, err := c.startRequest(ctx, method, path, body) - if err != nil { - return fmt.Errorf("error making request: %w", err) - } - defer resp.Body.Close() - - if resp.StatusCode >= 400 { - return fmt.Errorf("request failed with status: %s", resp.Status) - } - - b, err := io.ReadAll(resp.Body) - if err != nil { - return fmt.Errorf("error reading response: %w", err) - } - - // Try decoding into APIResponse first - var apiResp APIResponse - - decoder := json.NewDecoder(bytes.NewReader(b)) - decoder.DisallowUnknownFields() - if err := decoder.Decode(&apiResp); err != nil { - // Trying the base value - return json.Unmarshal(b, result) - } else { - // Check response status - if !apiResp.Status { - return fmt.Errorf("api error: [%+v]", apiResp) - } - - // If caller wants the result, marshal the Data field into their result type - if result != nil { - dataBytes, err := json.Marshal(apiResp.Data) - if err != nil { - return fmt.Errorf("error re-marshaling data: %w", err) - } - - if err := json.Unmarshal(dataBytes, result); err != nil { - return fmt.Errorf("error unmarshaling into result: %w", err) - } - } - } - - return nil -} - -type InvokeTaskRequest struct { - Task string `json:"task"` - TeamConfig *api.Component `json:"team_config"` - Messages []Event `json:"messages"` -} - -type InvokeTaskResult struct { - Duration float64 `json:"duration"` - TaskResult TaskResult 
`json:"task_result"` - Usage string `json:"usage"` -} - -func (c *client) InvokeTask(ctx context.Context, req *InvokeTaskRequest) (*InvokeTaskResult, error) { - var invoke InvokeTaskResult - { - bytes, err := json.Marshal(req) - if err != nil { - return nil, fmt.Errorf("error marshaling request: %w", err) - } - fmt.Println(string(bytes)) - } - - err := c.doRequest(ctx, "POST", "/invoke", req, &invoke) - return &invoke, err -} - -func (c *client) InvokeTaskStream(ctx context.Context, req *InvokeTaskRequest) (<-chan *SseEvent, error) { - resp, err := c.startRequest(ctx, "POST", "/invoke/stream", req) - if err != nil { - return nil, err - } - ch := streamSseResponse(resp.Body) - return ch, nil -} - -type ToolServerRequest struct { - Server *api.Component `json:"server"` -} - -type NamedTool struct { - Name string `json:"name"` - Component *api.Component `json:"component"` -} - -type ToolServerResponse struct { - Tools []*NamedTool `json:"tools"` -} - -func (c *client) FetchTools(ctx context.Context, req *ToolServerRequest) (*ToolServerResponse, error) { - var tools ToolServerResponse - err := c.doRequest(ctx, "POST", "/toolservers", req, &tools) - if err != nil { - return nil, err - } - - return &tools, err -} - -type ValidationRequest struct { - Component *api.Component `json:"component"` -} - -type ValidationError struct { - Field string `json:"field"` - Error string `json:"error"` - Suggestion *string `json:"suggestion,omitempty"` -} - -type ValidationResponse struct { - IsValid bool `json:"is_valid"` - Errors []*ValidationError `json:"errors"` - Warnings []*ValidationError `json:"warnings"` -} - -func (r ValidationResponse) ErrorMsg() string { - var msg string - for _, e := range r.Errors { - msg += fmt.Sprintf("Error: %s\n [%s]\n", e.Error, e.Field) - if e.Suggestion != nil { - msg += fmt.Sprintf("Suggestion: %s\n", *e.Suggestion) - } - } - for _, w := range r.Warnings { - msg += fmt.Sprintf("Warning: %s\n [%s]\n", w.Error, w.Field) - if w.Suggestion != nil { - msg += fmt.Sprintf("Suggestion: %s\n", *w.Suggestion) - } - } - - return msg -} - -func (c *client) Validate(ctx context.Context, req *ValidationRequest) (*ValidationResponse, error) { - var resp ValidationResponse - err := c.doRequest(ctx, "POST", "/validate", req, &resp) - return &resp, err -} - -func (c *client) ListSupportedModels(ctx context.Context) (*ProviderModels, error) { - var models ProviderModels - err := c.doRequest(ctx, "GET", "/models", nil, &models) - return &models, err -} diff --git a/go/internal/autogen/client/fake/client.go b/go/internal/autogen/client/fake/client.go deleted file mode 100644 index 1c14cc63c..000000000 --- a/go/internal/autogen/client/fake/client.go +++ /dev/null @@ -1,136 +0,0 @@ -package fake - -import ( - "context" - "encoding/json" - "fmt" - "sync" - - autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client" -) - -type InMemoryAutogenClient struct { - mu sync.RWMutex - - // Minimal storage for FetchTools functionality - toolsByServer map[string][]*autogen_client.NamedTool -} - -func NewInMemoryAutogenClient() *InMemoryAutogenClient { - return &InMemoryAutogenClient{ - toolsByServer: make(map[string][]*autogen_client.NamedTool), - } -} - -// NewMockAutogenClient creates a new in-memory autogen client for backward compatibility -func NewMockAutogenClient() *InMemoryAutogenClient { - return NewInMemoryAutogenClient() -} - -// GetVersion implements the Client interface -func (m *InMemoryAutogenClient) GetVersion(_ context.Context) (string, error) { - return "1.0.0-inmemory", nil 
-} - -// InvokeTask implements the Client interface -func (m *InMemoryAutogenClient) InvokeTask(ctx context.Context, req *autogen_client.InvokeTaskRequest) (*autogen_client.InvokeTaskResult, error) { - - // Determine the response based on context (session/no session) - // If Messages is set (even if empty), it's a session-based call - - return &autogen_client.InvokeTaskResult{ - TaskResult: autogen_client.TaskResult{ - Messages: []autogen_client.Event{ - &autogen_client.TextMessage{ - BaseChatMessage: autogen_client.BaseChatMessage{ - BaseEvent: autogen_client.BaseEvent{ - Type: "TextMessage", - }, - Source: "assistant", - Metadata: map[string]string{}, - ModelsUsage: &autogen_client.ModelsUsage{ - PromptTokens: 0, - CompletionTokens: 0, - }, - }, - Content: fmt.Sprintf("Session task completed: %s", req.Task), - }, - }, - }, - }, nil -} - -// InvokeTaskStream implements the Client interface -func (m *InMemoryAutogenClient) InvokeTaskStream(ctx context.Context, req *autogen_client.InvokeTaskRequest) (<-chan *autogen_client.SseEvent, error) { - ch := make(chan *autogen_client.SseEvent, 1) - go func() { - defer close(ch) - // Create a proper TextMessage event in JSON format - textEvent := map[string]interface{}{ - "type": "TextMessage", - "source": "assistant", - "content": fmt.Sprintf("Session task completed: %s", req.Task), - "metadata": map[string]string{}, - "models_usage": map[string]interface{}{ - "prompt_tokens": 0, - "completion_tokens": 0, - }, - } - - jsonData, err := json.Marshal(textEvent) - if err != nil { - return - } - - ch <- &autogen_client.SseEvent{ - Event: "message", - Data: jsonData, - } - }() - - return ch, nil -} - -// FetchTools implements the Client interface -func (m *InMemoryAutogenClient) FetchTools(ctx context.Context, req *autogen_client.ToolServerRequest) (*autogen_client.ToolServerResponse, error) { - m.mu.RLock() - defer m.mu.RUnlock() - - tools, exists := m.toolsByServer[req.Server.Label] - if !exists { - return &autogen_client.ToolServerResponse{ - Tools: []*autogen_client.NamedTool{}, - }, nil - } - - return &autogen_client.ToolServerResponse{ - Tools: tools, - }, nil -} - -// Validate implements the Client interface -func (m *InMemoryAutogenClient) Validate(ctx context.Context, req *autogen_client.ValidationRequest) (*autogen_client.ValidationResponse, error) { - return &autogen_client.ValidationResponse{ - IsValid: true, - Errors: []*autogen_client.ValidationError{}, - Warnings: []*autogen_client.ValidationError{}, - }, nil -} - -// Helper method to add tools for testing purposes (not part of the interface) -func (m *InMemoryAutogenClient) AddToolsForServer(serverLabel string, tools []*autogen_client.NamedTool) { - m.mu.Lock() - defer m.mu.Unlock() - - m.toolsByServer[serverLabel] = tools -} - -func (m *InMemoryAutogenClient) ListSupportedModels(ctx context.Context) (*autogen_client.ProviderModels, error) { - return &autogen_client.ProviderModels{ - "openai": { - { - Name: "gpt-4o", - }, - }, - }, nil -} diff --git a/go/internal/autogen/client/messages.go b/go/internal/autogen/client/messages.go deleted file mode 100644 index a5962fe8f..000000000 --- a/go/internal/autogen/client/messages.go +++ /dev/null @@ -1,177 +0,0 @@ -package client - -import ( - "encoding/json" - "fmt" -) - -type ModelsUsage struct { - PromptTokens int `json:"prompt_tokens"` - CompletionTokens int `json:"completion_tokens"` -} - -func (m *ModelsUsage) Add(other *ModelsUsage) { - if other == nil { - return - } - m.PromptTokens += other.PromptTokens - m.CompletionTokens += 
other.CompletionTokens -} - -func (m *ModelsUsage) String() string { - return fmt.Sprintf("Prompt Tokens: %d, Completion Tokens: %d", m.PromptTokens, m.CompletionTokens) -} - -func (m *ModelsUsage) ToMap() map[string]interface{} { - return map[string]interface{}{ - "prompt_tokens": m.PromptTokens, - "completion_tokens": m.CompletionTokens, - } -} - -type Event interface { - GetType() string -} - -type BaseEvent struct { - Type string `json:"type"` -} - -func (e *BaseEvent) GetType() string { - return e.Type -} - -type BaseChatMessage struct { - BaseEvent `json:",inline"` - Source string `json:"source"` - Metadata map[string]string `json:"metadata"` - ModelsUsage *ModelsUsage `json:"models_usage"` -} - -func newBaseChatMessage(source string, eventType string) BaseChatMessage { - return BaseChatMessage{ - BaseEvent: BaseEvent{Type: eventType}, - Source: source, - Metadata: make(map[string]string), - ModelsUsage: &ModelsUsage{}, - } -} - -type TextMessage struct { - BaseChatMessage `json:",inline"` - Content string `json:"content"` -} - -func NewTextMessage(content, source string) *TextMessage { - return &TextMessage{ - BaseChatMessage: newBaseChatMessage(source, TextMessageLabel), - Content: content, - } -} - -type ModelClientStreamingChunkEvent struct { - BaseChatMessage `json:",inline"` - Content string `json:"content"` -} - -type FunctionCall struct { - ID string `json:"id"` - Arguments string `json:"arguments"` - Name string `json:"name"` -} - -type ToolCallRequestEvent struct { - BaseChatMessage `json:",inline"` - Content []FunctionCall `json:"content"` -} - -type FunctionExecutionResult struct { - Name string `json:"name"` - CallID string `json:"call_id"` - Content string `json:"content"` -} - -type ToolCallExecutionEvent struct { - BaseChatMessage `json:",inline"` - Content []FunctionExecutionResult `json:"content"` -} - -type MemoryQueryEvent struct { - BaseChatMessage `json:",inline"` - Content []map[string]interface{} `json:"content"` -} - -type ToolCallSummaryMessage struct { - BaseChatMessage `json:",inline"` - Content string `json:"content"` - ToolCalls []FunctionCall `json:"tool_calls"` - Results []FunctionExecutionResult `json:"results"` -} - -const ( - TextMessageLabel = "TextMessage" - ToolCallRequestEventLabel = "ToolCallRequestEvent" - ToolCallExecutionEventLabel = "ToolCallExecutionEvent" - StopMessageLabel = "StopMessage" - ModelClientStreamingChunkEventLabel = "ModelClientStreamingChunkEvent" - LLMCallEventMessageLabel = "LLMCallEventMessage" - MemoryQueryEventLabel = "MemoryQueryEvent" - ToolCallSummaryMessageLabel = "ToolCallSummaryMessage" -) - -func ParseEvent(event []byte) (Event, error) { - var baseEvent BaseEvent - if err := json.Unmarshal(event, &baseEvent); err != nil { - return nil, err - } - - switch baseEvent.Type { - case TextMessageLabel: - var textMessage TextMessage - if err := json.Unmarshal(event, &textMessage); err != nil { - return nil, err - } - return &textMessage, nil - case ModelClientStreamingChunkEventLabel: - var modelClientStreamingChunkEvent ModelClientStreamingChunkEvent - if err := json.Unmarshal(event, &modelClientStreamingChunkEvent); err != nil { - return nil, err - } - return &modelClientStreamingChunkEvent, nil - case ToolCallRequestEventLabel: - var toolCallRequestEvent ToolCallRequestEvent - if err := json.Unmarshal(event, &toolCallRequestEvent); err != nil { - return nil, err - } - return &toolCallRequestEvent, nil - case ToolCallExecutionEventLabel: - var toolCallExecutionEvent ToolCallExecutionEvent - if err := 
json.Unmarshal(event, &toolCallExecutionEvent); err != nil { - return nil, err - } - return &toolCallExecutionEvent, nil - case MemoryQueryEventLabel: - var memoryQueryEvent MemoryQueryEvent - if err := json.Unmarshal(event, &memoryQueryEvent); err != nil { - return nil, err - } - return &memoryQueryEvent, nil - case ToolCallSummaryMessageLabel: - var ToolCallSummaryMessage ToolCallSummaryMessage - if err := json.Unmarshal(event, &ToolCallSummaryMessage); err != nil { - return nil, err - } - return &ToolCallSummaryMessage, nil - default: - return nil, fmt.Errorf("unknown event type: %s", baseEvent.Type) - } -} - -func GetLastStringMessage(events []Event) string { - for i := len(events) - 1; i >= 0; i-- { - if _, ok := events[i].(*TextMessage); ok { - return events[i].(*TextMessage).Content - } - } - return "" -} diff --git a/go/internal/autogen/client/types.go b/go/internal/autogen/client/types.go deleted file mode 100644 index a2b7ab5d5..000000000 --- a/go/internal/autogen/client/types.go +++ /dev/null @@ -1,92 +0,0 @@ -package client - -import ( - "bufio" - "bytes" - "encoding/json" - "fmt" - "io" -) - -type TaskResult struct { - // These are all of type Event, but we don't want to unmarshal them here - // because we want to handle them in the caller - Messages []Event `json:"messages"` - StopReason string `json:"stop_reason"` -} - -func (t *TaskResult) UnmarshalJSON(data []byte) error { - // Create a temporary struct with Messages as raw JSON - type temp struct { - Messages []json.RawMessage `json:"messages"` - StopReason string `json:"stop_reason"` - } - - var tmp temp - if err := json.Unmarshal(data, &tmp); err != nil { - return err - } - - // Parse each raw message into an Event - events := make([]Event, len(tmp.Messages)) - for i, rawMsg := range tmp.Messages { - event, err := ParseEvent(rawMsg) - if err != nil { - return fmt.Errorf("failed to parse event at index %d: %w", i, err) - } - events[i] = event - } - - // Set the fields - t.Messages = events - t.StopReason = tmp.StopReason - - return nil -} - -// APIResponse is the common response wrapper for all API responses -type APIResponse struct { - Status bool `json:"status"` - Message string `json:"message"` - Data interface{} `json:"data"` -} - -// ProviderModels maps provider names to a list of their supported model names. -type ProviderModels map[string][]ModelInfo - -// ModelInfo holds details about a specific model. 
-type ModelInfo struct { - Name string `json:"name"` - FunctionCalling bool `json:"function_calling"` -} - -type SseEvent struct { - Event string `json:"event"` - Data []byte `json:"data"` -} - -func (e *SseEvent) String() string { - return fmt.Sprintf("event: %s\ndata: %s\n\n", e.Event, e.Data) -} - -func streamSseResponse(r io.ReadCloser) chan *SseEvent { - scanner := bufio.NewScanner(r) - ch := make(chan *SseEvent, 10) - go func() { - defer close(ch) - defer r.Close() - currentEvent := &SseEvent{} - for scanner.Scan() { - line := scanner.Bytes() - if bytes.HasPrefix(line, []byte("event:")) { - currentEvent.Event = string(bytes.TrimSpace(bytes.TrimPrefix(line, []byte("event:")))) - } - if bytes.HasPrefix(line, []byte("data:")) { - currentEvent.Data = bytes.TrimSpace(bytes.TrimPrefix(line, []byte("data:"))) - ch <- currentEvent - currentEvent = &SseEvent{} - } - } - }() - return ch -} diff --git a/go/internal/autogen/client/types_test.go b/go/internal/autogen/client/types_test.go deleted file mode 100644 index 1517bbb68..000000000 --- a/go/internal/autogen/client/types_test.go +++ /dev/null @@ -1,281 +0,0 @@ -package client - -import ( - "io" - "strings" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// mockReadCloser implements io.ReadCloser for testing -type mockReadCloser struct { - *strings.Reader -} - -func (m *mockReadCloser) Close() error { - return nil -} - -func newMockReadCloser(data string) io.ReadCloser { - return &mockReadCloser{ - Reader: strings.NewReader(data), - } -} - -func TestStreamSseResponse(t *testing.T) { - t.Run("should parse single SSE event with event and data", func(t *testing.T) { - sseData := "event:message\ndata:hello world\n" - reader := newMockReadCloser(sseData) - - ch := streamSseResponse(reader) - - // Read the event from the channel - select { - case event := <-ch: - require.NotNil(t, event) - assert.Equal(t, "message", event.Event) - assert.Equal(t, []byte("hello world"), event.Data) - case <-time.After(time.Second): - t.Fatal("timeout waiting for event") - } - - // Verify channel is closed - select { - case _, ok := <-ch: - assert.False(t, ok, "channel should be closed") - case <-time.After(time.Second): - t.Fatal("timeout waiting for channel to close") - } - }) - - t.Run("should parse multiple SSE events", func(t *testing.T) { - sseData := "event:message\ndata:first message\nevent:update\ndata:second message\n" - reader := newMockReadCloser(sseData) - - ch := streamSseResponse(reader) - - // Read first event - select { - case event := <-ch: - require.NotNil(t, event) - assert.Equal(t, "message", event.Event) - assert.Equal(t, []byte("first message"), event.Data) - case <-time.After(time.Second): - t.Fatal("timeout waiting for first event") - } - - // Read second event - select { - case event := <-ch: - require.NotNil(t, event) - assert.Equal(t, "update", event.Event) - assert.Equal(t, []byte("second message"), event.Data) - case <-time.After(time.Second): - t.Fatal("timeout waiting for second event") - } - - // Verify channel is closed - select { - case _, ok := <-ch: - assert.False(t, ok, "channel should be closed") - case <-time.After(time.Second): - t.Fatal("timeout waiting for channel to close") - } - }) - - t.Run("should handle data-only events without event type", func(t *testing.T) { - sseData := "data:message without event type\n" - reader := newMockReadCloser(sseData) - - ch := streamSseResponse(reader) - - // Read the event from the channel - select { - case event := <-ch: - 
require.NotNil(t, event) - assert.Equal(t, "", event.Event) // Empty event type - assert.Equal(t, []byte("message without event type"), event.Data) - case <-time.After(time.Second): - t.Fatal("timeout waiting for event") - } - - // Verify channel is closed - select { - case _, ok := <-ch: - assert.False(t, ok, "channel should be closed") - case <-time.After(time.Second): - t.Fatal("timeout waiting for channel to close") - } - }) - - t.Run("should handle empty data", func(t *testing.T) { - sseData := "event:empty\ndata:\n" - reader := newMockReadCloser(sseData) - - ch := streamSseResponse(reader) - - // Read the event from the channel - select { - case event := <-ch: - require.NotNil(t, event) - assert.Equal(t, "empty", event.Event) - assert.Equal(t, ([]byte)(nil), event.Data) - case <-time.After(time.Second): - t.Fatal("timeout waiting for event") - } - - // Verify channel is closed - select { - case _, ok := <-ch: - assert.False(t, ok, "channel should be closed") - case <-time.After(time.Second): - t.Fatal("timeout waiting for channel to close") - } - }) - - t.Run("should handle JSON data", func(t *testing.T) { - jsonData := `{"message": "hello", "count": 42}` - sseData := "event:json\ndata:" + jsonData + "\n" - reader := newMockReadCloser(sseData) - - ch := streamSseResponse(reader) - - // Read the event from the channel - select { - case event := <-ch: - require.NotNil(t, event) - assert.Equal(t, "json", event.Event) - assert.Equal(t, []byte(jsonData), event.Data) - case <-time.After(time.Second): - t.Fatal("timeout waiting for event") - } - - // Verify channel is closed - select { - case _, ok := <-ch: - assert.False(t, ok, "channel should be closed") - case <-time.After(time.Second): - t.Fatal("timeout waiting for channel to close") - } - }) - - t.Run("should handle empty input", func(t *testing.T) { - reader := newMockReadCloser("") - - ch := streamSseResponse(reader) - - // Verify channel is closed immediately - select { - case _, ok := <-ch: - assert.False(t, ok, "channel should be closed") - case <-time.After(time.Second): - t.Fatal("timeout waiting for channel to close") - } - }) - - t.Run("should ignore lines that don't start with event: or data:", func(t *testing.T) { - sseData := "comment: this is a comment\nevent:test\nother line\ndata:test data\n" - reader := newMockReadCloser(sseData) - - ch := streamSseResponse(reader) - - // Read the event from the channel - select { - case event := <-ch: - require.NotNil(t, event) - assert.Equal(t, "test", event.Event) - assert.Equal(t, []byte("test data"), event.Data) - case <-time.After(time.Second): - t.Fatal("timeout waiting for event") - } - - // Verify channel is closed - select { - case _, ok := <-ch: - assert.False(t, ok, "channel should be closed") - case <-time.After(time.Second): - t.Fatal("timeout waiting for channel to close") - } - }) - - t.Run("should handle event type without corresponding data", func(t *testing.T) { - sseData := "event:orphan\nevent:complete\ndata:complete data\n" - reader := newMockReadCloser(sseData) - - ch := streamSseResponse(reader) - - // Should only receive the complete event (the orphan event has no data line) - select { - case event := <-ch: - require.NotNil(t, event) - assert.Equal(t, "complete", event.Event) - assert.Equal(t, []byte("complete data"), event.Data) - case <-time.After(time.Second): - t.Fatal("timeout waiting for event") - } - - // Verify channel is closed - select { - case _, ok := <-ch: - assert.False(t, ok, "channel should be closed") - case <-time.After(time.Second): - 
t.Fatal("timeout waiting for channel to close") - } - }) - - t.Run("should handle complex multiline scenario", func(t *testing.T) { - sseData := `event:start -data:starting process - -event:progress -data:50% complete - -event:end -data:process finished -` - reader := newMockReadCloser(sseData) - - ch := streamSseResponse(reader) - - // Read first event - select { - case event := <-ch: - require.NotNil(t, event) - assert.Equal(t, "start", event.Event) - assert.Equal(t, []byte("starting process"), event.Data) - case <-time.After(time.Second): - t.Fatal("timeout waiting for first event") - } - - // Read second event - select { - case event := <-ch: - require.NotNil(t, event) - assert.Equal(t, "progress", event.Event) - assert.Equal(t, []byte("50% complete"), event.Data) - case <-time.After(time.Second): - t.Fatal("timeout waiting for second event") - } - - // Read third event - select { - case event := <-ch: - require.NotNil(t, event) - assert.Equal(t, "end", event.Event) - assert.Equal(t, []byte("process finished"), event.Data) - case <-time.After(time.Second): - t.Fatal("timeout waiting for third event") - } - - // Verify channel is closed - select { - case _, ok := <-ch: - assert.False(t, ok, "channel should be closed") - case <-time.After(time.Second): - t.Fatal("timeout waiting for channel to close") - } - }) -} diff --git a/go/internal/database/client.go b/go/internal/database/client.go index 842928628..0d245a528 100644 --- a/go/internal/database/client.go +++ b/go/internal/database/client.go @@ -4,54 +4,52 @@ import ( "encoding/json" "fmt" "slices" + "time" - autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client" - "github.com/kagent-dev/kagent/go/internal/utils" + "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" "gorm.io/gorm" "trpc.group/trpc-go/trpc-a2a-go/protocol" ) type Client interface { - CreateFeedback(feedback *Feedback) error - CreateSession(session *Session) error - CreateAgent(agent *Agent) error - CreateToolServer(toolServer *ToolServer) (*ToolServer, error) - CreateMessages(messages ...*protocol.Message) error - CreateTask(task *protocol.Task) error - CreatePushNotification(taskID string, config *protocol.TaskPushNotificationConfig) error - - UpsertAgent(agent *Agent) error - - RefreshToolsForServer(serverName string, tools []*autogen_client.NamedTool) error - + // Store methods + StoreFeedback(feedback *Feedback) error + StoreSession(session *Session) error + StoreAgent(agent *Agent) error + StoreTask(task *protocol.Task) error + StorePushNotification(config *protocol.TaskPushNotificationConfig) error + StoreToolServer(toolServer *ToolServer) (*ToolServer, error) + StoreEvents(messages ...*Event) error + + // Delete methods DeleteSession(sessionName string, userID string) error - DeleteAgent(agentName string) error + DeleteAgent(agentID string) error DeleteToolServer(serverName string) error DeleteTask(taskID string) error + DeletePushNotification(taskID string) error - UpdateSession(session *Session) error - UpdateToolServer(server *ToolServer) error - UpdateAgent(agent *Agent) error - UpdateTask(task *Task) error - + // Get methods GetSession(name string, userID string) (*Session, error) GetAgent(name string) (*Agent, error) + GetTask(id string) (*protocol.Task, error) GetTool(name string) (*Tool, error) GetToolServer(name string) (*ToolServer, error) - GetTask(taskID string) (*Task, error) - GetMessage(messageID string) (*Message, error) - GetPushNotification(taskID string) (*protocol.TaskPushNotificationConfig, error) + 
GetPushNotification(taskID string, configID string) (*protocol.TaskPushNotificationConfig, error) + // List methods ListTools() ([]Tool, error) ListFeedback(userID string) ([]Feedback, error) - ListSessionTasks(sessionID string, userID string) ([]Task, error) + ListTasksForSession(sessionID string) ([]*protocol.Task, error) ListSessions(userID string) ([]Session, error) - ListSessionsForAgent(agentID uint, userID string) ([]Session, error) + ListSessionsForAgent(agentID string, userID string) ([]Session, error) ListAgents() ([]Agent, error) ListToolServers() ([]ToolServer, error) ListToolsForServer(serverName string) ([]Tool, error) - ListMessagesForTask(taskID, userID string) ([]Message, error) - ListMessagesForSession(sessionID, userID string) ([]Message, error) + ListEventsForSession(sessionID, userID string, options QueryOptions) ([]*Event, error) + ListPushNotifications(taskID string) ([]*protocol.TaskPushNotificationConfig, error) + + // Helper methods + RefreshToolsForServer(serverName string, tools ...*v1alpha1.MCPTool) error } type clientImpl struct { @@ -65,33 +63,18 @@ func NewClient(dbManager *Manager) Client { } // CreateFeedback creates a new feedback record -func (c *clientImpl) CreateFeedback(feedback *Feedback) error { - return create(c.db, feedback) +func (c *clientImpl) StoreFeedback(feedback *Feedback) error { + return save(c.db, feedback) } // CreateSession creates a new session record -func (c *clientImpl) CreateSession(session *Session) error { - return create(c.db, session) +func (c *clientImpl) StoreSession(session *Session) error { + return save(c.db, session) } // CreateAgent creates a new agent record -func (c *clientImpl) CreateAgent(agent *Agent) error { - return create(c.db, agent) -} - -// CreateTask creates a new task record -func (c *clientImpl) CreateTask(task *protocol.Task) error { - data, err := json.Marshal(task) - if err != nil { - return fmt.Errorf("failed to serialize task: %w", err) - } - - return create(c.db, &Task{ - ID: task.ID, - SessionID: task.ContextID, - UserID: utils.GetGlobalUserID(), - Data: string(data), - }) +func (c *clientImpl) StoreAgent(agent *Agent) error { + return save(c.db, agent) } func (c *clientImpl) CreatePushNotification(taskID string, config *protocol.TaskPushNotificationConfig) error { @@ -100,20 +83,17 @@ func (c *clientImpl) CreatePushNotification(taskID string, config *protocol.Task return fmt.Errorf("failed to serialize push notification config: %w", err) } - return create(c.db, &PushNotification{ + dbPushNotification := PushNotification{ TaskID: taskID, Data: string(data), - }) -} + } -// UpsertAgent upserts an agent record -func (c *clientImpl) UpsertAgent(agent *Agent) error { - return upsert(c.db, agent) + return save(c.db, &dbPushNotification) } // CreateToolServer creates a new tool server record -func (c *clientImpl) CreateToolServer(toolServer *ToolServer) (*ToolServer, error) { - err := create(c.db, toolServer) +func (c *clientImpl) StoreToolServer(toolServer *ToolServer) (*ToolServer, error) { + err := save(c.db, toolServer) if err != nil { return nil, err } @@ -121,8 +101,8 @@ func (c *clientImpl) CreateToolServer(toolServer *ToolServer) (*ToolServer, erro } // CreateTool creates a new tool record -func (c *clientImpl) CreateTool(tool *Tool) error { - return create(c.db, tool) +func (c *clientImpl) StoreTool(tool *Tool) error { + return save(c.db, tool) } // DeleteTask deletes a task by ID @@ -138,8 +118,8 @@ func (c *clientImpl) DeleteSession(sessionName string, userID string) error { } // DeleteAgent 
deletes an agent by name and user ID -func (c *clientImpl) DeleteAgent(agentName string) error { - return delete[Agent](c.db, Clause{Key: "name", Value: agentName}) +func (c *clientImpl) DeleteAgent(agentID string) error { + return delete[Agent](c.db, Clause{Key: "id", Value: agentID}) } // DeleteToolServer deletes a tool server by name and user ID @@ -147,18 +127,23 @@ func (c *clientImpl) DeleteToolServer(serverName string) error { return delete[ToolServer](c.db, Clause{Key: "name", Value: serverName}) } -func (c *clientImpl) GetTask(taskID string) (*Task, error) { - return get[Task](c.db, Clause{Key: "id", Value: taskID}) -} - // GetTaskMessages retrieves messages for a specific task -func (c *clientImpl) GetTaskMessages(taskID int) ([]Message, error) { - messages, err := list[Message](c.db, Clause{Key: "task_id", Value: taskID}) +func (c *clientImpl) GetTaskMessages(taskID int) ([]*protocol.Message, error) { + messages, err := list[Event](c.db, Clause{Key: "task_id", Value: taskID}) if err != nil { return nil, err } - return messages, nil + protocolMessages := make([]*protocol.Message, 0, len(messages)) + for _, message := range messages { + var protocolMessage protocol.Message + if err := json.Unmarshal([]byte(message.Data), &protocolMessage); err != nil { + return nil, fmt.Errorf("failed to deserialize message: %w", err) + } + protocolMessages = append(protocolMessages, &protocolMessage) + } + + return protocolMessages, nil } // GetSession retrieves a session by name and user ID @@ -169,8 +154,8 @@ func (c *clientImpl) GetSession(sessionName string, userID string) (*Session, er } // GetAgent retrieves an agent by name and user ID -func (c *clientImpl) GetAgent(agentName string) (*Agent, error) { - return get[Agent](c.db, Clause{Key: "name", Value: agentName}) +func (c *clientImpl) GetAgent(agentID string) (*Agent, error) { + return get[Agent](c.db, Clause{Key: "id", Value: agentID}) } // GetTool retrieves a tool by provider (name) and user ID @@ -193,50 +178,29 @@ func (c *clientImpl) ListFeedback(userID string) ([]Feedback, error) { return feedback, nil } -func (c *clientImpl) CreateMessages(messages ...*protocol.Message) error { - for _, message := range messages { - if message == nil { - continue - } - - data, err := json.Marshal(message) +func (c *clientImpl) StoreEvents(events ...*Event) error { + for _, event := range events { + err := save(c.db, event) if err != nil { - return fmt.Errorf("failed to serialize message: %w", err) - } - - dbMessage := Message{ - ID: message.MessageID, - Data: string(data), - SessionID: message.ContextID, - TaskID: message.TaskID, - UserID: utils.GetGlobalUserID(), - } - - err = create(c.db, &dbMessage) - if err != nil { - return fmt.Errorf("failed to create message: %w", err) + return fmt.Errorf("failed to create event: %w", err) } } return nil } -// ListRuns lists all runs for a user -func (c *clientImpl) ListTasks(userID string) ([]Task, error) { - tasks, err := list[Task](c.db, Clause{Key: "user_id", Value: userID}) +// ListSessionRuns lists all runs for a specific session +func (c *clientImpl) ListTasksForSession(sessionID string) ([]*protocol.Task, error) { + tasks, err := list[Task](c.db, + Clause{Key: "session_id", Value: sessionID}, + ) if err != nil { return nil, err } - return tasks, nil -} -// ListSessionRuns lists all runs for a specific session -func (c *clientImpl) ListSessionTasks(sessionID string, userID string) ([]Task, error) { - return list[Task](c.db, - Clause{Key: "session_id", Value: sessionID}, - Clause{Key: "user_id", 
Value: userID}) + return ParseTasks(tasks) } -func (c *clientImpl) ListSessionsForAgent(agentID uint, userID string) ([]Session, error) { +func (c *clientImpl) ListSessionsForAgent(agentID string, userID string) ([]Session, error) { return list[Session](c.db, Clause{Key: "agent_id", Value: agentID}, Clause{Key: "user_id", Value: userID}) @@ -268,7 +232,8 @@ func (c *clientImpl) ListToolsForServer(serverName string) ([]Tool, error) { } // RefreshToolsForServer refreshes a tool server -func (c *clientImpl) RefreshToolsForServer(serverName string, tools []*autogen_client.NamedTool) error { +// TODO: Use a transaction to ensure atomicity +func (c *clientImpl) RefreshToolsForServer(serverName string, tools ...*v1alpha1.MCPTool) error { existingTools, err := c.ListToolsForServer(serverName) if err != nil { return err @@ -280,21 +245,21 @@ func (c *clientImpl) RefreshToolsForServer(serverName string, tools []*autogen_c // If it's in the existing tools but not in the new tools, delete it for _, tool := range tools { existingToolIndex := slices.IndexFunc(existingTools, func(t Tool) bool { - return t.Name == tool.Name + return t.ID == tool.Name }) if existingToolIndex != -1 { existingTool := existingTools[existingToolIndex] - existingTool.Component = *tool.Component existingTool.ServerName = serverName - err = upsert(c.db, &existingTool) + existingTool.Description = tool.Description + err = save(c.db, &existingTool) if err != nil { return err } } else { - err = create(c.db, &Tool{ - Name: tool.Name, - Component: *tool.Component, - ServerName: serverName, + err = save(c.db, &Tool{ + ID: tool.Name, + ServerName: serverName, + Description: tool.Description, }) if err != nil { return fmt.Errorf("failed to create tool %s: %v", tool.Name, err) @@ -304,80 +269,86 @@ func (c *clientImpl) RefreshToolsForServer(serverName string, tools []*autogen_c // Delete any tools that are in the existing tools but not in the new tools for _, existingTool := range existingTools { - if !slices.ContainsFunc(tools, func(t *autogen_client.NamedTool) bool { - return t.Name == existingTool.Name + if !slices.ContainsFunc(tools, func(t *v1alpha1.MCPTool) bool { + return t.Name == existingTool.ID }) { - err = delete[Tool](c.db, Clause{Key: "name", Value: existingTool.Name}) + err = delete[Tool](c.db, Clause{Key: "name", Value: existingTool.ID}) if err != nil { - return fmt.Errorf("failed to delete tool %s: %v", existingTool.Name, err) + return fmt.Errorf("failed to delete tool %s: %v", existingTool.ID, err) } } } return nil } -// UpdateSession updates a session -func (c *clientImpl) UpdateSession(session *Session) error { - return upsert(c.db, session) -} +// ListMessagesForRun retrieves messages for a specific run (helper method) +func (c *clientImpl) ListMessagesForTask(taskID, userID string) ([]*protocol.Message, error) { + messages, err := list[Event](c.db, + Clause{Key: "task_id", Value: taskID}, + Clause{Key: "user_id", Value: userID}) + if err != nil { + return nil, err + } -// UpdateToolServer updates a tool server -func (c *clientImpl) UpdateToolServer(server *ToolServer) error { - return upsert(c.db, server) + return ParseMessages(messages) } -// UpdateTask updates a task record -func (c *clientImpl) UpdateTask(task *Task) error { - return upsert(c.db, task) +type QueryOptions struct { + Limit int + After time.Time } -// UpdateAgent updates an agent record -func (c *clientImpl) UpdateAgent(agent *Agent) error { - return upsert(c.db, agent) -} +func (c *clientImpl) ListEventsForSession(sessionID, userID string, options 
QueryOptions) ([]*Event, error) {
+	var events []Event
+	query := c.db.
+		Where("session_id = ?", sessionID).
+		Where("user_id = ?", userID).
+		Order("created_at DESC")
 
-// ListMessagesForRun retrieves messages for a specific run (helper method)
-func (c *clientImpl) ListMessagesForTask(taskID, userID string) ([]Message, error) {
-	return list[Message](c.db,
-		Clause{Key: "task_id", Value: taskID},
-		Clause{Key: "user_id", Value: userID})
-}
+	if !options.After.IsZero() {
+		query = query.Where("created_at > ?", options.After)
+	}
 
-func (c *clientImpl) ListMessagesForSession(sessionID, userID string) ([]Message, error) {
-	return list[Message](c.db,
-		Clause{Key: "session_id", Value: sessionID},
-		Clause{Key: "user_id", Value: userID})
-}
+	if options.Limit > 1 {
+		query = query.Limit(options.Limit)
+	}
 
-// GetMessage retrieves a protocol message from the database
-func (c *clientImpl) GetMessage(messageID string) (*Message, error) {
-	dbMessage, err := get[Message](c.db, Clause{Key: "id", Value: messageID})
+	err := query.Find(&events).Error
 	if err != nil {
-		return nil, fmt.Errorf("failed to get message: %w", err)
+		return nil, err
+	}
+
+	protocolEvents := make([]*Event, 0, len(events))
+	for _, event := range events {
+		protocolEvents = append(protocolEvents, &event)
 	}
-	return dbMessage, nil
+
+	return protocolEvents, nil
 }
 
-func (c *clientImpl) GetPushNotification(taskID string) (*protocol.TaskPushNotificationConfig, error) {
-	dbPushNotification, err := get[PushNotification](c.db, Clause{Key: "task_id", Value: taskID})
+// GetMessage retrieves a protocol message from the database
+func (c *clientImpl) GetMessage(messageID string) (*protocol.Message, error) {
+	dbMessage, err := get[Event](c.db, Clause{Key: "id", Value: messageID})
 	if err != nil {
-		return nil, fmt.Errorf("failed to get push notification config: %w", err)
+		return nil, fmt.Errorf("failed to get message: %w", err)
 	}
-	var config protocol.TaskPushNotificationConfig
-	if err := json.Unmarshal([]byte(dbPushNotification.Data), &config); err != nil {
-		return nil, fmt.Errorf("failed to deserialize push notification config: %w", err)
+
+	var message protocol.Message
+	if err := json.Unmarshal([]byte(dbMessage.Data), &message); err != nil {
+		return nil, fmt.Errorf("failed to deserialize message: %w", err)
 	}
-	return &config, nil
+
+	return &message, nil
 }
 
 // DeleteMessage deletes a protocol message from the database
 func (c *clientImpl) DeleteMessage(messageID string) error {
-	return delete[Message](c.db, Clause{Key: "id", Value: messageID})
+	return delete[Event](c.db, Clause{Key: "id", Value: messageID})
 }
 
 // ListMessagesByContextID retrieves messages by context ID with optional limit
 func (c *clientImpl) ListMessagesByContextID(contextID string, limit int) ([]protocol.Message, error) {
-	var dbMessages []Message
+	var dbMessages []Event
 	query := c.db.Where("session_id = ?", contextID).Order("created_at DESC")
 
 	if limit > 0 {
@@ -401,19 +372,93 @@ func (c *clientImpl) ListMessagesByContextID(contextID string, limit int) ([]pro
 	return protocolMessages, nil
 }
 
+// StoreTask stores a MemoryCancellableTask in the database
+func (c *clientImpl) StoreTask(task *protocol.Task) error {
+	data, err := json.Marshal(task)
+	if err != nil {
+		return fmt.Errorf("failed to serialize task: %w", err)
+	}
+
+	dbTask := Task{
+		ID:        task.ID,
+		Data:      string(data),
+		SessionID: task.ContextID,
+	}
+
+	return save(c.db, &dbTask)
+}
+
+// GetTask retrieves a MemoryCancellableTask from the database
+func (c *clientImpl) GetTask(taskID string) (*protocol.Task, error) {
+	dbTask, err := get[Task](c.db, Clause{Key: "id", Value: taskID})
+	if err != nil {
+		return nil, fmt.Errorf("failed to get task: %w", err)
+	}
+
+	var task protocol.Task
+	if err := json.Unmarshal([]byte(dbTask.Data), &task); err != nil {
+		return nil, fmt.Errorf("failed to deserialize task: %w", err)
+	}
+
+	return &task, nil
+}
+
+// TaskExists checks if a task exists in the database
+func (c *clientImpl) TaskExists(taskID string) bool {
+	var count int64
+	c.db.Model(&Task{}).Where("id = ?", taskID).Count(&count)
+	return count > 0
+}
+
 // StorePushNotification stores a push notification configuration in the database
-func (c *clientImpl) StorePushNotification(taskID string, config protocol.TaskPushNotificationConfig) error {
+func (c *clientImpl) StorePushNotification(config *protocol.TaskPushNotificationConfig) error {
 	data, err := json.Marshal(config)
 	if err != nil {
 		return fmt.Errorf("failed to serialize push notification config: %w", err)
 	}
 
 	dbPushNotification := PushNotification{
-		TaskID: taskID,
+		ID:     config.PushNotificationConfig.ID,
+		TaskID: config.TaskID,
 		Data:   string(data),
 	}
 
-	return upsert(c.db, &dbPushNotification)
+	return save(c.db, &dbPushNotification)
+}
+
+// GetPushNotification retrieves a push notification configuration from the database
+func (c *clientImpl) GetPushNotification(taskID string, configID string) (*protocol.TaskPushNotificationConfig, error) {
+	dbPushNotification, err := get[PushNotification](c.db,
+		Clause{Key: "task_id", Value: taskID},
+		Clause{Key: "id", Value: configID})
+	if err != nil {
+		return nil, fmt.Errorf("failed to get push notification config: %w", err)
+	}
+
+	var config protocol.TaskPushNotificationConfig
+	if err := json.Unmarshal([]byte(dbPushNotification.Data), &config); err != nil {
+		return nil, fmt.Errorf("failed to deserialize push notification config: %w", err)
+	}
+
+	return &config, nil
+}
+
+func (c *clientImpl) ListPushNotifications(taskID string) ([]*protocol.TaskPushNotificationConfig, error) {
+	pushNotifications, err := list[PushNotification](c.db, Clause{Key: "task_id", Value: taskID})
+	if err != nil {
+		return nil, err
+	}
+
+	protocolPushNotifications := make([]*protocol.TaskPushNotificationConfig, 0, len(pushNotifications))
+	for _, pushNotification := range pushNotifications {
+		var protocolPushNotification protocol.TaskPushNotificationConfig
+		if err := json.Unmarshal([]byte(pushNotification.Data), &protocolPushNotification); err != nil {
+			return nil, fmt.Errorf("failed to deserialize push notification config: %w", err)
+		}
+		protocolPushNotifications = append(protocolPushNotifications, &protocolPushNotification)
+	}
+
+	return protocolPushNotifications, nil
 }
 
 // DeletePushNotification deletes a push notification configuration from the database
diff --git a/go/internal/database/fake/client.go b/go/internal/database/fake/client.go
index 75a866e67..c81684fbe 100644
--- a/go/internal/database/fake/client.go
+++ b/go/internal/database/fake/client.go
@@ -6,7 +6,7 @@ import (
 	"sort"
 	"sync"
 
-	autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client"
+	"github.com/kagent-dev/kagent/go/controller/api/v1alpha1"
 	"github.com/kagent-dev/kagent/go/internal/database"
 	"gorm.io/gorm"
 	"trpc.group/trpc-go/trpc-a2a-go/protocol"
@@ -21,9 +21,8 @@ type InMemmoryFakeClient struct {
 	agents            map[string]*database.Agent // changed from teams
 	toolServers       map[string]*database.ToolServer
 	tools             map[string]*database.Tool
-	messagesBySession map[string][]*database.Message // key: sessionId
-
messagesByTask map[string][]*database.Message // key: taskID - messages map[string]*database.Message // key: messageID + eventsBySession map[string][]*database.Event // key: sessionId + events map[string]*database.Event // key: eventID pushNotifications map[string]*protocol.TaskPushNotificationConfig // key: taskID nextFeedbackID int } @@ -37,9 +36,8 @@ func NewClient() database.Client { agents: make(map[string]*database.Agent), toolServers: make(map[string]*database.ToolServer), tools: make(map[string]*database.Tool), - messagesBySession: make(map[string][]*database.Message), - messagesByTask: make(map[string][]*database.Message), - messages: make(map[string]*database.Message), + eventsBySession: make(map[string][]*database.Event), + events: make(map[string]*database.Event), pushNotifications: make(map[string]*protocol.TaskPushNotificationConfig), nextFeedbackID: 1, } @@ -60,48 +58,22 @@ func (c *InMemmoryFakeClient) sessionKey(sessionID, userID string) string { return fmt.Sprintf("%s_%s", sessionID, userID) } -func (c *InMemmoryFakeClient) CreatePushNotification(taskID string, config *protocol.TaskPushNotificationConfig) error { +func (c *InMemmoryFakeClient) DeletePushNotification(taskID string) error { c.mu.Lock() defer c.mu.Unlock() - c.pushNotifications[taskID] = config + delete(c.pushNotifications, taskID) return nil } -func (c *InMemmoryFakeClient) GetPushNotification(taskID string) (*protocol.TaskPushNotificationConfig, error) { +func (c *InMemmoryFakeClient) GetPushNotification(taskID, userID string) (*protocol.TaskPushNotificationConfig, error) { c.mu.RLock() defer c.mu.RUnlock() return c.pushNotifications[taskID], nil } -func (c *InMemmoryFakeClient) CreateTask(task *protocol.Task) error { - c.mu.Lock() - defer c.mu.Unlock() - - jsn, err := json.Marshal(task) - if err != nil { - return err - } - c.tasks[task.ID] = &database.Task{ - ID: task.ID, - Data: string(jsn), - } - return nil -} - -func (c *InMemmoryFakeClient) GetMessage(messageID string) (*database.Message, error) { - c.mu.RLock() - defer c.mu.RUnlock() - - message, exists := c.messages[messageID] - if !exists { - return nil, gorm.ErrRecordNotFound - } - return message, nil -} - -func (c *InMemmoryFakeClient) GetTask(taskID string) (*database.Task, error) { +func (c *InMemmoryFakeClient) GetTask(taskID string) (*protocol.Task, error) { c.mu.RLock() defer c.mu.RUnlock() @@ -110,7 +82,12 @@ func (c *InMemmoryFakeClient) GetTask(taskID string) (*database.Task, error) { if !exists { return nil, gorm.ErrRecordNotFound } - return task, nil + parsedTask := &protocol.Task{} + err := json.Unmarshal([]byte(task.Data), parsedTask) + if err != nil { + return nil, err + } + return parsedTask, nil } func (c *InMemmoryFakeClient) DeleteTask(taskID string) error { @@ -122,8 +99,8 @@ func (c *InMemmoryFakeClient) DeleteTask(taskID string) error { return nil } -// CreateFeedback creates a new feedback record -func (c *InMemmoryFakeClient) CreateFeedback(feedback *database.Feedback) error { +// StoreFeedback creates a new feedback record +func (c *InMemmoryFakeClient) StoreFeedback(feedback *database.Feedback) error { c.mu.Lock() defer c.mu.Unlock() @@ -137,34 +114,21 @@ func (c *InMemmoryFakeClient) CreateFeedback(feedback *database.Feedback) error return nil } -// CreateMessages creates a new message record - -func (c *InMemmoryFakeClient) CreateMessages(messages ...*protocol.Message) error { +// StoreEvents creates a new event record +func (c *InMemmoryFakeClient) StoreEvents(events ...*database.Event) error { c.mu.Lock() defer 
c.mu.Unlock() - for _, message := range messages { - jsn, err := json.Marshal(message) - if err != nil { - return err - } - marshaledMessage := &database.Message{ - ID: message.MessageID, - Data: string(jsn), - } - if message.TaskID != nil { - c.messagesByTask[*message.TaskID] = append(c.messagesByTask[*message.TaskID], marshaledMessage) - } - if message.ContextID != nil { - c.messagesBySession[*message.ContextID] = append(c.messagesBySession[*message.ContextID], marshaledMessage) - } + for _, event := range events { + c.events[event.ID] = event + c.eventsBySession[event.SessionID] = append(c.eventsBySession[event.SessionID], event) } return nil } -// CreateSession creates a new session record -func (c *InMemmoryFakeClient) CreateSession(session *database.Session) error { +// StoreSession creates a new session record +func (c *InMemmoryFakeClient) StoreSession(session *database.Session) error { c.mu.Lock() defer c.mu.Unlock() @@ -173,26 +137,42 @@ func (c *InMemmoryFakeClient) CreateSession(session *database.Session) error { return nil } -// CreateAgent creates a new agent record -func (c *InMemmoryFakeClient) CreateAgent(agent *database.Agent) error { +// StoreAgent creates a new agent record +func (c *InMemmoryFakeClient) StoreAgent(agent *database.Agent) error { c.mu.Lock() defer c.mu.Unlock() - c.agents[agent.Name] = agent + c.agents[agent.ID] = agent return nil } -// UpsertAgent upserts an agent record -func (c *InMemmoryFakeClient) UpsertAgent(agent *database.Agent) error { +// StoreTask creates a new task record +func (c *InMemmoryFakeClient) StoreTask(task *protocol.Task) error { c.mu.Lock() defer c.mu.Unlock() - c.agents[agent.Name] = agent + jsn, err := json.Marshal(task) + if err != nil { + return err + } + c.tasks[task.ID] = &database.Task{ + ID: task.ID, + Data: string(jsn), + } return nil } -// CreateToolServer creates a new tool server record -func (c *InMemmoryFakeClient) CreateToolServer(toolServer *database.ToolServer) (*database.ToolServer, error) { +// StorePushNotification creates a new push notification record +func (c *InMemmoryFakeClient) StorePushNotification(config *protocol.TaskPushNotificationConfig) error { + c.mu.Lock() + defer c.mu.Unlock() + + c.pushNotifications[config.TaskID] = config + return nil +} + +// StoreToolServer creates a new tool server record +func (c *InMemmoryFakeClient) StoreToolServer(toolServer *database.ToolServer) (*database.ToolServer, error) { c.mu.Lock() defer c.mu.Unlock() @@ -205,7 +185,7 @@ func (c *InMemmoryFakeClient) CreateTool(tool *database.Tool) error { c.mu.Lock() defer c.mu.Unlock() - c.tools[tool.Name] = tool + c.tools[tool.ID] = tool return nil } @@ -306,15 +286,18 @@ func (c *InMemmoryFakeClient) ListFeedback(userID string) ([]database.Feedback, return result, nil } -// ListSessionTasks lists all tasks for a specific session -func (c *InMemmoryFakeClient) ListSessionTasks(sessionID string, userID string) ([]database.Task, error) { +func (c *InMemmoryFakeClient) ListTasksForSession(sessionID string) ([]*protocol.Task, error) { c.mu.RLock() defer c.mu.RUnlock() - var result []database.Task + var result []*protocol.Task for _, task := range c.tasks { - if task.SessionID == sessionID && task.UserID == userID { - result = append(result, *task) + if task.SessionID == sessionID { + parsed, err := task.Parse() + if err != nil { + return nil, err + } + result = append(result, &parsed) } } return result, nil @@ -338,7 +321,7 @@ func (c *InMemmoryFakeClient) ListSessions(userID string) ([]database.Session, e } // 
ListSessionsForAgent lists all sessions for an agent -func (c *InMemmoryFakeClient) ListSessionsForAgent(agentID uint, userID string) ([]database.Session, error) { +func (c *InMemmoryFakeClient) ListSessionsForAgent(agentID string, userID string) ([]database.Session, error) { c.mu.RLock() defer c.mu.RUnlock() @@ -409,43 +392,33 @@ func (c *InMemmoryFakeClient) ListToolsForServer(serverName string) ([]database. return result, nil } -// ListMessagesForTask retrieves messages for a specific task -func (c *InMemmoryFakeClient) ListMessagesForTask(taskID, userID string) ([]database.Message, error) { +func (c *InMemmoryFakeClient) ListPushNotifications(taskID string) ([]*protocol.TaskPushNotificationConfig, error) { c.mu.RLock() defer c.mu.RUnlock() - messages, exists := c.messagesByTask[taskID] - if !exists { - return []database.Message{}, nil - } - - // Convert []*Message to []Message - result := make([]database.Message, len(messages)) - for i, msg := range messages { - result[i] = *msg + var result []*protocol.TaskPushNotificationConfig + config, exists := c.pushNotifications[taskID] + if exists { + result = append(result, config) } return result, nil } -// ListMessagesForSession retrieves messages for a specific session -func (c *InMemmoryFakeClient) ListMessagesForSession(sessionID, userID string) ([]database.Message, error) { +// ListEventsForSession retrieves events for a specific session +func (c *InMemmoryFakeClient) ListEventsForSession(sessionID, userID string, options database.QueryOptions) ([]*database.Event, error) { c.mu.RLock() defer c.mu.RUnlock() - messages, exists := c.messagesBySession[sessionID] + events, exists := c.eventsBySession[sessionID] if !exists { - return []database.Message{}, nil + return nil, nil } - result := make([]database.Message, len(messages)) - for i, msg := range messages { - result[i] = *msg - } - return result, nil + return events, nil } // RefreshToolsForServer refreshes a tool server -func (c *InMemmoryFakeClient) RefreshToolsForServer(serverName string, tools []*autogen_client.NamedTool) error { +func (c *InMemmoryFakeClient) RefreshToolsForServer(serverName string, tools ...*v1alpha1.MCPTool) error { c.mu.Lock() defer c.mu.Unlock() @@ -478,7 +451,7 @@ func (c *InMemmoryFakeClient) UpdateAgent(agent *database.Agent) error { c.mu.Lock() defer c.mu.Unlock() - c.agents[agent.Name] = agent + c.agents[agent.ID] = agent return nil } @@ -496,7 +469,7 @@ func (c *InMemmoryFakeClient) AddTool(tool *database.Tool) { c.mu.Lock() defer c.mu.Unlock() - c.tools[tool.Name] = tool + c.tools[tool.ID] = tool } // AddTask adds a task for testing purposes @@ -518,7 +491,17 @@ func (c *InMemmoryFakeClient) Clear() { c.agents = make(map[string]*database.Agent) c.toolServers = make(map[string]*database.ToolServer) c.tools = make(map[string]*database.Tool) - c.messagesBySession = make(map[string][]*database.Message) - c.messagesByTask = make(map[string][]*database.Message) + c.eventsBySession = make(map[string][]*database.Event) + c.events = make(map[string]*database.Event) + c.pushNotifications = make(map[string]*protocol.TaskPushNotificationConfig) c.nextFeedbackID = 1 } + +// UpsertAgent upserts an agent record +func (c *InMemmoryFakeClient) UpsertAgent(agent *database.Agent) error { + c.mu.Lock() + defer c.mu.Unlock() + + c.agents[agent.ID] = agent + return nil +} diff --git a/go/internal/database/manager.go b/go/internal/database/manager.go index 055755220..613de3b1a 100644 --- a/go/internal/database/manager.go +++ b/go/internal/database/manager.go @@ -2,6 +2,7 @@ 
package database import ( "fmt" + "os" "sync" "github.com/glebarez/sqlite" @@ -37,19 +38,39 @@ type Config struct { PostgresConfig *PostgresConfig } +const ( + gormLogLevel = "GORM_LOG_LEVEL" +) + // NewManager creates a new database manager func NewManager(config *Config) (*Manager, error) { var db *gorm.DB var err error + logLevel := logger.Silent + if val, ok := os.LookupEnv(gormLogLevel); ok { + switch val { + case "error": + logLevel = logger.Error + case "warn": + logLevel = logger.Warn + case "info": + logLevel = logger.Info + case "silent": + logLevel = logger.Silent + } + } + switch config.DatabaseType { case DatabaseTypeSqlite: db, err = gorm.Open(sqlite.Open(config.SqliteConfig.DatabasePath), &gorm.Config{ - Logger: logger.Default.LogMode(logger.Warn), + Logger: logger.Default.LogMode(logLevel), + TranslateError: true, }) case DatabaseTypePostgres: db, err = gorm.Open(postgres.Open(config.PostgresConfig.URL), &gorm.Config{ - Logger: logger.Default.LogMode(logger.Warn), + Logger: logger.Default.LogMode(logLevel), + TranslateError: true, }) default: return nil, fmt.Errorf("invalid database type: %s", config.DatabaseType) @@ -74,7 +95,7 @@ func (m *Manager) Initialize() error { &Agent{}, &Session{}, &Task{}, - &Message{}, + &Event{}, &PushNotification{}, &Feedback{}, &Tool{}, @@ -100,7 +121,7 @@ func (m *Manager) Reset(recreateTables bool) error { &Agent{}, &Session{}, &Task{}, - &Message{}, + &Event{}, &PushNotification{}, &Feedback{}, &Tool{}, diff --git a/go/internal/database/models.go b/go/internal/database/models.go index 4d481b321..8dea413b1 100644 --- a/go/internal/database/models.go +++ b/go/internal/database/models.go @@ -6,7 +6,8 @@ import ( "errors" "time" - "github.com/kagent-dev/kagent/go/internal/autogen/api" + "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" + "github.com/kagent-dev/kagent/go/internal/adk" "gorm.io/gorm" "trpc.group/trpc-go/trpc-a2a-go/protocol" ) @@ -39,24 +40,26 @@ func (j JSONMap) Value() (driver.Value, error) { // Agent represents an agent configuration type Agent struct { - gorm.Model - Name string `gorm:"unique;not null" json:"name"` - Component api.Component `gorm:"type:json;not null" json:"component"` + ID string `gorm:"primaryKey" json:"id"` + CreatedAt time.Time `gorm:"autoCreateTime" json:"created_at"` + UpdatedAt time.Time `gorm:"autoUpdateTime" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"index" json:"deleted_at"` + + Config *adk.AgentConfig `gorm:"type:json;not null" json:"config"` } -type Message struct { +type Event struct { ID string `gorm:"primaryKey;not null" json:"id"` + SessionID string `gorm:"index" json:"session_id"` UserID string `gorm:"primaryKey;not null" json:"user_id"` CreatedAt time.Time `gorm:"autoCreateTime" json:"created_at"` UpdatedAt time.Time `gorm:"autoUpdateTime" json:"updated_at"` DeletedAt gorm.DeletedAt `gorm:"index" json:"deleted_at"` - Data string `gorm:"type:text;not null" json:"data"` // JSON serialized protocol.Message - SessionID *string `gorm:"index" json:"session_id"` - TaskID *string `gorm:"index" json:"task_id"` + Data string `gorm:"type:text;not null" json:"data"` // JSON serialized protocol.Message } -func (m *Message) Parse() (protocol.Message, error) { +func (m *Event) Parse() (protocol.Message, error) { var data protocol.Message err := json.Unmarshal([]byte(m.Data), &data) if err != nil { @@ -65,33 +68,31 @@ func (m *Message) Parse() (protocol.Message, error) { return data, nil } -func ParseMessages(messages []Message) ([]protocol.Message, error) { - result := 
make([]protocol.Message, 0, len(messages)) +func ParseMessages(messages []Event) ([]*protocol.Message, error) { + result := make([]*protocol.Message, 0, len(messages)) for _, message := range messages { parsedMessage, err := message.Parse() if err != nil { return nil, err } - result = append(result, parsedMessage) + result = append(result, &parsedMessage) } return result, nil } type Session struct { ID string `gorm:"primaryKey;not null" json:"id"` - Name string `gorm:"index;not null" json:"name"` + Name *string `gorm:"index" json:"name,omitempty"` UserID string `gorm:"primaryKey" json:"user_id"` CreatedAt time.Time `gorm:"autoCreateTime" json:"created_at"` UpdatedAt time.Time `gorm:"autoUpdateTime" json:"updated_at"` DeletedAt gorm.DeletedAt `gorm:"index" json:"deleted_at"` - AgentID *uint `gorm:"index" json:"agent_id"` + AgentID *string `gorm:"index" json:"agent_id"` } type Task struct { ID string `gorm:"primaryKey;not null" json:"id"` - Name *string `gorm:"index" json:"name,omitempty"` - UserID string `gorm:"primaryKey" json:"user_id"` CreatedAt time.Time `gorm:"autoCreateTime" json:"created_at"` UpdatedAt time.Time `gorm:"autoUpdateTime" json:"updated_at"` DeletedAt gorm.DeletedAt `gorm:"index" json:"deleted_at"` @@ -108,10 +109,25 @@ func (t *Task) Parse() (protocol.Task, error) { return data, nil } +func ParseTasks(tasks []Task) ([]*protocol.Task, error) { + result := make([]*protocol.Task, 0, len(tasks)) + for _, task := range tasks { + parsedTask, err := task.Parse() + if err != nil { + return nil, err + } + result = append(result, &parsedTask) + } + return result, nil +} + type PushNotification struct { - gorm.Model - TaskID string `gorm:"not null;index" json:"task_id"` - Data string `gorm:"type:text;not null" json:"data"` // JSON serialized push notification config + ID string `gorm:"primaryKey;not null" json:"id"` + TaskID string `gorm:"not null;index" json:"task_id"` + CreatedAt time.Time `gorm:"autoCreateTime" json:"created_at"` + UpdatedAt time.Time `gorm:"autoUpdateTime" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"index" json:"deleted_at"` + Data string `gorm:"type:text;not null" json:"data"` // JSON serialized push notification config } // FeedbackIssueType represents the category of feedback issue @@ -136,23 +152,28 @@ type Feedback struct { // Tool represents a single tool that can be used by an agent type Tool struct { - gorm.Model - Name string `gorm:"index;unique;not null" json:"name"` - Component api.Component `gorm:"type:json;not null" json:"component"` - ServerName string `gorm:"not null;index" json:"server_name,omitempty"` + ID string `gorm:"primaryKey;not null" json:"id"` + ServerName string `gorm:"primaryKey;not null" json:"server_name"` + CreatedAt time.Time `gorm:"autoCreateTime" json:"created_at"` + UpdatedAt time.Time `gorm:"autoUpdateTime" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"index" json:"deleted_at"` + Description string `json:"description"` } // ToolServer represents a tool server that provides tools type ToolServer struct { - gorm.Model - Name string `gorm:"primaryKey;not null" json:"name"` - LastConnected *time.Time `json:"last_connected,omitempty"` - Component api.Component `gorm:"type:json;not null" json:"component"` + CreatedAt time.Time `gorm:"autoCreateTime" json:"created_at"` + UpdatedAt time.Time `gorm:"autoUpdateTime" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"index" json:"deleted_at"` + Name string `gorm:"primaryKey;not null" json:"name"` + Description string `json:"description"` + LastConnected *time.Time 
`json:"last_connected,omitempty"` + Config v1alpha1.ToolServerConfig `gorm:"type:json" json:"config"` } // TableName methods to match Python table names func (Agent) TableName() string { return "agent" } -func (Message) TableName() string { return "message" } +func (Event) TableName() string { return "event" } func (Session) TableName() string { return "session" } func (Task) TableName() string { return "task" } func (PushNotification) TableName() string { return "push_notification" } diff --git a/go/internal/database/service.go b/go/internal/database/service.go index b626df4eb..d8617b876 100644 --- a/go/internal/database/service.go +++ b/go/internal/database/service.go @@ -45,22 +45,20 @@ func get[T Model](db *gorm.DB, clauses ...Clause) (*T, error) { return &model, nil } -func create[T Model](db *gorm.DB, model *T) error { - err := db.Create(model).Error - if err != nil { +// TODO: Make this upsert actually idempotent +// args: +// - db: the database connection +// - model: the model to save +func save[T Model](db *gorm.DB, model *T) error { + if err := db.Create(model).Error; err != nil { + if err == gorm.ErrDuplicatedKey { + return db.Save(model).Error + } return fmt.Errorf("failed to create model: %w", err) } return nil } -func upsert[T Model](db *gorm.DB, model *T) error { - err := db.Save(model).Error - if err != nil { - return fmt.Errorf("failed to update model: %w", err) - } - return nil -} - func delete[T Model](db *gorm.DB, clauses ...Clause) error { t := new(T) query := db diff --git a/go/internal/httpserver/errors/errors.go b/go/internal/httpserver/errors/errors.go index 002e8b79b..1257fc866 100644 --- a/go/internal/httpserver/errors/errors.go +++ b/go/internal/httpserver/errors/errors.go @@ -73,3 +73,11 @@ func NewConflictError(message string, err error) *APIError { Err: err, } } + +func NewNotImplementedError(message string, err error) *APIError { + return &APIError{ + Code: http.StatusNotImplemented, + Message: message, + Err: err, + } +} diff --git a/go/internal/httpserver/handlers/agents.go b/go/internal/httpserver/handlers/agents.go index d9316965d..a19454c78 100644 --- a/go/internal/httpserver/handlers/agents.go +++ b/go/internal/httpserver/handlers/agents.go @@ -2,15 +2,11 @@ package handlers import ( "context" - "fmt" "net/http" - "strings" "github.com/go-logr/logr" "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" "github.com/kagent-dev/kagent/go/controller/translator" - autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client" - "github.com/kagent-dev/kagent/go/internal/database" "github.com/kagent-dev/kagent/go/internal/httpserver/errors" "github.com/kagent-dev/kagent/go/internal/utils" common "github.com/kagent-dev/kagent/go/internal/utils" @@ -35,22 +31,22 @@ func (h *AgentsHandler) HandleListAgents(w ErrorResponseWriter, r *http.Request) agentList := &v1alpha1.AgentList{} if err := h.KubeClient.List(r.Context(), agentList); err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to list Teams from Kubernetes", err)) + w.RespondWithError(errors.NewInternalServerError("Failed to list Agents from Kubernetes", err)) return } agentsWithID := make([]api.AgentResponse, 0) - for _, team := range agentList.Items { - teamRef := common.GetObjectRef(&team) - log.V(1).Info("Processing Team", "teamRef", teamRef) + for _, agent := range agentList.Items { + agentRef := common.GetObjectRef(&agent) + log.V(1).Info("Processing Agent", "agentRef", agentRef) - agent, err := h.DatabaseService.GetAgent(teamRef) - if err != nil { - 
w.RespondWithError(errors.NewNotFoundError("Agent not found", err)) - return - } + // dgAgent, err := h.DatabaseService.GetAgent(common.ConvertToPythonIdentifier(agentRef)) + // if err != nil { + // w.RespondWithError(errors.NewNotFoundError("Agent not found", err)) + // return + // } - agentResponse, err := h.getAgentResponse(r.Context(), log, &team, agent) + agentResponse, err := h.getAgentResponse(r.Context(), log, &agent) if err != nil { w.RespondWithError(err) return @@ -64,10 +60,10 @@ func (h *AgentsHandler) HandleListAgents(w ErrorResponseWriter, r *http.Request) RespondWithJSON(w, http.StatusOK, data) } -func (h *AgentsHandler) getAgentResponse(ctx context.Context, log logr.Logger, agent *v1alpha1.Agent, dbAgent *database.Agent) (api.AgentResponse, error) { +func (h *AgentsHandler) getAgentResponse(ctx context.Context, log logr.Logger, agent *v1alpha1.Agent) (api.AgentResponse, error) { agentRef := common.GetObjectRef(agent) - log.V(1).Info("Processing Team", "teamRef", agentRef) + log.V(1).Info("Processing Agent", "agentRef", agentRef) // Get the ModelConfig for the team modelConfig := &v1alpha1.ModelConfig{} @@ -131,9 +127,9 @@ func (h *AgentsHandler) getAgentResponse(ctx context.Context, log logr.Logger, a } return api.AgentResponse{ - ID: dbAgent.ID, - Agent: agent, - Component: &dbAgent.Component, + ID: common.ConvertToPythonIdentifier(agentRef), + Agent: agent, + // Config: dbAgent.Config, ModelProvider: modelConfig.Spec.Provider, Model: modelConfig.Spec.Model, ModelConfigRef: common.GetObjectRef(modelConfig), @@ -172,14 +168,14 @@ func (h *AgentsHandler) HandleGetAgent(w ErrorResponseWriter, r *http.Request) { return } - log.V(1).Info("Getting agent from database") - dbAgent, err := h.DatabaseService.GetAgent(fmt.Sprintf("%s/%s", agentNamespace, agentName)) - if err != nil { - w.RespondWithError(errors.NewNotFoundError("Agent not found", err)) - return - } + // log.V(1).Info("Getting agent from database") + // dbAgent, err := h.DatabaseService.GetAgent(fmt.Sprintf("%s/%s", agentNamespace, agentName)) + // if err != nil { + // w.RespondWithError(errors.NewNotFoundError("Agent not found", err)) + // return + // } - agentResponse, err := h.getAgentResponse(r.Context(), log, agent, dbAgent) + agentResponse, err := h.getAgentResponse(r.Context(), log, agent) if err != nil { w.RespondWithError(err) return @@ -217,52 +213,15 @@ func (h *AgentsHandler) HandleCreateAgent(w ErrorResponseWriter, r *http.Request kubeClientWrapper := utils.NewKubeClientWrapper(h.KubeClient) kubeClientWrapper.AddInMemory(&agentReq) - apiTranslator := translator.NewAutogenApiTranslator( + apiTranslator := translator.NewAdkApiTranslator( kubeClientWrapper, h.DefaultModelConfig, ) - log.V(1).Info("Translating Agent to Autogen format") - autogenAgent, err := apiTranslator.TranslateGroupChatForAgent(r.Context(), &agentReq) - if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to translate Agent to Autogen format", err)) - return - } - - validateReq := autogen_client.ValidationRequest{ - Component: &autogenAgent.Component, - } - - // Validate the team - log.V(1).Info("Validating Team") - validationResp, err := h.AutogenClient.Validate(r.Context(), &validateReq) + log.V(1).Info("Translating Agent to ADK format") + _, err = apiTranslator.TranslateAgent(r.Context(), &agentReq) if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to validate Team", err)) - return - } - - if !validationResp.IsValid { - log.Info("Team validation failed", - "errors", validationResp.Errors, 
- "warnings", validationResp.Warnings) - - // Improved error message with validation details - errorMsg := "Team validation failed: " - if len(validationResp.Errors) > 0 { - // Convert validation errors to strings - errorStrings := make([]string, 0, len(validationResp.Errors)) - for _, validationErr := range validationResp.Errors { - if validationErr != nil { - // Use the error as a string or extract relevant information - errorStrings = append(errorStrings, fmt.Sprintf("%v", validationErr)) - } - } - errorMsg += strings.Join(errorStrings, ", ") - } else { - errorMsg += "unknown validation error" - } - - w.RespondWithError(errors.NewValidationError(errorMsg, nil)) + w.RespondWithError(errors.NewInternalServerError("Failed to translate Agent to ADK format", err)) return } diff --git a/go/internal/httpserver/handlers/agents_test.go b/go/internal/httpserver/handlers/agents_test.go index 6a5061aea..4847ff271 100644 --- a/go/internal/httpserver/handlers/agents_test.go +++ b/go/internal/httpserver/handlers/agents_test.go @@ -15,8 +15,7 @@ import ( "sigs.k8s.io/controller-runtime/pkg/client/fake" "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" - autogen_api "github.com/kagent-dev/kagent/go/internal/autogen/api" - autogen_fake "github.com/kagent-dev/kagent/go/internal/autogen/client/fake" + "github.com/kagent-dev/kagent/go/internal/adk" "github.com/kagent-dev/kagent/go/internal/database" database_fake "github.com/kagent-dev/kagent/go/internal/database/fake" "github.com/kagent-dev/kagent/go/internal/httpserver/handlers" @@ -32,7 +31,7 @@ func createTestModelConfig() *v1alpha1.ModelConfig { Namespace: "default", }, Spec: v1alpha1.ModelConfigSpec{ - Provider: v1alpha1.OpenAI, + Provider: v1alpha1.ModelProviderOpenAI, Model: "gpt-4", }, } @@ -57,12 +56,10 @@ func setupTestHandler(objects ...client.Object) (*handlers.AgentsHandler, string Build() userID := common.GetGlobalUserID() - autogenClient := autogen_fake.NewInMemoryAutogenClient() dbClient := database_fake.NewClient() base := &handlers.Base{ - KubeClient: kubeClient, - AutogenClient: autogenClient, + KubeClient: kubeClient, DefaultModelConfig: types.NamespacedName{ Name: "test-model-config", Namespace: "default", @@ -73,14 +70,12 @@ func setupTestHandler(objects ...client.Object) (*handlers.AgentsHandler, string return handlers.NewAgentsHandler(base), userID } -func createAutogenTeam(client database.Client, agent *v1alpha1.Agent) { - autogenTeam := &database.Agent{ - Component: autogen_api.Component{ - Label: common.GetObjectRef(agent), - }, - Name: common.GetObjectRef(agent), +func createAgent(client database.Client, agent *v1alpha1.Agent) { + dbAgent := &database.Agent{ + Config: &adk.AgentConfig{}, + ID: common.GetObjectRef(agent), } - client.CreateAgent(autogenTeam) + client.StoreAgent(dbAgent) } func TestHandleGetAgent(t *testing.T) { @@ -89,7 +84,7 @@ func TestHandleGetAgent(t *testing.T) { team := createTestAgent("test-team", modelConfig) handler, _ := setupTestHandler(team, modelConfig) - createAutogenTeam(handler.Base.DatabaseService, team) + createAgent(handler.Base.DatabaseService, team) req := httptest.NewRequest("GET", "/api/agents/default/test-team", nil) req = mux.SetURLVars(req, map[string]string{"namespace": "default", "name": "test-team"}) @@ -105,7 +100,7 @@ func TestHandleGetAgent(t *testing.T) { require.Equal(t, "test-team", response.Data.Agent.Name) require.Equal(t, "default/test-model-config", response.Data.ModelConfigRef) require.Equal(t, "gpt-4", response.Data.Model) - require.Equal(t, v1alpha1.OpenAI, 
response.Data.ModelProvider) + require.Equal(t, v1alpha1.ModelProviderOpenAI, response.Data.ModelProvider) }) t.Run("returns 404 for missing agent", func(t *testing.T) { @@ -127,7 +122,7 @@ func TestHandleListTeams(t *testing.T) { team := createTestAgent("test-team", modelConfig) handler, _ := setupTestHandler(team, modelConfig) - createAutogenTeam(handler.Base.DatabaseService, team) + createAgent(handler.Base.DatabaseService, team) req := httptest.NewRequest("GET", "/api/agents", nil) w := httptest.NewRecorder() @@ -143,7 +138,7 @@ func TestHandleListTeams(t *testing.T) { require.Equal(t, "test-team", response.Data[0].Agent.Name) require.Equal(t, "default/test-model-config", response.Data[0].ModelConfigRef) require.Equal(t, "gpt-4", response.Data[0].Model) - require.Equal(t, v1alpha1.OpenAI, response.Data[0].ModelProvider) + require.Equal(t, v1alpha1.ModelProviderOpenAI, response.Data[0].ModelProvider) }) } @@ -216,7 +211,7 @@ func TestHandleCreateAgent(t *testing.T) { agent := &v1alpha1.Agent{ ObjectMeta: metav1.ObjectMeta{Name: "test-team", Namespace: "default"}, Spec: v1alpha1.AgentSpec{ - ModelConfig: common.GetObjectRef(modelConfig), + ModelConfig: modelConfig.Name, SystemMessage: "You are an imagenary agent", Description: "Test team description", }, @@ -237,7 +232,7 @@ func TestHandleCreateAgent(t *testing.T) { require.Equal(t, "test-team", response.Data.Name) require.Equal(t, "default", response.Data.Namespace) require.Equal(t, "You are an imagenary agent", response.Data.Spec.SystemMessage) - require.Equal(t, "default/test-model-config", response.Data.Spec.ModelConfig) + require.Equal(t, "test-model-config", response.Data.Spec.ModelConfig) }) } @@ -248,7 +243,7 @@ func TestHandleDeleteTeam(t *testing.T) { } handler, _ := setupTestHandler(team) - createAutogenTeam(handler.Base.DatabaseService, team) + createAgent(handler.Base.DatabaseService, team) req := httptest.NewRequest("DELETE", "/api/agents/default/test-team", nil) req = mux.SetURLVars(req, map[string]string{"namespace": "default", "name": "test-team"}) diff --git a/go/internal/httpserver/handlers/feedback.go b/go/internal/httpserver/handlers/feedback.go index 1d0eea7b2..35eeda35c 100644 --- a/go/internal/httpserver/handlers/feedback.go +++ b/go/internal/httpserver/handlers/feedback.go @@ -50,7 +50,7 @@ func (h *FeedbackHandler) HandleCreateFeedback(w ErrorResponseWriter, r *http.Re return } - err = h.DatabaseService.CreateFeedback(&feedbackReq) + err = h.DatabaseService.StoreFeedback(&feedbackReq) if err != nil { log.Error(err, "Failed to create feedback") w.RespondWithError(errors.NewInternalServerError("Failed to create feedback", err)) diff --git a/go/internal/httpserver/handlers/handlers.go b/go/internal/httpserver/handlers/handlers.go index f22f7032b..66c1f0d62 100644 --- a/go/internal/httpserver/handlers/handlers.go +++ b/go/internal/httpserver/handlers/handlers.go @@ -4,7 +4,6 @@ import ( "k8s.io/apimachinery/pkg/types" "sigs.k8s.io/controller-runtime/pkg/client" - autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client" "github.com/kagent-dev/kagent/go/internal/database" ) @@ -27,16 +26,14 @@ type Handlers struct { // Base holds common dependencies for all handlers type Base struct { KubeClient client.Client - AutogenClient autogen_client.Client DefaultModelConfig types.NamespacedName DatabaseService database.Client } // NewHandlers creates a new Handlers instance with all handler components -func NewHandlers(kubeClient client.Client, autogenClient autogen_client.Client, defaultModelConfig 
types.NamespacedName, dbService database.Client, watchedNamespaces []string) *Handlers { +func NewHandlers(kubeClient client.Client, defaultModelConfig types.NamespacedName, dbService database.Client, watchedNamespaces []string) *Handlers { base := &Base{ KubeClient: kubeClient, - AutogenClient: autogenClient, DefaultModelConfig: defaultModelConfig, DatabaseService: dbService, } diff --git a/go/internal/httpserver/handlers/memory_test.go b/go/internal/httpserver/handlers/memory_test.go index af0b19c46..45c319410 100644 --- a/go/internal/httpserver/handlers/memory_test.go +++ b/go/internal/httpserver/handlers/memory_test.go @@ -19,7 +19,6 @@ import ( "sigs.k8s.io/controller-runtime/pkg/client/fake" "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" - autogen_fake "github.com/kagent-dev/kagent/go/internal/autogen/client/fake" database_fake "github.com/kagent-dev/kagent/go/internal/database/fake" "github.com/kagent-dev/kagent/go/internal/httpserver/handlers" "github.com/kagent-dev/kagent/go/pkg/client/api" @@ -38,7 +37,6 @@ func TestMemoryHandler(t *testing.T) { base := &handlers.Base{ KubeClient: kubeClient, DefaultModelConfig: types.NamespacedName{Namespace: "default", Name: "default"}, - AutogenClient: autogen_fake.NewInMemoryAutogenClient(), DatabaseService: database_fake.NewClient(), } handler := handlers.NewMemoryHandler(base) diff --git a/go/internal/httpserver/handlers/modelconfig.go b/go/internal/httpserver/handlers/modelconfig.go index 080b230e3..6d08e10f4 100644 --- a/go/internal/httpserver/handlers/modelconfig.go +++ b/go/internal/httpserver/handlers/modelconfig.go @@ -222,7 +222,7 @@ func (h *ModelConfigHandler) HandleCreateModelConfig(w ErrorResponseWriter, r *h } // Set secret references if needed, but don't create secret yet - if providerTypeEnum != v1alpha1.Ollama && req.APIKey != "" { + if providerTypeEnum != v1alpha1.ModelProviderOllama && req.APIKey != "" { secretName := modelConfigRef.Name secretNamespace := modelConfigRef.Namespace secretKey := fmt.Sprintf("%s_API_KEY", strings.ToUpper(req.Provider.Type)) @@ -240,21 +240,21 @@ func (h *ModelConfigHandler) HandleCreateModelConfig(w ErrorResponseWriter, r *h var providerConfigErr error switch providerTypeEnum { - case v1alpha1.OpenAI: + case v1alpha1.ModelProviderOpenAI: if req.OpenAIParams != nil { modelConfig.Spec.OpenAI = req.OpenAIParams log.V(1).Info("Assigned OpenAI params to spec") } else { log.V(1).Info("No OpenAI params provided in create.") } - case v1alpha1.Anthropic: + case v1alpha1.ModelProviderAnthropic: if req.AnthropicParams != nil { modelConfig.Spec.Anthropic = req.AnthropicParams log.V(1).Info("Assigned Anthropic params to spec") } else { log.V(1).Info("No Anthropic params provided in create.") } - case v1alpha1.AzureOpenAI: + case v1alpha1.ModelProviderAzureOpenAI: if req.AzureParams == nil { providerConfigErr = fmt.Errorf("azureOpenAI parameters are required for AzureOpenAI provider") } else { @@ -266,7 +266,7 @@ func (h *ModelConfigHandler) HandleCreateModelConfig(w ErrorResponseWriter, r *h log.V(1).Info("Assigned AzureOpenAI params to spec") } } - case v1alpha1.Ollama: + case v1alpha1.ModelProviderOllama: if req.OllamaParams != nil { modelConfig.Spec.Ollama = req.OllamaParams log.V(1).Info("Assigned Ollama params to spec") @@ -290,7 +290,7 @@ func (h *ModelConfigHandler) HandleCreateModelConfig(w ErrorResponseWriter, r *h } log.V(1).Info("Successfully created ModelConfig") - if providerTypeEnum != v1alpha1.Ollama && req.APIKey != "" { + if providerTypeEnum != v1alpha1.ModelProviderOllama && 
req.APIKey != "" { secretName := modelConfigRef.Name secretNamespace := modelConfigRef.Namespace secretKey := fmt.Sprintf("%s_API_KEY", strings.ToUpper(req.Provider.Type)) @@ -388,7 +388,7 @@ func (h *ModelConfigHandler) HandleUpdateModelConfig(w ErrorResponseWriter, r *h } // --- Update Secret if API Key is provided (and not Ollama) --- - shouldUpdateSecret := req.APIKey != nil && *req.APIKey != "" && modelConfig.Spec.Provider != v1alpha1.Ollama + shouldUpdateSecret := req.APIKey != nil && *req.APIKey != "" && modelConfig.Spec.Provider != v1alpha1.ModelProviderOllama if shouldUpdateSecret { log.V(1).Info("Updating API key secret") @@ -408,21 +408,21 @@ func (h *ModelConfigHandler) HandleUpdateModelConfig(w ErrorResponseWriter, r *h var providerConfigErr error switch modelConfig.Spec.Provider { - case v1alpha1.OpenAI: + case v1alpha1.ModelProviderOpenAI: if req.OpenAIParams != nil { modelConfig.Spec.OpenAI = req.OpenAIParams log.V(1).Info("Assigned updated OpenAI params to spec") } else { log.V(1).Info("No OpenAI params provided in update.") } - case v1alpha1.Anthropic: + case v1alpha1.ModelProviderAnthropic: if req.AnthropicParams != nil { modelConfig.Spec.Anthropic = req.AnthropicParams log.V(1).Info("Assigned updated Anthropic params to spec") } else { log.V(1).Info("No Anthropic params provided in update.") } - case v1alpha1.AzureOpenAI: + case v1alpha1.ModelProviderAzureOpenAI: if req.AzureParams == nil { // Allow clearing Azure params if provider changes AWAY from Azure, // but require params if provider IS Azure. @@ -436,7 +436,7 @@ func (h *ModelConfigHandler) HandleUpdateModelConfig(w ErrorResponseWriter, r *h log.V(1).Info("Assigned updated AzureOpenAI params to spec") } } - case v1alpha1.Ollama: + case v1alpha1.ModelProviderOllama: if req.OllamaParams != nil { modelConfig.Spec.Ollama = req.OllamaParams log.V(1).Info("Assigned updated Ollama params to spec") diff --git a/go/internal/httpserver/handlers/modelconfig_test.go b/go/internal/httpserver/handlers/modelconfig_test.go index ced133f26..b7801b544 100644 --- a/go/internal/httpserver/handlers/modelconfig_test.go +++ b/go/internal/httpserver/handlers/modelconfig_test.go @@ -54,7 +54,7 @@ func TestModelConfigHandler(t *testing.T) { }, Spec: v1alpha1.ModelConfigSpec{ Model: "gpt-4", - Provider: v1alpha1.OpenAI, + Provider: v1alpha1.ModelProviderOpenAI, APIKeySecretRef: "test-secret", APIKeySecretKey: "OPENAI_API_KEY", OpenAI: &v1alpha1.OpenAIConfig{ @@ -132,7 +132,7 @@ func TestModelConfigHandler(t *testing.T) { require.NoError(t, err) assert.Equal(t, "test-config", config.Data.Name) assert.Equal(t, "default", config.Data.Namespace) - assert.Equal(t, v1alpha1.OpenAI, config.Data.Spec.Provider) + assert.Equal(t, v1alpha1.ModelProviderOpenAI, config.Data.Spec.Provider) assert.Equal(t, "gpt-4", config.Data.Spec.Model) }) @@ -162,7 +162,7 @@ func TestModelConfigHandler(t *testing.T) { var config api.StandardResponse[v1alpha1.ModelConfig] err := json.Unmarshal(responseRecorder.Body.Bytes(), &config) require.NoError(t, err) - assert.Equal(t, v1alpha1.Anthropic, config.Data.Spec.Provider) + assert.Equal(t, v1alpha1.ModelProviderAnthropic, config.Data.Spec.Provider) }) t.Run("Success_Ollama_NoAPIKey", func(t *testing.T) { @@ -191,7 +191,7 @@ func TestModelConfigHandler(t *testing.T) { var config api.StandardResponse[v1alpha1.ModelConfig] err := json.Unmarshal(responseRecorder.Body.Bytes(), &config) require.NoError(t, err) - assert.Equal(t, v1alpha1.Ollama, config.Data.Spec.Provider) + assert.Equal(t, v1alpha1.ModelProviderOllama, 
config.Data.Spec.Provider) assert.Empty(t, config.Data.Spec.APIKeySecretRef) }) @@ -220,7 +220,7 @@ func TestModelConfigHandler(t *testing.T) { var config api.StandardResponse[v1alpha1.ModelConfig] err := json.Unmarshal(responseRecorder.Body.Bytes(), &config) require.NoError(t, err) - assert.Equal(t, v1alpha1.AzureOpenAI, config.Data.Spec.Provider) + assert.Equal(t, v1alpha1.ModelProviderAzureOpenAI, config.Data.Spec.Provider) }) t.Run("InvalidJSON", func(t *testing.T) { @@ -266,7 +266,7 @@ func TestModelConfigHandler(t *testing.T) { }, Spec: v1alpha1.ModelConfigSpec{ Model: "gpt-4", - Provider: v1alpha1.OpenAI, + Provider: v1alpha1.ModelProviderOpenAI, }, } err := kubeClient.Create(context.Background(), existingConfig) @@ -345,7 +345,7 @@ func TestModelConfigHandler(t *testing.T) { }, Spec: v1alpha1.ModelConfigSpec{ Model: "gpt-4", - Provider: v1alpha1.OpenAI, + Provider: v1alpha1.ModelProviderOpenAI, APIKeySecretRef: "test-secret", APIKeySecretKey: "OPENAI_API_KEY", OpenAI: &v1alpha1.OpenAIConfig{ @@ -409,7 +409,7 @@ func TestModelConfigHandler(t *testing.T) { }, Spec: v1alpha1.ModelConfigSpec{ Model: "gpt-3.5-turbo", - Provider: v1alpha1.OpenAI, + Provider: v1alpha1.ModelProviderOpenAI, OpenAI: &v1alpha1.OpenAIConfig{ BaseURL: "https://api.openai.com/v1", Temperature: "0.5", @@ -508,7 +508,7 @@ func TestModelConfigHandler(t *testing.T) { }, Spec: v1alpha1.ModelConfigSpec{ Model: "gpt-4", - Provider: v1alpha1.OpenAI, + Provider: v1alpha1.ModelProviderOpenAI, }, } diff --git a/go/internal/httpserver/handlers/models.go b/go/internal/httpserver/handlers/models.go index bc56d85ff..17456e56d 100644 --- a/go/internal/httpserver/handlers/models.go +++ b/go/internal/httpserver/handlers/models.go @@ -4,7 +4,6 @@ import ( "net/http" "github.com/kagent-dev/kagent/go/internal/httpserver/errors" - "github.com/kagent-dev/kagent/go/pkg/client/api" ctrllog "sigs.k8s.io/controller-runtime/pkg/log" ) @@ -23,12 +22,8 @@ func (h *ModelHandler) HandleListSupportedModels(w ErrorResponseWriter, r *http. 
log.Info("Listing supported models") - models, err := h.AutogenClient.ListSupportedModels(r.Context()) - if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to list supported models", err)) - return - } + // TODO: Implement this - data := api.NewResponse(models, "Successfully listed supported models", false) - RespondWithJSON(w, http.StatusOK, data) + w.RespondWithError(errors.NewNotImplementedError("Not implemented", nil)) + return } diff --git a/go/internal/httpserver/handlers/providers.go b/go/internal/httpserver/handlers/providers.go index eaa398b7f..c8fd7ba7a 100644 --- a/go/internal/httpserver/handlers/providers.go +++ b/go/internal/httpserver/handlers/providers.go @@ -22,10 +22,10 @@ func NewProviderHandler(base *Base) *ProviderHandler { // Helper function to get JSON keys specifically marked as required func getRequiredKeysForModelProvider(providerType v1alpha1.ModelProvider) []string { switch providerType { - case v1alpha1.AzureOpenAI: + case v1alpha1.ModelProviderAzureOpenAI: // Based on the +required comments in the AzureOpenAIConfig struct definition return []string{"azureEndpoint", "apiVersion"} - case v1alpha1.OpenAI, v1alpha1.Anthropic, v1alpha1.Ollama: + case v1alpha1.ModelProviderOpenAI, v1alpha1.ModelProviderAnthropic, v1alpha1.ModelProviderOllama: // These providers currently have no fields marked as strictly required in the API definition return []string{} default: @@ -93,10 +93,10 @@ func (h *ProviderHandler) HandleListSupportedModelProviders(w ErrorResponseWrite providerEnum v1alpha1.ModelProvider configType reflect.Type }{ - {v1alpha1.OpenAI, reflect.TypeOf(v1alpha1.OpenAIConfig{})}, - {v1alpha1.Anthropic, reflect.TypeOf(v1alpha1.AnthropicConfig{})}, - {v1alpha1.AzureOpenAI, reflect.TypeOf(v1alpha1.AzureOpenAIConfig{})}, - {v1alpha1.Ollama, reflect.TypeOf(v1alpha1.OllamaConfig{})}, + {v1alpha1.ModelProviderOpenAI, reflect.TypeOf(v1alpha1.OpenAIConfig{})}, + {v1alpha1.ModelProviderAnthropic, reflect.TypeOf(v1alpha1.AnthropicConfig{})}, + {v1alpha1.ModelProviderAzureOpenAI, reflect.TypeOf(v1alpha1.AzureOpenAIConfig{})}, + {v1alpha1.ModelProviderOllama, reflect.TypeOf(v1alpha1.OllamaConfig{})}, } providersResponse := []map[string]interface{}{} diff --git a/go/internal/httpserver/handlers/sessions.go b/go/internal/httpserver/handlers/sessions.go index add340b31..b4ec8a4d5 100644 --- a/go/internal/httpserver/handlers/sessions.go +++ b/go/internal/httpserver/handlers/sessions.go @@ -1,11 +1,10 @@ package handlers import ( - "encoding/json" "net/http" + "strconv" + "time" - "github.com/google/uuid" - autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client" "github.com/kagent-dev/kagent/go/internal/database" "github.com/kagent-dev/kagent/go/internal/httpserver/errors" "github.com/kagent-dev/kagent/go/internal/utils" @@ -53,7 +52,7 @@ func (h *SessionsHandler) HandleGetSessionsForAgent(w ErrorResponseWriter, r *ht } // Get agent ID from agent ref - agent, err := h.DatabaseService.GetAgent(namespace + "/" + agentName) + agent, err := h.DatabaseService.GetAgent(utils.ConvertToPythonIdentifier(namespace + "/" + agentName)) if err != nil { w.RespondWithError(errors.NewNotFoundError("Agent not found", err)) return @@ -116,13 +115,12 @@ func (h *SessionsHandler) HandleCreateSession(w ErrorResponseWriter, r *http.Req } log = log.WithValues("agentRef", *sessionRequest.AgentRef) - id := uuid.New().String() - name := id - if sessionRequest.Name != nil { - name = *sessionRequest.Name + id := protocol.GenerateContextID() + if sessionRequest.ID != 
nil && *sessionRequest.ID != "" { + id = *sessionRequest.ID } - agent, err := h.DatabaseService.GetAgent(*sessionRequest.AgentRef) + agent, err := h.DatabaseService.GetAgent(utils.ConvertToPythonIdentifier(*sessionRequest.AgentRef)) if err != nil { w.RespondWithError(errors.NewNotFoundError("Agent not found", err)) return @@ -130,7 +128,7 @@ func (h *SessionsHandler) HandleCreateSession(w ErrorResponseWriter, r *http.Req session := &database.Session{ ID: id, - Name: name, + Name: sessionRequest.Name, UserID: sessionRequest.UserID, AgentID: &agent.ID, } @@ -139,7 +137,7 @@ func (h *SessionsHandler) HandleCreateSession(w ErrorResponseWriter, r *http.Req "agentRef", sessionRequest.AgentRef, "name", sessionRequest.Name) - if err := h.DatabaseService.CreateSession(session); err != nil { + if err := h.DatabaseService.StoreSession(session); err != nil { w.RespondWithError(errors.NewInternalServerError("Failed to create session", err)) return } @@ -149,6 +147,11 @@ func (h *SessionsHandler) HandleCreateSession(w ErrorResponseWriter, r *http.Req RespondWithJSON(w, http.StatusCreated, data) } +type SessionResponse struct { + Session *database.Session `json:"session"` + Events []*database.Event `json:"events"` +} + // HandleGetSession handles GET /api/sessions/{session_id} requests using database func (h *SessionsHandler) HandleGetSession(w ErrorResponseWriter, r *http.Request) { log := ctrllog.FromContext(r.Context()).WithName("sessions-handler").WithValues("operation", "get-db") @@ -174,8 +177,39 @@ func (h *SessionsHandler) HandleGetSession(w ErrorResponseWriter, r *http.Reques return } + queryOptions := database.QueryOptions{ + Limit: 0, + } + after := r.URL.Query().Get("after") + if after != "" { + afterTime, err := time.Parse(time.RFC3339, after) + if err != nil { + w.RespondWithError(errors.NewBadRequestError("Failed to parse after timestamp", err)) + return + } + queryOptions.After = afterTime + } + + limit := r.URL.Query().Get("limit") + if limit != "" { + queryOptions.Limit, err = strconv.Atoi(limit) + if err != nil { + w.RespondWithError(errors.NewBadRequestError("Failed to parse limit", err)) + return + } + } + + events, err := h.DatabaseService.ListEventsForSession(sessionID, userID, queryOptions) + if err != nil { + w.RespondWithError(errors.NewInternalServerError("Failed to get events for session", err)) + return + } + log.Info("Successfully retrieved session") - data := api.NewResponse(session, "Successfully retrieved session", false) + data := api.NewResponse(SessionResponse{ + Session: session, + Events: events, + }, "Successfully retrieved session", false) RespondWithJSON(w, http.StatusOK, data) } @@ -207,7 +241,7 @@ func (h *SessionsHandler) HandleUpdateSession(w ErrorResponseWriter, r *http.Req return } - agent, err := h.DatabaseService.GetAgent(*sessionRequest.AgentRef) + agent, err := h.DatabaseService.GetAgent(utils.ConvertToPythonIdentifier(*sessionRequest.AgentRef)) if err != nil { w.RespondWithError(errors.NewNotFoundError("Agent not found", err)) return @@ -216,7 +250,7 @@ func (h *SessionsHandler) HandleUpdateSession(w ErrorResponseWriter, r *http.Req // Update fields session.AgentID = &agent.ID - if err := h.DatabaseService.UpdateSession(session); err != nil { + if err := h.DatabaseService.StoreSession(session); err != nil { w.RespondWithError(errors.NewInternalServerError("Failed to update session", err)) return } @@ -255,7 +289,7 @@ func (h *SessionsHandler) HandleDeleteSession(w ErrorResponseWriter, r *http.Req } // HandleListSessionRuns handles GET 
/api/sessions/{session_id}/tasks requests using database -func (h *SessionsHandler) HandleListSessionTasks(w ErrorResponseWriter, r *http.Request) { +func (h *SessionsHandler) HandleListTasksForSession(w ErrorResponseWriter, r *http.Request) { log := ctrllog.FromContext(r.Context()).WithName("sessions-handler").WithValues("operation", "list-tasks-db") sessionID, err := GetPathParam(r, "session_id") @@ -272,195 +306,22 @@ func (h *SessionsHandler) HandleListSessionTasks(w ErrorResponseWriter, r *http. } log = log.WithValues("userID", userID) - log.V(1).Info("Getting session tasks from database") - tasks, err := h.DatabaseService.ListSessionTasks(sessionID, userID) - if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to get session runs", err)) - return - } - - log.Info("Successfully retrieved session tasks", "count", len(tasks)) - data := api.NewResponse(tasks, "Successfully retrieved session tasks", false) - RespondWithJSON(w, http.StatusOK, data) -} - -func (h *SessionsHandler) HandleInvokeSession(w ErrorResponseWriter, r *http.Request) { - log := ctrllog.FromContext(r.Context()).WithName("sessions-handler").WithValues("operation", "invoke-session") - - sessionID, err := GetPathParam(r, "session_id") - if err != nil { - w.RespondWithError(errors.NewBadRequestError("Failed to get session ID from path", err)) - return - } - - userID, err := GetUserID(r) - if err != nil { - w.RespondWithError(errors.NewBadRequestError("Failed to get user ID", err)) - return - } - log = log.WithValues("userID", userID) - - var req autogen_client.InvokeTaskRequest - if err := DecodeJSONBody(r, &req); err != nil { - w.RespondWithError(errors.NewBadRequestError("Invalid request body", err)) - return - } - session, err := h.DatabaseService.GetSession(sessionID, userID) - if err != nil { - w.RespondWithError(errors.NewNotFoundError("Session not found", err)) - return - } - - messages, err := h.DatabaseService.ListMessagesForSession(session.ID, userID) - if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to get messages for session", err)) - return - } - - parsedMessages, err := database.ParseMessages(messages) - if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to parse messages", err)) - return - } - - autogenEvents, err := utils.ConvertMessagesToAutogenEvents(parsedMessages) - if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to convert messages to autogen events", err)) - return - } - req.Messages = autogenEvents - - result, err := h.AutogenClient.InvokeTask(r.Context(), &req) - if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to invoke session", err)) - return - } - - messageToSave := utils.ConvertAutogenEventsToMessages(nil, &sessionID, result.TaskResult.Messages...) 
- if err := h.DatabaseService.CreateMessages(messageToSave...); err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to create messages", err)) - return - } - - data := api.NewResponse(result.TaskResult.Messages, "Successfully invoked session", false) - RespondWithJSON(w, http.StatusOK, data) -} - -func (h *SessionsHandler) HandleInvokeSessionStream(w ErrorResponseWriter, r *http.Request) { - log := ctrllog.FromContext(r.Context()).WithName("sessions-handler").WithValues("operation", "invoke-session") - - sessionID, err := GetPathParam(r, "session_id") + // Verify session exists + _, err = h.DatabaseService.GetSession(sessionID, userID) if err != nil { - w.RespondWithError(errors.NewBadRequestError("Failed to get session ID from path", err)) + w.RespondWithError(errors.NewNotFoundError("Session not found for given ID", err)) return } - userID, err := GetUserID(r) - if err != nil { - w.RespondWithError(errors.NewBadRequestError("Failed to get user ID", err)) - return - } - log = log.WithValues("userID", userID) - - var req autogen_client.InvokeTaskRequest - if err := DecodeJSONBody(r, &req); err != nil { - w.RespondWithError(errors.NewBadRequestError("Invalid request body", err)) - return - } - session, err := h.DatabaseService.GetSession(sessionID, userID) - if err != nil { - w.RespondWithError(errors.NewNotFoundError("Session not found", err)) - return - } - - messages, err := h.DatabaseService.ListMessagesForSession(session.ID, userID) - if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to get messages for session", err)) - return - } - - parsedMessages, err := database.ParseMessages(messages) - if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to parse messages", err)) - return - } - - autogenEvents, err := utils.ConvertMessagesToAutogenEvents(parsedMessages) - if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to convert messages to autogen events", err)) - return - } - req.Messages = autogenEvents - - ch, err := h.AutogenClient.InvokeTaskStream(r.Context(), &req) - if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to invoke session", err)) - return - } - - w.Header().Set("Content-Type", "text/event-stream") - w.WriteHeader(http.StatusOK) - w.Flush() - - taskResult := autogen_client.InvokeTaskResult{} - - for event := range ch { - log.Info(event.String()) - w.Write([]byte(event.String())) - w.Flush() - - if event.Event == "task_result" { - if err := json.Unmarshal(event.Data, &taskResult); err != nil { - log.Error(err, "Failed to unmarshal task result") - continue - } - } - - } - - messageToSave := utils.ConvertAutogenEventsToMessages(nil, &sessionID, taskResult.TaskResult.Messages...) 
- log.Info("Saving messages", "count", len(messageToSave)) - if err := h.DatabaseService.CreateMessages(messageToSave...); err != nil { - log.Error(err, "Failed to create messages") - } -} - -func (h *SessionsHandler) HandleListSessionMessages(w ErrorResponseWriter, r *http.Request) { - log := ctrllog.FromContext(r.Context()).WithName("sessions-handler").WithValues("operation", "list-messages-db") - - sessionID, err := GetPathParam(r, "session_id") - if err != nil { - w.RespondWithError(errors.NewBadRequestError("Failed to get session ID from path", err)) - return - } - log = log.WithValues("session_id", sessionID) - - userID, err := GetUserID(r) - if err != nil { - w.RespondWithError(errors.NewBadRequestError("Failed to get user ID", err)) - return - } - log = log.WithValues("userID", userID) - - messages, err := h.DatabaseService.ListMessagesForSession(sessionID, userID) - if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to get messages for session", err)) - return - } - - parsedMessages, err := database.ParseMessages(messages) - if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to parse messages", err)) - return - } - - autogenEvents, err := utils.ConvertMessagesToAutogenEvents(parsedMessages) + log.V(1).Info("Getting session tasks from database") + tasks, err := h.DatabaseService.ListTasksForSession(sessionID) if err != nil { - w.RespondWithError(errors.NewInternalServerError("Failed to convert messages to autogen events", err)) + w.RespondWithError(errors.NewInternalServerError("Failed to get session runs", err)) return } - data := api.NewResponse(autogenEvents, "Successfully retrieved session messages", false) + log.Info("Successfully retrieved session tasks", "count", len(tasks)) + data := api.NewResponse(tasks, "Successfully retrieved session tasks", false) RespondWithJSON(w, http.StatusOK, data) } @@ -481,9 +342,8 @@ func (h *SessionsHandler) HandleAddEventToSession(w ErrorResponseWriter, r *http log = log.WithValues("userID", userID) var eventData struct { - Type string `json:"type"` - Data map[string]any `json:"data"` - TaskID string `json:"task_id"` + ID string `json:"id"` + Data string `json:"data"` } if err := DecodeJSONBody(r, &eventData); err != nil { w.RespondWithError(errors.NewBadRequestError("Invalid request body", err)) @@ -491,33 +351,24 @@ func (h *SessionsHandler) HandleAddEventToSession(w ErrorResponseWriter, r *http } // Get session to verify it exists - session, err := h.DatabaseService.GetSession(sessionID, userID) + _, err = h.DatabaseService.GetSession(sessionID, userID) if err != nil { w.RespondWithError(errors.NewNotFoundError("Session not found", err)) return } - protocolMessage := protocol.Message{ - ContextID: &session.ID, - MessageID: uuid.New().String(), - Parts: []protocol.Part{ - protocol.DataPart{ - Kind: protocol.KindData, - Data: eventData, - }, - }, - TaskID: &eventData.TaskID, - Metadata: map[string]interface{}{ - "event_type": eventData.Type, - }, + event := &database.Event{ + ID: eventData.ID, + SessionID: sessionID, + Data: eventData.Data, + UserID: userID, } - - if err := h.DatabaseService.CreateMessages(&protocolMessage); err != nil { + if err := h.DatabaseService.StoreEvents(event); err != nil { w.RespondWithError(errors.NewInternalServerError("Failed to store event", err)) return } log.Info("Successfully added event to session") - data := api.NewResponse(protocolMessage, "Event added to session successfully", false) + data := api.NewResponse(event, "Event added to session successfully", 
false) RespondWithJSON(w, http.StatusCreated, data) } diff --git a/go/internal/httpserver/handlers/sessions_test.go b/go/internal/httpserver/handlers/sessions_test.go index 87abb0703..1fc2fb949 100644 --- a/go/internal/httpserver/handlers/sessions_test.go +++ b/go/internal/httpserver/handlers/sessions_test.go @@ -14,14 +14,12 @@ import ( "k8s.io/apimachinery/pkg/types" "k8s.io/utils/ptr" "sigs.k8s.io/controller-runtime/pkg/client/fake" - "trpc.group/trpc-go/trpc-a2a-go/protocol" "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" - autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client" - autogen_fake "github.com/kagent-dev/kagent/go/internal/autogen/client/fake" "github.com/kagent-dev/kagent/go/internal/database" database_fake "github.com/kagent-dev/kagent/go/internal/database/fake" "github.com/kagent-dev/kagent/go/internal/httpserver/handlers" + "github.com/kagent-dev/kagent/go/internal/utils" "github.com/kagent-dev/kagent/go/pkg/client/api" ) @@ -30,52 +28,50 @@ func TestSessionsHandler(t *testing.T) { err := v1alpha1.AddToScheme(scheme) require.NoError(t, err) - setupHandler := func() (*handlers.SessionsHandler, *database_fake.InMemmoryFakeClient, autogen_client.Client, *mockErrorResponseWriter) { + setupHandler := func() (*handlers.SessionsHandler, *database_fake.InMemmoryFakeClient, *mockErrorResponseWriter) { kubeClient := fake.NewClientBuilder().WithScheme(scheme).Build() dbClient := database_fake.NewClient() - autogenClient := autogen_fake.NewInMemoryAutogenClient() base := &handlers.Base{ KubeClient: kubeClient, DatabaseService: dbClient, - AutogenClient: autogenClient, DefaultModelConfig: types.NamespacedName{Namespace: "default", Name: "default"}, } handler := handlers.NewSessionsHandler(base) responseRecorder := newMockErrorResponseWriter() - return handler, dbClient.(*database_fake.InMemmoryFakeClient), autogenClient, responseRecorder + return handler, dbClient.(*database_fake.InMemmoryFakeClient), responseRecorder } createTestAgent := func(dbClient database.Client, agentRef string) *database.Agent { agent := &database.Agent{ - Name: agentRef, + ID: agentRef, } - dbClient.CreateAgent(agent) + dbClient.StoreAgent(agent) // The fake client should assign an ID, but we'll use a default for testing - agent.ID = 1 // Simulate the ID that would be assigned by GORM + agent.ID = "1" // Simulate the ID that would be assigned by GORM return agent } - createTestSession := func(dbClient database.Client, sessionID, userID string, agentID *uint) *database.Session { + createTestSession := func(dbClient database.Client, sessionID, userID string, agentID string) *database.Session { session := &database.Session{ ID: sessionID, - Name: sessionID, + Name: ptr.To(sessionID), UserID: userID, - AgentID: agentID, + AgentID: &agentID, } - dbClient.CreateSession(session) + dbClient.StoreSession(session) return session } t.Run("HandleListSessions", func(t *testing.T) { t.Run("Success", func(t *testing.T) { - handler, dbClient, _, responseRecorder := setupHandler() + handler, dbClient, responseRecorder := setupHandler() userID := "test-user" // Create test sessions - agentID := uint(1) - session1 := createTestSession(dbClient, "session-1", userID, &agentID) - session2 := createTestSession(dbClient, "session-2", userID, &agentID) + agentID := "1" + session1 := createTestSession(dbClient, "session-1", userID, agentID) + session2 := createTestSession(dbClient, "session-2", userID, agentID) req := httptest.NewRequest("GET", "/api/sessions?user_id="+userID, nil) 
handler.HandleListSessions(responseRecorder, req) @@ -91,7 +87,7 @@ func TestSessionsHandler(t *testing.T) { }) t.Run("MissingUserID", func(t *testing.T) { - handler, _, _, responseRecorder := setupHandler() + handler, _, responseRecorder := setupHandler() req := httptest.NewRequest("GET", "/api/sessions", nil) handler.HandleListSessions(responseRecorder, req) @@ -103,9 +99,9 @@ func TestSessionsHandler(t *testing.T) { t.Run("HandleCreateSession", func(t *testing.T) { t.Run("Success", func(t *testing.T) { - handler, dbClient, _, responseRecorder := setupHandler() + handler, dbClient, responseRecorder := setupHandler() userID := "test-user" - agentRef := "default/test-agent" + agentRef := utils.ConvertToPythonIdentifier("default/test-agent") // Create test agent createTestAgent(dbClient, agentRef) @@ -127,13 +123,14 @@ func TestSessionsHandler(t *testing.T) { var response api.StandardResponse[*database.Session] err := json.Unmarshal(responseRecorder.Body.Bytes(), &response) require.NoError(t, err) - assert.Equal(t, "test-session", response.Data.Name) + assert.Equal(t, "test-session", *response.Data.Name) assert.Equal(t, userID, response.Data.UserID) + assert.NotEmpty(t, response.Data.ID) }) t.Run("MissingUserID", func(t *testing.T) { - handler, _, _, responseRecorder := setupHandler() - agentRef := "default/test-agent" + handler, _, responseRecorder := setupHandler() + agentRef := utils.ConvertToPythonIdentifier("default/test-agent") sessionReq := api.SessionRequest{ AgentRef: &agentRef, @@ -150,7 +147,7 @@ func TestSessionsHandler(t *testing.T) { }) t.Run("MissingAgentRef", func(t *testing.T) { - handler, _, _, responseRecorder := setupHandler() + handler, _, responseRecorder := setupHandler() userID := "test-user" sessionReq := api.SessionRequest{ @@ -168,9 +165,9 @@ func TestSessionsHandler(t *testing.T) { }) t.Run("AgentNotFound", func(t *testing.T) { - handler, _, _, responseRecorder := setupHandler() + handler, _, responseRecorder := setupHandler() userID := "test-user" - agentRef := "default/non-existent-agent" + agentRef := utils.ConvertToPythonIdentifier("default/non-existent-agent") sessionReq := api.SessionRequest{ UserID: userID, @@ -188,7 +185,7 @@ func TestSessionsHandler(t *testing.T) { }) t.Run("InvalidJSON", func(t *testing.T) { - handler, _, _, responseRecorder := setupHandler() + handler, _, responseRecorder := setupHandler() req := httptest.NewRequest("POST", "/api/sessions", bytes.NewBufferString("invalid json")) req.Header.Set("Content-Type", "application/json") @@ -202,13 +199,13 @@ func TestSessionsHandler(t *testing.T) { t.Run("HandleGetSession", func(t *testing.T) { t.Run("Success", func(t *testing.T) { - handler, dbClient, _, responseRecorder := setupHandler() + handler, dbClient, responseRecorder := setupHandler() userID := "test-user" sessionID := "test-session" // Create test session - agentID := uint(1) - session := createTestSession(dbClient, sessionID, userID, &agentID) + agentID := "1" + session := createTestSession(dbClient, sessionID, userID, agentID) req := httptest.NewRequest("GET", "/api/sessions/"+sessionID+"?user_id="+userID, nil) req = mux.SetURLVars(req, map[string]string{"session_id": sessionID}) @@ -217,15 +214,15 @@ func TestSessionsHandler(t *testing.T) { assert.Equal(t, http.StatusOK, responseRecorder.Code) - var response api.StandardResponse[*database.Session] + var response api.StandardResponse[handlers.SessionResponse] err := json.Unmarshal(responseRecorder.Body.Bytes(), &response) require.NoError(t, err) - assert.Equal(t, session.ID, 
response.Data.ID) - assert.Equal(t, session.UserID, response.Data.UserID) + assert.Equal(t, session.ID, response.Data.Session.ID) + assert.Equal(t, session.UserID, response.Data.Session.UserID) }) t.Run("SessionNotFound", func(t *testing.T) { - handler, _, _, responseRecorder := setupHandler() + handler, _, responseRecorder := setupHandler() userID := "test-user" sessionID := "non-existent-session" @@ -239,7 +236,7 @@ func TestSessionsHandler(t *testing.T) { }) t.Run("MissingUserID", func(t *testing.T) { - handler, _, _, responseRecorder := setupHandler() + handler, _, responseRecorder := setupHandler() sessionID := "test-session" req := httptest.NewRequest("GET", "/api/sessions/"+sessionID, nil) @@ -254,16 +251,16 @@ func TestSessionsHandler(t *testing.T) { t.Run("HandleUpdateSession", func(t *testing.T) { t.Run("Success", func(t *testing.T) { - handler, dbClient, _, responseRecorder := setupHandler() + handler, dbClient, responseRecorder := setupHandler() userID := "test-user" sessionName := "test-session" // Create test agent and session - agentRef := "default/test-agent" + agentRef := utils.ConvertToPythonIdentifier("default/test-agent") agent := createTestAgent(dbClient, agentRef) - session := createTestSession(dbClient, sessionName, userID, &agent.ID) + session := createTestSession(dbClient, sessionName, userID, agent.ID) - newAgentRef := "default/new-agent" + newAgentRef := utils.ConvertToPythonIdentifier("default/new-agent") newAgent := createTestAgent(dbClient, newAgentRef) sessionReq := api.SessionRequest{ @@ -288,7 +285,7 @@ func TestSessionsHandler(t *testing.T) { }) t.Run("MissingSessionName", func(t *testing.T) { - handler, _, _, responseRecorder := setupHandler() + handler, _, responseRecorder := setupHandler() userID := "test-user" agentRef := "default/test-agent" @@ -308,7 +305,7 @@ func TestSessionsHandler(t *testing.T) { }) t.Run("SessionNotFound", func(t *testing.T) { - handler, dbClient, _, responseRecorder := setupHandler() + handler, dbClient, responseRecorder := setupHandler() userID := "test-user" sessionName := "non-existent-session" agentRef := "default/test-agent" @@ -334,13 +331,13 @@ func TestSessionsHandler(t *testing.T) { t.Run("HandleDeleteSession", func(t *testing.T) { t.Run("Success", func(t *testing.T) { - handler, dbClient, _, responseRecorder := setupHandler() + handler, dbClient, responseRecorder := setupHandler() userID := "test-user" sessionID := "test-session" // Create test session - agentID := uint(1) - createTestSession(dbClient, sessionID, userID, &agentID) + agentID := "1" + createTestSession(dbClient, sessionID, userID, agentID) req := httptest.NewRequest("DELETE", "/api/sessions/"+sessionID+"?user_id="+userID, nil) req = mux.SetURLVars(req, map[string]string{"session_id": sessionID}) @@ -356,7 +353,7 @@ func TestSessionsHandler(t *testing.T) { }) t.Run("MissingUserID", func(t *testing.T) { - handler, _, _, responseRecorder := setupHandler() + handler, _, responseRecorder := setupHandler() sessionID := "test-session" req := httptest.NewRequest("DELETE", "/api/sessions/"+sessionID, nil) @@ -371,16 +368,16 @@ func TestSessionsHandler(t *testing.T) { t.Run("HandleGetSessionsForAgent", func(t *testing.T) { t.Run("Success", func(t *testing.T) { - handler, dbClient, _, responseRecorder := setupHandler() + handler, dbClient, responseRecorder := setupHandler() userID := "test-user" namespace := "default" agentName := "test-agent" - agentRef := namespace + "/" + agentName + agentRef := utils.ConvertToPythonIdentifier(namespace + "/" + agentName) // 
Create test agent and sessions agent := createTestAgent(dbClient, agentRef) - session1 := createTestSession(dbClient, "session-1", userID, &agent.ID) - session2 := createTestSession(dbClient, "session-2", userID, &agent.ID) + session1 := createTestSession(dbClient, "session-1", userID, agent.ID) + session2 := createTestSession(dbClient, "session-2", userID, agent.ID) req := httptest.NewRequest("GET", "/api/agents/"+namespace+"/"+agentName+"/sessions?user_id="+userID, nil) req = mux.SetURLVars(req, map[string]string{"namespace": namespace, "name": agentName}) @@ -398,7 +395,7 @@ func TestSessionsHandler(t *testing.T) { }) t.Run("AgentNotFound", func(t *testing.T) { - handler, _, _, responseRecorder := setupHandler() + handler, _, responseRecorder := setupHandler() userID := "test-user" namespace := "default" agentName := "non-existent-agent" @@ -413,26 +410,24 @@ func TestSessionsHandler(t *testing.T) { }) }) - t.Run("HandleListSessionTasks", func(t *testing.T) { + t.Run("HandleListTasksForSession", func(t *testing.T) { t.Run("Success", func(t *testing.T) { - handler, dbClient, _, responseRecorder := setupHandler() + handler, dbClient, responseRecorder := setupHandler() userID := "test-user" sessionID := "test-session" // Create test session and tasks - agentID := uint(1) - createTestSession(dbClient, sessionID, userID, &agentID) + agentID := "1" + createTestSession(dbClient, sessionID, userID, agentID) task1 := &database.Task{ ID: "task-1", SessionID: sessionID, - UserID: userID, Data: "{}", } task2 := &database.Task{ ID: "task-2", SessionID: sessionID, - UserID: userID, Data: "{}", } // Use the fake client's AddTask method for testing @@ -442,7 +437,7 @@ func TestSessionsHandler(t *testing.T) { req := httptest.NewRequest("GET", "/api/sessions/"+sessionID+"/tasks?user_id="+userID, nil) req = mux.SetURLVars(req, map[string]string{"session_id": sessionID}) - handler.HandleListSessionTasks(responseRecorder, req) + handler.HandleListTasksForSession(responseRecorder, req) assert.Equal(t, http.StatusOK, responseRecorder.Code) @@ -453,61 +448,17 @@ func TestSessionsHandler(t *testing.T) { }) t.Run("MissingUserID", func(t *testing.T) { - handler, _, _, responseRecorder := setupHandler() + handler, _, responseRecorder := setupHandler() sessionID := "test-session" req := httptest.NewRequest("GET", "/api/sessions/"+sessionID+"/tasks", nil) req = mux.SetURLVars(req, map[string]string{"session_id": sessionID}) - handler.HandleListSessionTasks(responseRecorder, req) + handler.HandleListTasksForSession(responseRecorder, req) assert.Equal(t, http.StatusBadRequest, responseRecorder.Code) assert.NotNil(t, responseRecorder.errorReceived) }) }) - t.Run("HandleListSessionMessages", func(t *testing.T) { - t.Run("Success", func(t *testing.T) { - handler, dbClient, _, responseRecorder := setupHandler() - userID := "test-user" - sessionID := "test-session" - - // Create test session and messages - agentID := uint(1) - createTestSession(dbClient, sessionID, userID, &agentID) - - // For messages, we'll just test with empty list since the parsing is complex - req := httptest.NewRequest("GET", "/api/sessions/"+sessionID+"/messages?user_id="+userID, nil) - req = mux.SetURLVars(req, map[string]string{"session_id": sessionID}) - - message := protocol.NewMessageWithContext(protocol.MessageRoleUser, []protocol.Part{ - protocol.NewTextPart("test-message"), - }, nil, ptr.To(sessionID)) - err := dbClient.CreateMessages(&message) - require.NoError(t, err) - - handler.HandleListSessionMessages(responseRecorder, req) - - 
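These test helpers now key agents and sessions by string IDs, and the agent ref is passed through utils.ConvertToPythonIdentifier before use. Purely as an illustration of the intent (the helper below is hypothetical and may not match the real conversion), the idea is to rewrite a namespace/name ref into something identifier-safe:

package example

import "strings"

// toIdentifier is a hypothetical stand-in for utils.ConvertToPythonIdentifier,
// shown only to illustrate the intent: rewrite a "namespace/name" agent ref so
// that every character is legal in a Python identifier. The real helper may
// use a different scheme.
func toIdentifier(ref string) string {
	return strings.NewReplacer("/", "__", "-", "_", ".", "_").Replace(ref)
}

// With this sketch, toIdentifier("default/test-agent") == "default__test_agent".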
assert.Equal(t, http.StatusOK, responseRecorder.Code) - - // The response should be autogen events, not raw messages - var response api.StandardResponse[interface{}] - err = json.Unmarshal(responseRecorder.Body.Bytes(), &response) - require.NoError(t, err) - assert.NotNil(t, response.Data) - }) - - t.Run("MissingUserID", func(t *testing.T) { - handler, _, _, responseRecorder := setupHandler() - sessionID := "test-session" - - req := httptest.NewRequest("GET", "/api/sessions/"+sessionID+"/messages", nil) - req = mux.SetURLVars(req, map[string]string{"session_id": sessionID}) - - handler.HandleListSessionMessages(responseRecorder, req) - - assert.Equal(t, http.StatusBadRequest, responseRecorder.Code) - assert.NotNil(t, responseRecorder.errorReceived) - }) - }) } diff --git a/go/internal/httpserver/handlers/tasks.go b/go/internal/httpserver/handlers/tasks.go index 4b7511d0f..b6929b12d 100644 --- a/go/internal/httpserver/handlers/tasks.go +++ b/go/internal/httpserver/handlers/tasks.go @@ -48,9 +48,12 @@ func (h *TasksHandler) HandleCreateTask(w ErrorResponseWriter, r *http.Request) w.RespondWithError(errors.NewBadRequestError("Invalid request body", err)) return } + if task.ID == "" { + task.ID = protocol.GenerateTaskID() + } log = log.WithValues("task_id", task.ID) - if err := h.DatabaseService.CreateTask(&task); err != nil { + if err := h.DatabaseService.StoreTask(&task); err != nil { w.RespondWithError(errors.NewInternalServerError("Failed to create task", err)) return } diff --git a/go/internal/httpserver/handlers/toolservers_test.go b/go/internal/httpserver/handlers/toolservers_test.go index 1941ef081..3d377d009 100644 --- a/go/internal/httpserver/handlers/toolservers_test.go +++ b/go/internal/httpserver/handlers/toolservers_test.go @@ -23,6 +23,7 @@ import ( "github.com/kagent-dev/kagent/go/internal/httpserver/handlers" common "github.com/kagent-dev/kagent/go/internal/utils" "github.com/kagent-dev/kagent/go/pkg/client/api" + "k8s.io/utils/ptr" ) func TestToolServersHandler(t *testing.T) { @@ -57,17 +58,17 @@ func TestToolServersHandler(t *testing.T) { Spec: v1alpha1.ToolServerSpec{ Description: "Test tool server 1", Config: v1alpha1.ToolServerConfig{ - Stdio: &v1alpha1.StdioMcpServerConfig{ - Command: "python", - Args: []string{"-m", "test_tool"}, - Env: map[string]string{ - "ENV_VAR": "value", - }, - EnvFrom: []v1alpha1.ValueRef{ - { - Name: "API_KEY", - Value: "test-key", + Type: v1alpha1.ToolServerTypeStreamableHttp, + StreamableHttp: &v1alpha1.StreamableHttpServerConfig{ + HttpToolServerConfig: v1alpha1.HttpToolServerConfig{ + URL: "https://example.com/streamable", + HeadersFrom: []v1alpha1.ValueRef{ + { + Name: "API_KEY", + Value: "test-key", + }, }, + Timeout: &metav1.Duration{Duration: 30 * time.Second}, }, }, }, @@ -76,14 +77,6 @@ func TestToolServersHandler(t *testing.T) { DiscoveredTools: []*v1alpha1.MCPTool{ { Name: "test-tool", - Component: v1alpha1.Component{ - Provider: "test-provider", - ComponentType: "tool", - Version: 1, - ComponentVersion: 1, - Description: "Test tool", - Label: "Test Tool", - }, }, }, }, @@ -97,11 +90,19 @@ func TestToolServersHandler(t *testing.T) { Spec: v1alpha1.ToolServerSpec{ Description: "Test tool server 2", Config: v1alpha1.ToolServerConfig{ + Type: v1alpha1.ToolServerTypeSse, Sse: &v1alpha1.SseMcpServerConfig{ HttpToolServerConfig: v1alpha1.HttpToolServerConfig{ URL: "https://example.com/sse", - Headers: map[string]v1alpha1.AnyType{ - "Authorization": {RawMessage: []byte(`"Bearer token"`)}, + HeadersFrom: []v1alpha1.ValueRef{ + { + Name: 
"Authorization", + ValueFrom: &v1alpha1.ValueSource{ + Type: v1alpha1.SecretValueSource, + ValueRef: "auth-secret", + Key: "token", + }, + }, }, Timeout: &metav1.Duration{Duration: 30 * time.Second}, SseReadTimeout: &metav1.Duration{Duration: 60 * time.Second}, @@ -129,16 +130,15 @@ func TestToolServersHandler(t *testing.T) { // Verify first tool server response toolServer := toolServers.Data[0] require.Equal(t, "default/test-toolserver-1", toolServer.Ref) - require.NotNil(t, toolServer.Config.Stdio) - require.Equal(t, "python", toolServer.Config.Stdio.Command) - require.Equal(t, []string{"-m", "test_tool"}, toolServer.Config.Stdio.Args) + require.Equal(t, v1alpha1.ToolServerTypeStreamableHttp, toolServer.Config.Type) + require.Equal(t, "https://example.com/streamable", toolServer.Config.StreamableHttp.URL) require.Len(t, toolServer.DiscoveredTools, 1) require.Equal(t, "test-tool", toolServer.DiscoveredTools[0].Name) // Verify second tool server response toolServer = toolServers.Data[1] require.Equal(t, "test-ns/test-toolserver-2", toolServer.Ref) - require.NotNil(t, toolServer.Config.Sse) + require.Equal(t, v1alpha1.ToolServerTypeSse, toolServer.Config.Type) require.Equal(t, "https://example.com/sse", toolServer.Config.Sse.URL) }) @@ -158,7 +158,7 @@ func TestToolServersHandler(t *testing.T) { }) t.Run("HandleCreateToolServer", func(t *testing.T) { - t.Run("Success_Stdio", func(t *testing.T) { + t.Run("Success_StreamableHttp", func(t *testing.T) { handler, _, responseRecorder := setupHandler() reqBody := &v1alpha1.ToolServer{ @@ -169,12 +169,19 @@ func TestToolServersHandler(t *testing.T) { Spec: v1alpha1.ToolServerSpec{ Description: "Test tool server", Config: v1alpha1.ToolServerConfig{ - Stdio: &v1alpha1.StdioMcpServerConfig{ - Command: "python", - Args: []string{"-m", "test_tool"}, - Env: map[string]string{ - "API_KEY": "test-key", + Type: v1alpha1.ToolServerTypeStreamableHttp, + StreamableHttp: &v1alpha1.StreamableHttpServerConfig{ + HttpToolServerConfig: v1alpha1.HttpToolServerConfig{ + URL: "https://example.com/streamable", + HeadersFrom: []v1alpha1.ValueRef{ + { + Name: "API-Key", + Value: "test-key", + }, + }, + Timeout: &metav1.Duration{Duration: 30 * time.Second}, }, + TerminateOnClose: ptr.To(true), }, }, }, @@ -194,8 +201,9 @@ func TestToolServersHandler(t *testing.T) { assert.Equal(t, "test-toolserver", toolServer.Data.Name) assert.Equal(t, "default", toolServer.Data.Namespace) assert.Equal(t, "Test tool server", toolServer.Data.Spec.Description) - assert.NotNil(t, toolServer.Data.Spec.Config.Stdio) - assert.Equal(t, "python", toolServer.Data.Spec.Config.Stdio.Command) + assert.Equal(t, v1alpha1.ToolServerTypeStreamableHttp, toolServer.Data.Spec.Config.Type) + assert.Equal(t, "https://example.com/streamable", toolServer.Data.Spec.Config.StreamableHttp.URL) + assert.True(t, *toolServer.Data.Spec.Config.StreamableHttp.TerminateOnClose) }) t.Run("Success_Sse", func(t *testing.T) { @@ -209,12 +217,10 @@ func TestToolServersHandler(t *testing.T) { Spec: v1alpha1.ToolServerSpec{ Description: "Test SSE tool server", Config: v1alpha1.ToolServerConfig{ + Type: v1alpha1.ToolServerTypeSse, Sse: &v1alpha1.SseMcpServerConfig{ HttpToolServerConfig: v1alpha1.HttpToolServerConfig{ URL: "https://example.com/sse", - Headers: map[string]v1alpha1.AnyType{ - "Authorization": {RawMessage: []byte(`"Bearer token"`)}, - }, HeadersFrom: []v1alpha1.ValueRef{ { Name: "X-API-Key", @@ -246,7 +252,7 @@ func TestToolServersHandler(t *testing.T) { require.NoError(t, err) assert.Equal(t, 
"test-sse-toolserver", toolServer.Data.Name) assert.Equal(t, "default", toolServer.Data.Namespace) - assert.NotNil(t, toolServer.Data.Spec.Config.Sse) + assert.Equal(t, v1alpha1.ToolServerTypeSse, toolServer.Data.Spec.Config.Type) assert.Equal(t, "https://example.com/sse", toolServer.Data.Spec.Config.Sse.URL) }) @@ -261,8 +267,11 @@ func TestToolServersHandler(t *testing.T) { Spec: v1alpha1.ToolServerSpec{ Description: "Test tool server", Config: v1alpha1.ToolServerConfig{ - Stdio: &v1alpha1.StdioMcpServerConfig{ - Command: "python", + Type: v1alpha1.ToolServerTypeStreamableHttp, + StreamableHttp: &v1alpha1.StreamableHttpServerConfig{ + HttpToolServerConfig: v1alpha1.HttpToolServerConfig{ + URL: "https://example.com/test", + }, }, }, }, @@ -307,8 +316,11 @@ func TestToolServersHandler(t *testing.T) { Spec: v1alpha1.ToolServerSpec{ Description: "Existing tool server", Config: v1alpha1.ToolServerConfig{ - Stdio: &v1alpha1.StdioMcpServerConfig{ - Command: "python", + Type: v1alpha1.ToolServerTypeStreamableHttp, + StreamableHttp: &v1alpha1.StreamableHttpServerConfig{ + HttpToolServerConfig: v1alpha1.HttpToolServerConfig{ + URL: "https://example.com/existing", + }, }, }, }, @@ -324,8 +336,11 @@ func TestToolServersHandler(t *testing.T) { Spec: v1alpha1.ToolServerSpec{ Description: "New tool server", Config: v1alpha1.ToolServerConfig{ - Stdio: &v1alpha1.StdioMcpServerConfig{ - Command: "node", + Type: v1alpha1.ToolServerTypeSse, + Sse: &v1alpha1.SseMcpServerConfig{ + HttpToolServerConfig: v1alpha1.HttpToolServerConfig{ + URL: "https://example.com/new", + }, }, }, }, @@ -355,8 +370,11 @@ func TestToolServersHandler(t *testing.T) { Spec: v1alpha1.ToolServerSpec{ Description: "Tool server to delete", Config: v1alpha1.ToolServerConfig{ - Stdio: &v1alpha1.StdioMcpServerConfig{ - Command: "python", + Type: v1alpha1.ToolServerTypeStreamableHttp, + StreamableHttp: &v1alpha1.StreamableHttpServerConfig{ + HttpToolServerConfig: v1alpha1.HttpToolServerConfig{ + URL: "https://example.com/delete", + }, }, }, }, diff --git a/go/internal/httpserver/server.go b/go/internal/httpserver/server.go index ad91f992d..95e8f9410 100644 --- a/go/internal/httpserver/server.go +++ b/go/internal/httpserver/server.go @@ -7,7 +7,6 @@ import ( "github.com/gorilla/mux" "github.com/kagent-dev/kagent/go/internal/a2a" - autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client" "github.com/kagent-dev/kagent/go/internal/database" "github.com/kagent-dev/kagent/go/internal/httpserver/handlers" common "github.com/kagent-dev/kagent/go/internal/utils" @@ -45,7 +44,6 @@ var defaultModelConfig = types.NamespacedName{ // ServerConfig holds the configuration for the HTTP server type ServerConfig struct { BindAddr string - AutogenClient autogen_client.Client KubeClient ctrl_client.Client A2AHandler a2a.A2AHandlerMux WatchedNamespaces []string @@ -69,7 +67,7 @@ func NewHTTPServer(config ServerConfig) (*HTTPServer, error) { return &HTTPServer{ config: config, router: mux.NewRouter(), - handlers: handlers.NewHandlers(config.KubeClient, config.AutogenClient, defaultModelConfig, config.DbClient, config.WatchedNamespaces), + handlers: handlers.NewHandlers(config.KubeClient, defaultModelConfig, config.DbClient, config.WatchedNamespaces), }, nil } @@ -153,12 +151,9 @@ func (s *HTTPServer) setupRoutes() { s.router.HandleFunc(APIPathSessions, adaptHandler(s.handlers.Sessions.HandleCreateSession)).Methods(http.MethodPost) s.router.HandleFunc(APIPathSessions+"/agent/{namespace}/{name}", 
adaptHandler(s.handlers.Sessions.HandleGetSessionsForAgent)).Methods(http.MethodGet) s.router.HandleFunc(APIPathSessions+"/{session_id}", adaptHandler(s.handlers.Sessions.HandleGetSession)).Methods(http.MethodGet) - s.router.HandleFunc(APIPathSessions+"/{session_id}/messages", adaptHandler(s.handlers.Sessions.HandleListSessionMessages)).Methods(http.MethodGet) - s.router.HandleFunc(APIPathSessions+"/{session_id}/tasks", adaptHandler(s.handlers.Sessions.HandleListSessionTasks)).Methods(http.MethodGet) + s.router.HandleFunc(APIPathSessions+"/{session_id}/tasks", adaptHandler(s.handlers.Sessions.HandleListTasksForSession)).Methods(http.MethodGet) s.router.HandleFunc(APIPathSessions+"/{session_id}", adaptHandler(s.handlers.Sessions.HandleDeleteSession)).Methods(http.MethodDelete) s.router.HandleFunc(APIPathSessions+"/{session_id}", adaptHandler(s.handlers.Sessions.HandleUpdateSession)).Methods(http.MethodPut) - s.router.HandleFunc(APIPathSessions+"/{session_id}/invoke/stream", adaptHandler(s.handlers.Sessions.HandleInvokeSessionStream)).Methods(http.MethodPost) - s.router.HandleFunc(APIPathSessions+"/{session_id}/invoke", adaptHandler(s.handlers.Sessions.HandleInvokeSession)).Methods(http.MethodPost) s.router.HandleFunc(APIPathSessions+"/{session_id}/events", adaptHandler(s.handlers.Sessions.HandleAddEventToSession)).Methods(http.MethodPost) // Tasks diff --git a/go/internal/utils/message_parsing.go b/go/internal/utils/message_parsing.go deleted file mode 100644 index 8ca776b70..000000000 --- a/go/internal/utils/message_parsing.go +++ /dev/null @@ -1,149 +0,0 @@ -package utils - -import ( - "encoding/json" - "fmt" - - "github.com/kagent-dev/kagent/go/internal/autogen/client" - autogen_client "github.com/kagent-dev/kagent/go/internal/autogen/client" - "trpc.group/trpc-go/trpc-a2a-go/protocol" -) - -func ConvertMessagesToAutogenEvents(messages []protocol.Message) ([]autogen_client.Event, error) { - result := make([]client.Event, 0, len(messages)) - for _, message := range messages { - source := "user" - if message.Role == protocol.MessageRoleAgent { - source = "agent" - } - for _, part := range message.Parts { - if textPart, ok := part.(*protocol.TextPart); ok { - events := autogen_client.NewTextMessage(textPart.Text, source) - result = append(result, events) - } else if dataPart, ok := part.(*protocol.DataPart); ok { - byt, err := json.Marshal(dataPart.Data) - if err != nil { - return nil, fmt.Errorf("failed to marshal data part: %w", err) - } - parsedEvent, err := autogen_client.ParseEvent(byt) - if err != nil { - return nil, fmt.Errorf("failed to parse event: %w", err) - } - result = append(result, parsedEvent) - } - } - } - return result, nil -} - -func ConvertAutogenEventsToMessages(taskId, contextId *string, events ...client.Event) []*protocol.Message { - result := make([]*protocol.Message, 0, len(events)) - - for _, event := range events { - role := protocol.MessageRoleUser - switch typed := event.(type) { - case *client.TextMessage: - if typed.Source != "user" { - role = protocol.MessageRoleAgent - } - result = append(result, newMessage( - role, - []protocol.Part{protocol.NewTextPart(typed.Content)}, - taskId, - contextId, - typed.Metadata, - typed.ModelsUsage, - )) - case *client.ModelClientStreamingChunkEvent: - if typed.Source != "user" { - role = protocol.MessageRoleAgent - } - result = append(result, newMessage( - role, - []protocol.Part{protocol.NewDataPart(typed)}, - taskId, - contextId, - typed.Metadata, - typed.ModelsUsage, - )) - case *client.ToolCallRequestEvent: - if 
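With the messages and invoke routes removed from this router, a session's recorded history is read back through its tasks route. A minimal sketch of hitting the two surviving read paths with plain net/http; the baseURL, helper name, and user_id value are assumptions, and the paths mirror the ones exercised in the handler tests above:

package example

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

// dumpSessionState is a hypothetical helper that reads the remaining
// session endpoints; baseURL is wherever the kagent HTTP API is exposed.
func dumpSessionState(baseURL, userID, sessionID string) error {
	q := url.Values{"user_id": {userID}}.Encode()
	paths := []string{
		baseURL + "/api/sessions?" + q,                         // list sessions for a user
		baseURL + "/api/sessions/" + sessionID + "/tasks?" + q, // tasks recorded for one session
	}
	for _, p := range paths {
		resp, err := http.Get(p)
		if err != nil {
			return err
		}
		body, err := io.ReadAll(resp.Body)
		resp.Body.Close()
		if err != nil {
			return err
		}
		// Payloads are wrapped in the same StandardResponse envelope the tests decode.
		fmt.Printf("%s -> %s\n%s\n", p, resp.Status, body)
	}
	return nil
}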
typed.Source != "user" { - role = protocol.MessageRoleAgent - } - result = append(result, newMessage( - role, - []protocol.Part{protocol.NewDataPart(typed)}, - taskId, - contextId, - typed.Metadata, - typed.ModelsUsage, - )) - case *client.ToolCallExecutionEvent: - if typed.Source != "user" { - role = protocol.MessageRoleAgent - } - result = append(result, newMessage( - role, - []protocol.Part{protocol.NewDataPart(typed)}, - taskId, - contextId, - typed.Metadata, - typed.ModelsUsage, - )) - case *client.MemoryQueryEvent: - if typed.Source != "user" { - role = protocol.MessageRoleAgent - } - result = append(result, newMessage( - role, - []protocol.Part{protocol.NewDataPart(typed)}, - taskId, - contextId, - typed.Metadata, - typed.ModelsUsage, - )) - case *client.ToolCallSummaryMessage: - if typed.Source != "user" { - role = protocol.MessageRoleAgent - } - result = append(result, newMessage( - role, - []protocol.Part{protocol.NewDataPart(typed)}, - taskId, - contextId, - typed.Metadata, - typed.ModelsUsage, - )) - } - } - return result -} - -func newMessage( - role protocol.MessageRole, - parts []protocol.Part, - taskId, - contextId *string, - metadata map[string]string, - modelsUsage *client.ModelsUsage, -) *protocol.Message { - msg := protocol.NewMessageWithContext( - role, - parts, - taskId, - contextId, - ) - msg.Metadata = buildMetadata(metadata, modelsUsage) - return &msg -} - -func buildMetadata(metadata map[string]string, modelsUsage *client.ModelsUsage) map[string]interface{} { - result := make(map[string]interface{}) - for k, v := range metadata { - result[k] = v - } - if modelsUsage != nil { - result["usage"] = modelsUsage.ToMap() - } - return result -} diff --git a/go/pkg/client/api/types.go b/go/pkg/client/api/types.go index e5c2cec8d..86506c79d 100644 --- a/go/pkg/client/api/types.go +++ b/go/pkg/client/api/types.go @@ -2,7 +2,6 @@ package api import ( "github.com/kagent-dev/kagent/go/controller/api/v1alpha1" - autogen_api "github.com/kagent-dev/kagent/go/internal/autogen/api" "github.com/kagent-dev/kagent/go/internal/database" ) @@ -77,9 +76,9 @@ type UpdateModelConfigRequest struct { // Agent types type AgentResponse struct { - ID uint `json:"id"` - Agent *v1alpha1.Agent `json:"agent"` - Component *autogen_api.Component `json:"component"` + ID string `json:"id"` + Agent *v1alpha1.Agent `json:"agent"` + // Config *adk.AgentConfig `json:"config"` ModelProvider v1alpha1.ModelProvider `json:"modelProvider"` Model string `json:"model"` ModelConfigRef string `json:"modelConfigRef"` @@ -94,6 +93,7 @@ type SessionRequest struct { AgentRef *string `json:"agent_ref,omitempty"` Name *string `json:"name,omitempty"` UserID string `json:"user_id"` + ID *string `json:"id,omitempty"` } // Run types @@ -107,7 +107,7 @@ type RunRequest struct { type Task = database.Task // Message represents a message from the database -type Message = database.Message +type Message = database.Event // Session represents a session from the database type Session = database.Session diff --git a/go/pkg/client/model.go b/go/pkg/client/model.go index 30fe2a7af..468ee3974 100644 --- a/go/pkg/client/model.go +++ b/go/pkg/client/model.go @@ -1,15 +1,8 @@ package client -import ( - "context" - - "github.com/kagent-dev/kagent/go/internal/autogen/client" - "github.com/kagent-dev/kagent/go/pkg/client/api" -) - // Model defines the model operations type Model interface { - ListSupportedModels(ctx context.Context) (*api.StandardResponse[*client.ProviderModels], error) + // ListSupportedModels(ctx context.Context) 
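SessionRequest now also accepts an optional id, so a caller can pin the session ID up front rather than rely on a server-generated one. A small sketch of filling the struct; every concrete value is a placeholder:

package example

import (
	"k8s.io/utils/ptr"

	"github.com/kagent-dev/kagent/go/pkg/client/api"
)

// newSessionRequest sketches a create/update payload that pre-assigns the
// session ID; all concrete values here are placeholders.
func newSessionRequest() api.SessionRequest {
	return api.SessionRequest{
		ID:       ptr.To("session-1234"),
		Name:     ptr.To("my-session"),
		UserID:   "test-user",
		AgentRef: ptr.To("default/test-agent"),
	}
}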
(*api.StandardResponse[*client.ProviderModels], error) } // modelClient handles model-related requests @@ -23,16 +16,16 @@ func NewModelClient(client *BaseClient) Model { } // ListSupportedModels lists all supported models -func (c *modelClient) ListSupportedModels(ctx context.Context) (*api.StandardResponse[*client.ProviderModels], error) { - resp, err := c.client.Get(ctx, "/api/models", "") - if err != nil { - return nil, err - } - - var models api.StandardResponse[*client.ProviderModels] - if err := DecodeResponse(resp, &models); err != nil { - return nil, err - } - - return &models, nil -} +// func (c *modelClient) ListSupportedModels(ctx context.Context) (*api.StandardResponse[*client.ProviderModels], error) { +// resp, err := c.client.Get(ctx, "/api/models", "") +// if err != nil { +// return nil, err +// } + +// var models api.StandardResponse[*client.ProviderModels] +// if err := DecodeResponse(resp, &models); err != nil { +// return nil, err +// } + +// return &models, nil +// } diff --git a/go/pkg/client/tool.go b/go/pkg/client/tool.go index 091081e3e..df7cb8c27 100644 --- a/go/pkg/client/tool.go +++ b/go/pkg/client/tool.go @@ -34,10 +34,10 @@ func (c *toolClient) ListTools(ctx context.Context, userID string) ([]api.Tool, return nil, err } - var tools []api.Tool + var tools api.StandardResponse[[]api.Tool] if err := DecodeResponse(resp, &tools); err != nil { return nil, err } - return tools, nil + return tools.Data, nil } diff --git a/go/test/e2e/invoke_api_test.go b/go/test/e2e/invoke_api_test.go index fe0e52a58..1a5007e4c 100644 --- a/go/test/e2e/invoke_api_test.go +++ b/go/test/e2e/invoke_api_test.go @@ -2,6 +2,7 @@ package e2e_test import ( "context" + "encoding/json" "os" "testing" "time" @@ -37,16 +38,19 @@ func TestInvokeAPI(t *testing.T) { msg, err := a2aClient.SendMessage(ctx, protocol.SendMessageParams{ Message: protocol.Message{ + Kind: protocol.KindMessage, Role: protocol.MessageRoleUser, Parts: []protocol.Part{protocol.NewTextPart("List all pods in the cluster")}, }, }) require.NoError(t, err) - msgResult, ok := msg.Result.(*protocol.Message) + taskResult, ok := msg.Result.(*protocol.Task) require.True(t, ok) - text := a2a.ExtractText(*msgResult) - require.Contains(t, text, "kube-scheduler-kagent-control-plane") + text := a2a.ExtractText(taskResult.History[len(taskResult.History)-1]) + jsn, err := json.Marshal(taskResult) + require.NoError(t, err) + require.Contains(t, text, "kube-scheduler-kagent-control-plane", string(jsn)) }) t.Run("should successfully handle a streaming agent invocation", func(t *testing.T) { @@ -55,55 +59,28 @@ func TestInvokeAPI(t *testing.T) { msg, err := a2aClient.StreamMessage(ctx, protocol.SendMessageParams{ Message: protocol.Message{ + Kind: protocol.KindMessage, Role: protocol.MessageRoleUser, Parts: []protocol.Part{protocol.NewTextPart("List all pods in the cluster")}, }, }) require.NoError(t, err) + resultList := []protocol.StreamingMessageEvent{} var text string for event := range msg { - msgResult, ok := event.Result.(*protocol.Message) + msgResult, ok := event.Result.(*protocol.TaskStatusUpdateEvent) if !ok { continue } - text += a2a.ExtractText(*msgResult) + if msgResult.Status.Message != nil { + text += a2a.ExtractText(*msgResult.Status.Message) + } + resultList = append(resultList, event) } - require.Contains(t, text, "kube-scheduler-kagent-control-plane") + jsn, err := json.Marshal(resultList) + require.NoError(t, err) + require.Contains(t, string(jsn), "kube-scheduler-kagent-control-plane", string(jsn)) }) }) } - -// 
waitForTaskCompletion polls the task until it's completed or times out -func waitForTaskCompletion(ctx context.Context, a2aClient *client.A2AClient, taskID string, timeout time.Duration) (*protocol.Task, error) { - ctx, cancel := context.WithTimeout(ctx, timeout) - defer cancel() - - ticker := time.NewTicker(2 * time.Second) - defer ticker.Stop() - - for { - select { - case <-ticker.C: - task, err := a2aClient.GetTasks(ctx, protocol.TaskQueryParams{ - ID: taskID, - }) - if err != nil { - return nil, err - } - - switch task.Status.State { - case protocol.TaskStateSubmitted, - protocol.TaskStateWorking: - continue // Keep polling - case protocol.TaskStateCompleted, - protocol.TaskStateFailed, - protocol.TaskStateCanceled: - return task, nil - } - - case <-ctx.Done(): - return nil, ctx.Err() - } - } -} diff --git a/helm/kagent-crds/templates/kagent.dev_agents.yaml b/helm/kagent-crds/templates/kagent.dev_agents.yaml index 52a035ac9..0eb5cd7f1 100644 --- a/helm/kagent-crds/templates/kagent.dev_agents.yaml +++ b/helm/kagent-crds/templates/kagent.dev_agents.yaml @@ -15,14 +15,18 @@ spec: scope: Namespaced versions: - additionalPrinterColumns: - - description: Whether or not the agent has been accepted by the system. - jsonPath: .status.conditions[0].status - name: Accepted - type: string - description: The ModelConfig resource referenced by this agent. jsonPath: .spec.modelConfig name: ModelConfig type: string + - description: Whether or not the agent is ready to serve requests. + jsonPath: .status.conditions[?(@.type=='Ready')].status + name: Ready + type: string + - description: Whether or not the agent has been accepted by the system. + jsonPath: .status.conditions[?(@.type=='Accepted')].status + name: Accepted + type: string name: v1alpha1 schema: openAPIV3Schema: @@ -100,6 +104,1949 @@ spec: minItems: 1 type: array type: object + deployment: + properties: + annotations: + additionalProperties: + type: string + type: object + env: + items: + description: EnvVar represents an environment variable present + in a Container. + properties: + name: + description: Name of the environment variable. Must be a + C_IDENTIFIER. + type: string + value: + description: |- + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any service environment variables. If a variable cannot be resolved, + the reference in the input string will be unchanged. Double $$ are reduced + to a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. + "$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)". + Escaped references will never be expanded, regardless of whether the variable + exists or not. + Defaults to "". + type: string + valueFrom: + description: Source for the environment variable's value. + Cannot be used if value is not empty. + properties: + configMapKeyRef: + description: Selects a key of a ConfigMap. + properties: + key: + description: The key to select. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. 
+ More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + optional: + description: Specify whether the ConfigMap or its + key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + fieldRef: + description: |- + Selects a field of the pod: supports metadata.name, metadata.namespace, `metadata.labels['']`, `metadata.annotations['']`, + spec.nodeName, spec.serviceAccountName, status.hostIP, status.podIP, status.podIPs. + properties: + apiVersion: + description: Version of the schema the FieldPath + is written in terms of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to select in the + specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits.ephemeral-storage, requests.cpu, requests.memory and requests.ephemeral-storage) are currently supported. + properties: + containerName: + description: 'Container name: required for volumes, + optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format of the + exposed resources, defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret in the pod's + namespace + properties: + key: + description: The key of the secret to select from. Must + be a valid secret key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + optional: + description: Specify whether the Secret or its key + must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name + type: object + type: array + imagePullSecrets: + items: + description: |- + LocalObjectReference contains enough information to let you locate the + referenced object inside the same namespace. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + type: array + labels: + additionalProperties: + type: string + type: object + replicas: + description: If not specified, the default value is 1. + format: int32 + minimum: 1 + type: integer + volumes: + items: + description: Volume represents a named volume in a pod that + may be accessed by any container in the pod. + properties: + awsElasticBlockStore: + description: |- + awsElasticBlockStore represents an AWS Disk resource that is attached to a + kubelet's host machine and then exposed to the pod. 
+ Deprecated: AWSElasticBlockStore is deprecated. All operations for the in-tree + awsElasticBlockStore type are redirected to the ebs.csi.aws.com CSI driver. + More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore + properties: + fsType: + description: |- + fsType is the filesystem type of the volume that you want to mount. + Tip: Ensure that the filesystem type is supported by the host operating system. + Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore + type: string + partition: + description: |- + partition is the partition in the volume that you want to mount. + If omitted, the default is to mount by volume name. + Examples: For volume /dev/sda1, you specify the partition as "1". + Similarly, the volume partition for /dev/sda is "0" (or you can leave the property empty). + format: int32 + type: integer + readOnly: + description: |- + readOnly value true will force the readOnly setting in VolumeMounts. + More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore + type: boolean + volumeID: + description: |- + volumeID is unique ID of the persistent disk resource in AWS (Amazon EBS volume). + More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore + type: string + required: + - volumeID + type: object + azureDisk: + description: |- + azureDisk represents an Azure Data Disk mount on the host and bind mount to the pod. + Deprecated: AzureDisk is deprecated. All operations for the in-tree azureDisk type + are redirected to the disk.csi.azure.com CSI driver. + properties: + cachingMode: + description: 'cachingMode is the Host Caching mode: + None, Read Only, Read Write.' + type: string + diskName: + description: diskName is the Name of the data disk in + the blob storage + type: string + diskURI: + description: diskURI is the URI of data disk in the + blob storage + type: string + fsType: + default: ext4 + description: |- + fsType is Filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + type: string + kind: + description: 'kind expected values are Shared: multiple + blob disks per storage account Dedicated: single + blob disk per storage account Managed: azure managed + data disk (only in managed availability set). defaults + to shared' + type: string + readOnly: + default: false + description: |- + readOnly Defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + required: + - diskName + - diskURI + type: object + azureFile: + description: |- + azureFile represents an Azure File Service mount on the host and bind mount to the pod. + Deprecated: AzureFile is deprecated. All operations for the in-tree azureFile type + are redirected to the file.csi.azure.com CSI driver. + properties: + readOnly: + description: |- + readOnly defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + secretName: + description: secretName is the name of secret that + contains Azure Storage Account Name and Key + type: string + shareName: + description: shareName is the azure share Name + type: string + required: + - secretName + - shareName + type: object + cephfs: + description: |- + cephFS represents a Ceph FS mount on the host that shares a pod's lifetime. 
+ Deprecated: CephFS is deprecated and the in-tree cephfs type is no longer supported. + properties: + monitors: + description: |- + monitors is Required: Monitors is a collection of Ceph monitors + More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it + items: + type: string + type: array + x-kubernetes-list-type: atomic + path: + description: 'path is Optional: Used as the mounted + root, rather than the full Ceph tree, default is /' + type: string + readOnly: + description: |- + readOnly is Optional: Defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it + type: boolean + secretFile: + description: |- + secretFile is Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret + More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it + type: string + secretRef: + description: |- + secretRef is Optional: SecretRef is reference to the authentication secret for User, default is empty. + More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + user: + description: |- + user is optional: User is the rados user name, default is admin + More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it + type: string + required: + - monitors + type: object + cinder: + description: |- + cinder represents a cinder volume attached and mounted on kubelets host machine. + Deprecated: Cinder is deprecated. All operations for the in-tree cinder type + are redirected to the cinder.csi.openstack.org CSI driver. + More info: https://examples.k8s.io/mysql-cinder-pd/README.md + properties: + fsType: + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + More info: https://examples.k8s.io/mysql-cinder-pd/README.md + type: string + readOnly: + description: |- + readOnly defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + More info: https://examples.k8s.io/mysql-cinder-pd/README.md + type: boolean + secretRef: + description: |- + secretRef is optional: points to a secret object containing parameters used to connect + to OpenStack. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + volumeID: + description: |- + volumeID used to identify the volume in cinder. 
+ More info: https://examples.k8s.io/mysql-cinder-pd/README.md + type: string + required: + - volumeID + type: object + configMap: + description: configMap represents a configMap that should + populate this volume + properties: + defaultMode: + description: |- + defaultMode is optional: mode bits used to set permissions on created files by default. + Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + Defaults to 0644. + Directories within the path are not affected by this setting. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + items: + description: |- + items if unspecified, each key-value pair in the Data field of the referenced + ConfigMap will be projected into the volume as a file whose name is the + key and content is the value. If specified, the listed keys will be + projected into the specified paths, and unlisted keys will not be + present. If a key is specified which is not present in the ConfigMap, + the volume setup will error unless it is marked optional. Paths must be + relative and may not contain the '..' path or start with '..'. + items: + description: Maps a string key to a path within a + volume. + properties: + key: + description: key is the key to project. + type: string + mode: + description: |- + mode is Optional: mode bits used to set permissions on this file. + Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + If not specified, the volume defaultMode will be used. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + path: + description: |- + path is the relative path of the file to map the key to. + May not be an absolute path. + May not contain the path element '..'. + May not start with the string '..'. + type: string + required: + - key + - path + type: object + type: array + x-kubernetes-list-type: atomic + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + optional: + description: optional specify whether the ConfigMap + or its keys must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + csi: + description: csi (Container Storage Interface) represents + ephemeral storage that is handled by certain external + CSI drivers. + properties: + driver: + description: |- + driver is the name of the CSI driver that handles this volume. + Consult with your admin for the correct name as registered in the cluster. + type: string + fsType: + description: |- + fsType to mount. Ex. "ext4", "xfs", "ntfs". + If not provided, the empty value is passed to the associated CSI driver + which will determine the default filesystem to apply. + type: string + nodePublishSecretRef: + description: |- + nodePublishSecretRef is a reference to the secret object containing + sensitive information to pass to the CSI driver to complete the CSI + NodePublishVolume and NodeUnpublishVolume calls. 
+ This field is optional, and may be empty if no secret is required. If the + secret object contains more than one secret, all secret references are passed. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + readOnly: + description: |- + readOnly specifies a read-only configuration for the volume. + Defaults to false (read/write). + type: boolean + volumeAttributes: + additionalProperties: + type: string + description: |- + volumeAttributes stores driver-specific properties that are passed to the CSI + driver. Consult your driver's documentation for supported values. + type: object + required: + - driver + type: object + downwardAPI: + description: downwardAPI represents downward API about the + pod that should populate this volume + properties: + defaultMode: + description: |- + Optional: mode bits to use on created files by default. Must be a + Optional: mode bits used to set permissions on created files by default. + Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + Defaults to 0644. + Directories within the path are not affected by this setting. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + items: + description: Items is a list of downward API volume + file + items: + description: DownwardAPIVolumeFile represents information + to create the file containing the pod field + properties: + fieldRef: + description: 'Required: Selects a field of the + pod: only annotations, labels, name, namespace + and uid are supported.' + properties: + apiVersion: + description: Version of the schema the FieldPath + is written in terms of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to select in + the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + mode: + description: |- + Optional: mode bits used to set permissions on this file, must be an octal value + between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + If not specified, the volume defaultMode will be used. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + path: + description: 'Required: Path is the relative + path name of the file to be created. Must not + be absolute or contain the ''..'' path. Must + be utf-8 encoded. The first item of the relative + path must not start with ''..''' + type: string + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported. 
+ properties: + containerName: + description: 'Container name: required for + volumes, optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format of + the exposed resources, defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + required: + - path + type: object + type: array + x-kubernetes-list-type: atomic + type: object + emptyDir: + description: |- + emptyDir represents a temporary directory that shares a pod's lifetime. + More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir + properties: + medium: + description: |- + medium represents what type of storage medium should back this directory. + The default is "" which means to use the node's default medium. + Must be an empty string (default) or Memory. + More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir + type: string + sizeLimit: + anyOf: + - type: integer + - type: string + description: |- + sizeLimit is the total amount of local storage required for this EmptyDir volume. + The size limit is also applicable for memory medium. + The maximum usage on memory medium EmptyDir would be the minimum value between + the SizeLimit specified here and the sum of memory limits of all containers in a pod. + The default is nil which means that the limit is undefined. + More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + ephemeral: + description: |- + ephemeral represents a volume that is handled by a cluster storage driver. + The volume's lifecycle is tied to the pod that defines it - it will be created before the pod starts, + and deleted when the pod is removed. + + Use this if: + a) the volume is only needed while the pod runs, + b) features of normal volumes like restoring from snapshot or capacity + tracking are needed, + c) the storage driver is specified through a storage class, and + d) the storage driver supports dynamic volume provisioning through + a PersistentVolumeClaim (see EphemeralVolumeSource for more + information on the connection between this volume type + and PersistentVolumeClaim). + + Use PersistentVolumeClaim or one of the vendor-specific + APIs for volumes that persist for longer than the lifecycle + of an individual pod. + + Use CSI for light-weight local ephemeral volumes if the CSI driver is meant to + be used that way - see the documentation of the driver for + more information. + + A pod can use both types of ephemeral volumes and + persistent volumes at the same time. + properties: + volumeClaimTemplate: + description: |- + Will be used to create a stand-alone PVC to provision the volume. + The pod in which this EphemeralVolumeSource is embedded will be the + owner of the PVC, i.e. the PVC will be deleted together with the + pod. The name of the PVC will be `-` where + `` is the name from the `PodSpec.Volumes` array + entry. Pod validation will reject the pod if the concatenated name + is not valid for a PVC (for example, too long). 
+ + An existing PVC with that name that is not owned by the pod + will *not* be used for the pod to avoid using an unrelated + volume by mistake. Starting the pod is then blocked until + the unrelated PVC is removed. If such a pre-created PVC is + meant to be used by the pod, the PVC has to updated with an + owner reference to the pod once the pod exists. Normally + this should not be necessary, but it may be useful when + manually reconstructing a broken cluster. + + This field is read-only and no changes will be made by Kubernetes + to the PVC after it has been created. + + Required, must not be nil. + properties: + metadata: + description: |- + May contain labels and annotations that will be copied into the PVC + when creating it. No other fields are allowed and will be rejected during + validation. + type: object + spec: + description: |- + The specification for the PersistentVolumeClaim. The entire content is + copied unchanged into the PVC that gets created from this + template. The same fields as in a PersistentVolumeClaim + are also valid here. + properties: + accessModes: + description: |- + accessModes contains the desired access modes the volume should have. + More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1 + items: + type: string + type: array + x-kubernetes-list-type: atomic + dataSource: + description: |- + dataSource field can be used to specify either: + * An existing VolumeSnapshot object (snapshot.storage.k8s.io/VolumeSnapshot) + * An existing PVC (PersistentVolumeClaim) + If the provisioner or an external controller can support the specified data source, + it will create a new volume based on the contents of the specified data source. + When the AnyVolumeDataSource feature gate is enabled, dataSource contents will be copied to dataSourceRef, + and dataSourceRef contents will be copied to dataSource when dataSourceRef.namespace is not specified. + If the namespace is specified, then dataSourceRef will not be copied to dataSource. + properties: + apiGroup: + description: |- + APIGroup is the group for the resource being referenced. + If APIGroup is not specified, the specified Kind must be in the core API group. + For any other third-party types, APIGroup is required. + type: string + kind: + description: Kind is the type of resource + being referenced + type: string + name: + description: Name is the name of resource + being referenced + type: string + required: + - kind + - name + type: object + x-kubernetes-map-type: atomic + dataSourceRef: + description: |- + dataSourceRef specifies the object from which to populate the volume with data, if a non-empty + volume is desired. This may be any object from a non-empty API group (non + core object) or a PersistentVolumeClaim object. + When this field is specified, volume binding will only succeed if the type of + the specified object matches some installed volume populator or dynamic + provisioner. + This field will replace the functionality of the dataSource field and as such + if both fields are non-empty, they must have the same value. For backwards + compatibility, when namespace isn't specified in dataSourceRef, + both fields (dataSource and dataSourceRef) will be set to the same + value automatically if one of them is empty and the other is non-empty. + When namespace is specified in dataSourceRef, + dataSource isn't set to the same value and must be empty. 
+ There are three important differences between dataSource and dataSourceRef: + * While dataSource only allows two specific types of objects, dataSourceRef + allows any non-core object, as well as PersistentVolumeClaim objects. + * While dataSource ignores disallowed values (dropping them), dataSourceRef + preserves all values, and generates an error if a disallowed value is + specified. + * While dataSource only allows local objects, dataSourceRef allows objects + in any namespaces. + (Beta) Using this field requires the AnyVolumeDataSource feature gate to be enabled. + (Alpha) Using the namespace field of dataSourceRef requires the CrossNamespaceVolumeDataSource feature gate to be enabled. + properties: + apiGroup: + description: |- + APIGroup is the group for the resource being referenced. + If APIGroup is not specified, the specified Kind must be in the core API group. + For any other third-party types, APIGroup is required. + type: string + kind: + description: Kind is the type of resource + being referenced + type: string + name: + description: Name is the name of resource + being referenced + type: string + namespace: + description: |- + Namespace is the namespace of resource being referenced + Note that when a namespace is specified, a gateway.networking.k8s.io/ReferenceGrant object is required in the referent namespace to allow that namespace's owner to accept the reference. See the ReferenceGrant documentation for details. + (Alpha) This field requires the CrossNamespaceVolumeDataSource feature gate to be enabled. + type: string + required: + - kind + - name + type: object + resources: + description: |- + resources represents the minimum resources the volume should have. + If RecoverVolumeExpansionFailure feature is enabled users are allowed to specify resource requirements + that are lower than previous value but must still be higher than capacity recorded in the + status field of the claim. + More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#resources + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Requests describes the minimum amount of compute resources required. + If Requests is omitted for a container, it defaults to Limits if that is explicitly specified, + otherwise to an implementation-defined value. Requests cannot exceed Limits. + More info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ + type: object + type: object + selector: + description: selector is a label query over + volumes to consider for binding. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. 
+ properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. If the operator is Exists or DoesNotExist, + the values array must be empty. This array is replaced during a strategic + merge patch. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: |- + matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels + map is equivalent to an element of matchExpressions, whose key field is "key", the + operator is "In", and the values array contains only "value". The requirements are ANDed. + type: object + type: object + x-kubernetes-map-type: atomic + storageClassName: + description: |- + storageClassName is the name of the StorageClass required by the claim. + More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1 + type: string + volumeAttributesClassName: + description: |- + volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim. + If specified, the CSI driver will create or update the volume with the attributes defined + in the corresponding VolumeAttributesClass. This has a different purpose than storageClassName, + it can be changed after the claim is created. An empty string value means that no VolumeAttributesClass + will be applied to the claim but it's not allowed to reset this field to empty string once it is set. + If unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass + will be set by the persistentvolume controller if it exists. + If the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be + set to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource + exists. + More info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/ + (Beta) Using this field requires the VolumeAttributesClass feature gate to be enabled (off by default). + type: string + volumeMode: + description: |- + volumeMode defines what type of volume is required by the claim. + Value of Filesystem is implied when not included in claim spec. + type: string + volumeName: + description: volumeName is the binding reference + to the PersistentVolume backing this claim. + type: string + type: object + required: + - spec + type: object + type: object + fc: + description: fc represents a Fibre Channel resource that + is attached to a kubelet's host machine and then exposed + to the pod. + properties: + fsType: + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + type: string + lun: + description: 'lun is Optional: FC target lun number' + format: int32 + type: integer + readOnly: + description: |- + readOnly is Optional: Defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. 
+ type: boolean + targetWWNs: + description: 'targetWWNs is Optional: FC target worldwide + names (WWNs)' + items: + type: string + type: array + x-kubernetes-list-type: atomic + wwids: + description: |- + wwids Optional: FC volume world wide identifiers (wwids) + Either wwids or combination of targetWWNs and lun must be set, but not both simultaneously. + items: + type: string + type: array + x-kubernetes-list-type: atomic + type: object + flexVolume: + description: |- + flexVolume represents a generic volume resource that is + provisioned/attached using an exec based plugin. + Deprecated: FlexVolume is deprecated. Consider using a CSIDriver instead. + properties: + driver: + description: driver is the name of the driver to use + for this volume. + type: string + fsType: + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs", "ntfs". The default filesystem depends on FlexVolume script. + type: string + options: + additionalProperties: + type: string + description: 'options is Optional: this field holds + extra command options if any.' + type: object + readOnly: + description: |- + readOnly is Optional: defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + secretRef: + description: |- + secretRef is Optional: secretRef is reference to the secret object containing + sensitive information to pass to the plugin scripts. This may be + empty if no secret object is specified. If the secret object + contains more than one secret, all secrets are passed to the plugin + scripts. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + required: + - driver + type: object + flocker: + description: |- + flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running. + Deprecated: Flocker is deprecated and the in-tree flocker type is no longer supported. + properties: + datasetName: + description: |- + datasetName is Name of the dataset stored as metadata -> name on the dataset for Flocker + should be considered as deprecated + type: string + datasetUUID: + description: datasetUUID is the UUID of the dataset. + This is unique identifier of a Flocker dataset + type: string + type: object + gcePersistentDisk: + description: |- + gcePersistentDisk represents a GCE Disk resource that is attached to a + kubelet's host machine and then exposed to the pod. + Deprecated: GCEPersistentDisk is deprecated. All operations for the in-tree + gcePersistentDisk type are redirected to the pd.csi.storage.gke.io CSI driver. + More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk + properties: + fsType: + description: |- + fsType is filesystem type of the volume that you want to mount. + Tip: Ensure that the filesystem type is supported by the host operating system. + Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. 
+ More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk + type: string + partition: + description: |- + partition is the partition in the volume that you want to mount. + If omitted, the default is to mount by volume name. + Examples: For volume /dev/sda1, you specify the partition as "1". + Similarly, the volume partition for /dev/sda is "0" (or you can leave the property empty). + More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk + format: int32 + type: integer + pdName: + description: |- + pdName is unique name of the PD resource in GCE. Used to identify the disk in GCE. + More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk + type: string + readOnly: + description: |- + readOnly here will force the ReadOnly setting in VolumeMounts. + Defaults to false. + More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk + type: boolean + required: + - pdName + type: object + gitRepo: + description: |- + gitRepo represents a git repository at a particular revision. + Deprecated: GitRepo is deprecated. To provision a container with a git repo, mount an + EmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir + into the Pod's container. + properties: + directory: + description: |- + directory is the target directory name. + Must not contain or start with '..'. If '.' is supplied, the volume directory will be the + git repository. Otherwise, if specified, the volume will contain the git repository in + the subdirectory with the given name. + type: string + repository: + description: repository is the URL + type: string + revision: + description: revision is the commit hash for the specified + revision. + type: string + required: + - repository + type: object + glusterfs: + description: |- + glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime. + Deprecated: Glusterfs is deprecated and the in-tree glusterfs type is no longer supported. + More info: https://examples.k8s.io/volumes/glusterfs/README.md + properties: + endpoints: + description: |- + endpoints is the endpoint name that details Glusterfs topology. + More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod + type: string + path: + description: |- + path is the Glusterfs volume path. + More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod + type: string + readOnly: + description: |- + readOnly here will force the Glusterfs volume to be mounted with read-only permissions. + Defaults to false. + More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod + type: boolean + required: + - endpoints + - path + type: object + hostPath: + description: |- + hostPath represents a pre-existing file or directory on the host + machine that is directly exposed to the container. This is generally + used for system agents or other privileged things that are allowed + to see the host machine. Most containers will NOT need this. + More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath + properties: + path: + description: |- + path of the directory on the host. + If the path is a symlink, it will follow the link to the real path. 
+ More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath + type: string + type: + description: |- + type for HostPath Volume + Defaults to "" + More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath + type: string + required: + - path + type: object + image: + description: |- + image represents an OCI object (a container image or artifact) pulled and mounted on the kubelet's host machine. + The volume is resolved at pod startup depending on which PullPolicy value is provided: + + - Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails. + - Never: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present. + - IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails. + + The volume gets re-resolved if the pod gets deleted and recreated, which means that new remote content will become available on pod recreation. + A failure to resolve or pull the image during pod startup will block containers from starting and may add significant latency. Failures will be retried using normal volume backoff and will be reported on the pod reason and message. + The types of objects that may be mounted by this volume are defined by the container runtime implementation on a host machine and at minimum must include all valid types supported by the container image field. + The OCI object gets mounted in a single directory (spec.containers[*].volumeMounts.mountPath) by merging the manifest layers in the same way as for container images. + The volume will be mounted read-only (ro) and non-executable files (noexec). + Sub path mounts for containers are not supported (spec.containers[*].volumeMounts.subpath) before 1.33. + The field spec.securityContext.fsGroupChangePolicy has no effect on this volume type. + properties: + pullPolicy: + description: |- + Policy for pulling OCI objects. Possible values are: + Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails. + Never: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present. + IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails. + Defaults to Always if :latest tag is specified, or IfNotPresent otherwise. + type: string + reference: + description: |- + Required: Image or artifact reference to be used. + Behaves in the same way as pod.spec.containers[*].image. + Pull secrets will be assembled in the same way as for the container image by looking up node credentials, SA image pull secrets, and pod spec image pull secrets. + More info: https://kubernetes.io/docs/concepts/containers/images + This field is optional to allow higher level config management to default or override + container images in workload controllers like Deployments and StatefulSets. + type: string + type: object + iscsi: + description: |- + iscsi represents an ISCSI Disk resource that is attached to a + kubelet's host machine and then exposed to the pod. 
+ More info: https://examples.k8s.io/volumes/iscsi/README.md + properties: + chapAuthDiscovery: + description: chapAuthDiscovery defines whether support + iSCSI Discovery CHAP authentication + type: boolean + chapAuthSession: + description: chapAuthSession defines whether support + iSCSI Session CHAP authentication + type: boolean + fsType: + description: |- + fsType is the filesystem type of the volume that you want to mount. + Tip: Ensure that the filesystem type is supported by the host operating system. + Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + More info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi + type: string + initiatorName: + description: |- + initiatorName is the custom iSCSI Initiator Name. + If initiatorName is specified with iscsiInterface simultaneously, new iSCSI interface + : will be created for the connection. + type: string + iqn: + description: iqn is the target iSCSI Qualified Name. + type: string + iscsiInterface: + default: default + description: |- + iscsiInterface is the interface Name that uses an iSCSI transport. + Defaults to 'default' (tcp). + type: string + lun: + description: lun represents iSCSI Target Lun number. + format: int32 + type: integer + portals: + description: |- + portals is the iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port + is other than default (typically TCP ports 860 and 3260). + items: + type: string + type: array + x-kubernetes-list-type: atomic + readOnly: + description: |- + readOnly here will force the ReadOnly setting in VolumeMounts. + Defaults to false. + type: boolean + secretRef: + description: secretRef is the CHAP Secret for iSCSI + target and initiator authentication + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + targetPortal: + description: |- + targetPortal is iSCSI Target Portal. The Portal is either an IP or ip_addr:port if the port + is other than default (typically TCP ports 860 and 3260). + type: string + required: + - iqn + - lun + - targetPortal + type: object + name: + description: |- + name of the volume. + Must be a DNS_LABEL and unique within the pod. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + nfs: + description: |- + nfs represents an NFS mount on the host that shares a pod's lifetime + More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs + properties: + path: + description: |- + path that is exported by the NFS server. + More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs + type: string + readOnly: + description: |- + readOnly here will force the NFS export to be mounted with read-only permissions. + Defaults to false. + More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs + type: boolean + server: + description: |- + server is the hostname or IP address of the NFS server. + More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs + type: string + required: + - path + - server + type: object + persistentVolumeClaim: + description: |- + persistentVolumeClaimVolumeSource represents a reference to a + PersistentVolumeClaim in the same namespace. 
+ More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims + properties: + claimName: + description: |- + claimName is the name of a PersistentVolumeClaim in the same namespace as the pod using this volume. + More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims + type: string + readOnly: + description: |- + readOnly Will force the ReadOnly setting in VolumeMounts. + Default false. + type: boolean + required: + - claimName + type: object + photonPersistentDisk: + description: |- + photonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine. + Deprecated: PhotonPersistentDisk is deprecated and the in-tree photonPersistentDisk type is no longer supported. + properties: + fsType: + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + type: string + pdID: + description: pdID is the ID that identifies Photon Controller + persistent disk + type: string + required: + - pdID + type: object + portworxVolume: + description: |- + portworxVolume represents a portworx volume attached and mounted on kubelets host machine. + Deprecated: PortworxVolume is deprecated. All operations for the in-tree portworxVolume type + are redirected to the pxd.portworx.com CSI driver when the CSIMigrationPortworx feature-gate + is on. + properties: + fsType: + description: |- + fSType represents the filesystem type to mount + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs". Implicitly inferred to be "ext4" if unspecified. + type: string + readOnly: + description: |- + readOnly defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + volumeID: + description: volumeID uniquely identifies a Portworx + volume + type: string + required: + - volumeID + type: object + projected: + description: projected items for all in one resources secrets, + configmaps, and downward API + properties: + defaultMode: + description: |- + defaultMode are the mode bits used to set permissions on created files by default. + Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + Directories within the path are not affected by this setting. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + sources: + description: |- + sources is the list of volume projections. Each entry in this list + handles one source. + items: + description: |- + Projection that may be projected along with other supported volume types. + Exactly one of these fields must be set. + properties: + clusterTrustBundle: + description: |- + ClusterTrustBundle allows a pod to access the `.spec.trustBundle` field + of ClusterTrustBundle objects in an auto-updating file. + + Alpha, gated by the ClusterTrustBundleProjection feature gate. + + ClusterTrustBundle objects can either be selected by name, or by the + combination of signer name and a label selector. + + Kubelet performs aggressive normalization of the PEM contents written + into the pod filesystem. Esoteric PEM features such as inter-block + comments and block headers are stripped. Certificates are deduplicated. 
+ The ordering of certificates within the file is arbitrary, and Kubelet + may change the order over time. + properties: + labelSelector: + description: |- + Select all ClusterTrustBundles that match this label selector. Only has + effect if signerName is set. Mutually-exclusive with name. If unset, + interpreted as "match nothing". If set but empty, interpreted as "match + everything". + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. If the operator is Exists or DoesNotExist, + the values array must be empty. This array is replaced during a strategic + merge patch. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: |- + matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels + map is equivalent to an element of matchExpressions, whose key field is "key", the + operator is "In", and the values array contains only "value". The requirements are ANDed. + type: object + type: object + x-kubernetes-map-type: atomic + name: + description: |- + Select a single ClusterTrustBundle by object name. Mutually-exclusive + with signerName and labelSelector. + type: string + optional: + description: |- + If true, don't block pod startup if the referenced ClusterTrustBundle(s) + aren't available. If using name, then the named ClusterTrustBundle is + allowed not to exist. If using signerName, then the combination of + signerName and labelSelector is allowed to match zero + ClusterTrustBundles. + type: boolean + path: + description: Relative path from the volume + root to write the bundle. + type: string + signerName: + description: |- + Select all ClusterTrustBundles that match this signer name. + Mutually-exclusive with name. The contents of all selected + ClusterTrustBundles will be unified and deduplicated. + type: string + required: + - path + type: object + configMap: + description: configMap information about the configMap + data to project + properties: + items: + description: |- + items if unspecified, each key-value pair in the Data field of the referenced + ConfigMap will be projected into the volume as a file whose name is the + key and content is the value. If specified, the listed keys will be + projected into the specified paths, and unlisted keys will not be + present. If a key is specified which is not present in the ConfigMap, + the volume setup will error unless it is marked optional. Paths must be + relative and may not contain the '..' path or start with '..'. + items: + description: Maps a string key to a path + within a volume. + properties: + key: + description: key is the key to project. + type: string + mode: + description: |- + mode is Optional: mode bits used to set permissions on this file. 
+ Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + If not specified, the volume defaultMode will be used. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + path: + description: |- + path is the relative path of the file to map the key to. + May not be an absolute path. + May not contain the path element '..'. + May not start with the string '..'. + type: string + required: + - key + - path + type: object + type: array + x-kubernetes-list-type: atomic + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + optional: + description: optional specify whether the + ConfigMap or its keys must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + downwardAPI: + description: downwardAPI information about the + downwardAPI data to project + properties: + items: + description: Items is a list of DownwardAPIVolume + file + items: + description: DownwardAPIVolumeFile represents + information to create the file containing + the pod field + properties: + fieldRef: + description: 'Required: Selects a field + of the pod: only annotations, labels, + name, namespace and uid are supported.' + properties: + apiVersion: + description: Version of the schema + the FieldPath is written in terms + of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to + select in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + mode: + description: |- + Optional: mode bits used to set permissions on this file, must be an octal value + between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + If not specified, the volume defaultMode will be used. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + path: + description: 'Required: Path is the + relative path name of the file to + be created. Must not be absolute or + contain the ''..'' path. Must be utf-8 + encoded. The first item of the relative + path must not start with ''..''' + type: string + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported. 
+ properties: + containerName: + description: 'Container name: required + for volumes, optional for env + vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output + format of the exposed resources, + defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource + to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + required: + - path + type: object + type: array + x-kubernetes-list-type: atomic + type: object + secret: + description: secret information about the secret + data to project + properties: + items: + description: |- + items if unspecified, each key-value pair in the Data field of the referenced + Secret will be projected into the volume as a file whose name is the + key and content is the value. If specified, the listed keys will be + projected into the specified paths, and unlisted keys will not be + present. If a key is specified which is not present in the Secret, + the volume setup will error unless it is marked optional. Paths must be + relative and may not contain the '..' path or start with '..'. + items: + description: Maps a string key to a path + within a volume. + properties: + key: + description: key is the key to project. + type: string + mode: + description: |- + mode is Optional: mode bits used to set permissions on this file. + Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + If not specified, the volume defaultMode will be used. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + path: + description: |- + path is the relative path of the file to map the key to. + May not be an absolute path. + May not contain the path element '..'. + May not start with the string '..'. + type: string + required: + - key + - path + type: object + type: array + x-kubernetes-list-type: atomic + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + optional: + description: optional field specify whether + the Secret or its key must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + serviceAccountToken: + description: serviceAccountToken is information + about the serviceAccountToken data to project + properties: + audience: + description: |- + audience is the intended audience of the token. A recipient of a token + must identify itself with an identifier specified in the audience of the + token, and otherwise should reject the token. The audience defaults to the + identifier of the apiserver. + type: string + expirationSeconds: + description: |- + expirationSeconds is the requested duration of validity of the service + account token. As the token approaches expiration, the kubelet volume + plugin will proactively rotate the service account token. 
The kubelet will + start trying to rotate the token if the token is older than 80 percent of + its time to live or if the token is older than 24 hours.Defaults to 1 hour + and must be at least 10 minutes. + format: int64 + type: integer + path: + description: |- + path is the path relative to the mount point of the file to project the + token into. + type: string + required: + - path + type: object + type: object + type: array + x-kubernetes-list-type: atomic + type: object + quobyte: + description: |- + quobyte represents a Quobyte mount on the host that shares a pod's lifetime. + Deprecated: Quobyte is deprecated and the in-tree quobyte type is no longer supported. + properties: + group: + description: |- + group to map volume access to + Default is no group + type: string + readOnly: + description: |- + readOnly here will force the Quobyte volume to be mounted with read-only permissions. + Defaults to false. + type: boolean + registry: + description: |- + registry represents a single or multiple Quobyte Registry services + specified as a string as host:port pair (multiple entries are separated with commas) + which acts as the central registry for volumes + type: string + tenant: + description: |- + tenant owning the given Quobyte volume in the Backend + Used with dynamically provisioned Quobyte volumes, value is set by the plugin + type: string + user: + description: |- + user to map volume access to + Defaults to serivceaccount user + type: string + volume: + description: volume is a string that references an already + created Quobyte volume by name. + type: string + required: + - registry + - volume + type: object + rbd: + description: |- + rbd represents a Rados Block Device mount on the host that shares a pod's lifetime. + Deprecated: RBD is deprecated and the in-tree rbd type is no longer supported. + More info: https://examples.k8s.io/volumes/rbd/README.md + properties: + fsType: + description: |- + fsType is the filesystem type of the volume that you want to mount. + Tip: Ensure that the filesystem type is supported by the host operating system. + Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + More info: https://kubernetes.io/docs/concepts/storage/volumes#rbd + type: string + image: + description: |- + image is the rados image name. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + type: string + keyring: + default: /etc/ceph/keyring + description: |- + keyring is the path to key ring for RBDUser. + Default is /etc/ceph/keyring. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + type: string + monitors: + description: |- + monitors is a collection of Ceph monitors. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + items: + type: string + type: array + x-kubernetes-list-type: atomic + pool: + default: rbd + description: |- + pool is the rados pool name. + Default is rbd. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + type: string + readOnly: + description: |- + readOnly here will force the ReadOnly setting in VolumeMounts. + Defaults to false. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + type: boolean + secretRef: + description: |- + secretRef is name of the authentication secret for RBDUser. If provided + overrides keyring. + Default is nil. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + properties: + name: + default: "" + description: |- + Name of the referent. 
+ This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + user: + default: admin + description: |- + user is the rados user name. + Default is admin. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + type: string + required: + - image + - monitors + type: object + scaleIO: + description: |- + scaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes. + Deprecated: ScaleIO is deprecated and the in-tree scaleIO type is no longer supported. + properties: + fsType: + default: xfs + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs", "ntfs". + Default is "xfs". + type: string + gateway: + description: gateway is the host address of the ScaleIO + API Gateway. + type: string + protectionDomain: + description: protectionDomain is the name of the ScaleIO + Protection Domain for the configured storage. + type: string + readOnly: + description: |- + readOnly Defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + secretRef: + description: |- + secretRef references to the secret for ScaleIO user and other + sensitive information. If this is not provided, Login operation will fail. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + sslEnabled: + description: sslEnabled Flag enable/disable SSL communication + with Gateway, default false + type: boolean + storageMode: + default: ThinProvisioned + description: |- + storageMode indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned. + Default is ThinProvisioned. + type: string + storagePool: + description: storagePool is the ScaleIO Storage Pool + associated with the protection domain. + type: string + system: + description: system is the name of the storage system + as configured in ScaleIO. + type: string + volumeName: + description: |- + volumeName is the name of a volume already created in the ScaleIO system + that is associated with this volume source. + type: string + required: + - gateway + - secretRef + - system + type: object + secret: + description: |- + secret represents a secret that should populate this volume. + More info: https://kubernetes.io/docs/concepts/storage/volumes#secret + properties: + defaultMode: + description: |- + defaultMode is Optional: mode bits used to set permissions on created files by default. + Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values + for mode bits. Defaults to 0644. + Directories within the path are not affected by this setting. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. 
+ format: int32 + type: integer + items: + description: |- + items If unspecified, each key-value pair in the Data field of the referenced + Secret will be projected into the volume as a file whose name is the + key and content is the value. If specified, the listed keys will be + projected into the specified paths, and unlisted keys will not be + present. If a key is specified which is not present in the Secret, + the volume setup will error unless it is marked optional. Paths must be + relative and may not contain the '..' path or start with '..'. + items: + description: Maps a string key to a path within a + volume. + properties: + key: + description: key is the key to project. + type: string + mode: + description: |- + mode is Optional: mode bits used to set permissions on this file. + Must be an octal value between 0000 and 0777 or a decimal value between 0 and 511. + YAML accepts both octal and decimal values, JSON requires decimal values for mode bits. + If not specified, the volume defaultMode will be used. + This might be in conflict with other options that affect the file + mode, like fsGroup, and the result can be other mode bits set. + format: int32 + type: integer + path: + description: |- + path is the relative path of the file to map the key to. + May not be an absolute path. + May not contain the path element '..'. + May not start with the string '..'. + type: string + required: + - key + - path + type: object + type: array + x-kubernetes-list-type: atomic + optional: + description: optional field specify whether the Secret + or its keys must be defined + type: boolean + secretName: + description: |- + secretName is the name of the secret in the pod's namespace to use. + More info: https://kubernetes.io/docs/concepts/storage/volumes#secret + type: string + type: object + storageos: + description: |- + storageOS represents a StorageOS volume attached and mounted on Kubernetes nodes. + Deprecated: StorageOS is deprecated and the in-tree storageos type is no longer supported. + properties: + fsType: + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + type: string + readOnly: + description: |- + readOnly defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + secretRef: + description: |- + secretRef specifies the secret to use for obtaining the StorageOS API + credentials. If not specified, default values will be attempted. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. Instances of this type with an empty value here are + almost certainly wrong. + More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names + type: string + type: object + x-kubernetes-map-type: atomic + volumeName: + description: |- + volumeName is the human-readable name of the StorageOS volume. Volume + names are only unique within a namespace. + type: string + volumeNamespace: + description: |- + volumeNamespace specifies the scope of the volume within StorageOS. If no + namespace is specified then the Pod's namespace will be used. This allows the + Kubernetes name scoping to be mirrored within StorageOS for tighter integration. + Set VolumeName to any name to override the default behaviour. 
+ Set to "default" if you are not using namespaces within StorageOS. + Namespaces that do not pre-exist within StorageOS will be created. + type: string + type: object + vsphereVolume: + description: |- + vsphereVolume represents a vSphere volume attached and mounted on kubelets host machine. + Deprecated: VsphereVolume is deprecated. All operations for the in-tree vsphereVolume type + are redirected to the csi.vsphere.vmware.com CSI driver. + properties: + fsType: + description: |- + fsType is filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. + type: string + storagePolicyID: + description: storagePolicyID is the storage Policy Based + Management (SPBM) profile ID associated with the StoragePolicyName. + type: string + storagePolicyName: + description: storagePolicyName is the storage Policy + Based Management (SPBM) profile name. + type: string + volumePath: + description: volumePath is the path that identifies + vSphere volume vmdk + type: string + required: + - volumePath + type: object + required: + - name + type: object + type: array + type: object description: type: string memory: @@ -234,9 +2181,18 @@ spec: - type type: object type: array + configHash: + description: |- + This is used to determine if the agent config has changed. + If it has changed, the agent will be restarted. + format: byte + type: string observedGeneration: format: int64 type: integer + required: + - configHash + - observedGeneration type: object type: object served: true diff --git a/helm/kagent-crds/templates/kagent.dev_modelconfigs.yaml b/helm/kagent-crds/templates/kagent.dev_modelconfigs.yaml index 10318d797..42dc4e865 100644 --- a/helm/kagent-crds/templates/kagent.dev_modelconfigs.yaml +++ b/helm/kagent-crds/templates/kagent.dev_modelconfigs.yaml @@ -46,6 +46,7 @@ spec: metadata: type: object spec: + description: ModelConfigSpec defines the desired state of ModelConfig. properties: anthropic: description: Anthropic-specific configuration @@ -101,9 +102,8 @@ spec: type: string apiKeySecretRef: description: The reference to the secret that contains the API key. 
- Can either be a reference to the name of a secret in the same namespace - as the referencing ModelConfig, or a reference to the name of a - Secret in a different namespace in the form / + Must be a reference to the name of a secret in the same namespace + as the referencing ModelConfig type: string azureOpenAI: description: Azure OpenAI-specific configuration @@ -137,8 +137,11 @@ spec: additionalProperties: type: string type: object - geminiVertexAI: + gemini: description: Gemini-specific configuration + type: object + geminiVertexAI: + description: Gemini Vertex AI-specific configuration properties: candidateCount: description: Candidate count @@ -248,6 +251,7 @@ spec: - OpenAI - AzureOpenAI - Ollama + - Gemini - GeminiVertexAI - AnthropicVertexAI type: string @@ -264,6 +268,8 @@ spec: rule: '!(has(self.azureOpenAI) && self.provider != ''AzureOpenAI'')' - message: provider.ollama must be nil if the provider is not Ollama rule: '!(has(self.ollama) && self.provider != ''Ollama'')' + - message: provider.gemini must be nil if the provider is not Gemini + rule: '!(has(self.gemini) && self.provider != ''Gemini'')' - message: provider.geminiVertexAI must be nil if the provider is not GeminiVertexAI rule: '!(has(self.geminiVertexAI) && self.provider != ''GeminiVertexAI'')' diff --git a/helm/kagent-crds/templates/kagent.dev_toolservers.yaml b/helm/kagent-crds/templates/kagent.dev_toolservers.yaml index 685d04e6d..942794ad6 100644 --- a/helm/kagent-crds/templates/kagent.dev_toolservers.yaml +++ b/helm/kagent-crds/templates/kagent.dev_toolservers.yaml @@ -42,6 +42,7 @@ spec: description: ToolServerSpec defines the desired state of ToolServer. properties: config: + description: Only one of stdio, sse, or streamableHttp can be specified. properties: sse: properties: @@ -201,7 +202,14 @@ spec: required: - url type: object + type: + type: string type: object + x-kubernetes-validations: + - message: Exactly one of stdio, sse, or streamableHttp must be specified + rule: (has(self.stdio) && !has(self.sse) && !has(self.streamableHttp)) + || (!has(self.stdio) && has(self.sse) && !has(self.streamableHttp)) + || (!has(self.stdio) && !has(self.sse) && has(self.streamableHttp)) description: type: string required: @@ -296,10 +304,12 @@ spec: - provider - version type: object + description: + type: string name: type: string required: - - component + - description - name type: object type: array diff --git a/helm/kagent/templates/controller-deployment.yaml b/helm/kagent/templates/controller-deployment.yaml index b53a6459e..a080a3d78 100644 --- a/helm/kagent/templates/controller-deployment.yaml +++ b/helm/kagent/templates/controller-deployment.yaml @@ -42,16 +42,14 @@ spec: - {{ .Values.controller.loglevel }} - -watch-namespaces - "{{ include "kagent.watchNamespaces" . }}" - - -autogen-base-url - - "http://{{ include "kagent.fullname" . }}-engine.{{ include "kagent.namespace" . 
}}.svc.cluster.local:{{ .Values.engine.service.ports.port }}/api" - -database-type - - {{ .Values.controller.database.type }} - {{- if eq .Values.controller.database.type "sqlite" }} + - {{ .Values.database.type }} + {{- if eq .Values.database.type "sqlite" }} - -sqlite-database-path - - /sqlite-volume/{{ .Values.controller.database.sqlite.databaseName }} - {{- else if eq .Values.controller.database.type "postgres" }} + - /sqlite-volume/{{ .Values.database.sqlite.databaseName }} + {{- else if eq .Values.database.type "postgres" }} - -postgres-database-url - - {{ .Values.controller.database.postgres.url }} + - {{ .Values.database.postgres.url }} {{- end }} securityContext: {{- toYaml .Values.controller.securityContext | nindent 12 }} @@ -60,6 +58,10 @@ spec: resources: {{- toYaml .Values.controller.resources | nindent 12 }} env: + - name: KAGENT_NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace - name: LOG_LEVEL value: {{ .Values.controller.loglevel | quote }} - name: OTEL_TRACING_ENABLED @@ -70,10 +72,6 @@ spec: value: {{ .Values.otel.tracing.exporter.otlp.timeout | quote }} - name: OTEL_EXPORTER_OTLP_TRACES_INSECURE value: {{ .Values.otel.tracing.exporter.otlp.insecure | quote }} - - name: KAGENT_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace {{- with .Values.controller.env }} {{- toYaml . | nindent 12 }} {{- end }} @@ -81,11 +79,60 @@ spec: - name: http containerPort: {{ .Values.controller.service.ports.targetPort }} protocol: TCP + startupProbe: + httpGet: + path: /health + port: http + periodSeconds: 1 + initialDelaySeconds: 1 readinessProbe: - tcpSocket: + httpGet: + path: /health port: http - initialDelaySeconds: 15 - periodSeconds: 15 + periodSeconds: 30 volumeMounts: - name: sqlite-volume mountPath: /sqlite-volume + - name: grafana-mcp # Temporary workaround until kmcp is ready. + securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + image: mcp/grafana:latest + imagePullPolicy: IfNotPresent + resources: + requests: + cpu: 100m + memory: 128Mi + limits: + cpu: 500m + memory: 512Mi + env: + - name: GRAFANA_URL + value: {{ .Values.tools.grafana.url | quote }} + - name: GRAFANA_API_KEY + value: {{ .Values.tools.grafana.apiKey | quote }} + - name: KAGENT_NAMESPACE + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: metadata.namespace + - name: OTEL_TRACING_ENABLED + value: {{ .Values.otel.tracing.enabled | quote }} + - name: OTEL_EXPORTER_OTLP_ENDPOINT + value: {{ .Values.otel.tracing.exporter.otlp.endpoint | quote }} + - name: OTEL_EXPORTER_OTLP_TRACES_TIMEOUT + value: {{ .Values.otel.tracing.exporter.otlp.timeout | quote }} + - name: OTEL_EXPORTER_OTLP_TRACES_INSECURE + value: {{ .Values.otel.tracing.exporter.otlp.insecure | quote }} + ports: + - name: http + containerPort: 8000 + protocol: TCP + startupProbe: + tcpSocket: + port: http + periodSeconds: 1 + initialDelaySeconds: 1 + readinessProbe: + tcpSocket: + port: http + periodSeconds: 30 \ No newline at end of file diff --git a/helm/kagent/templates/controller-service.yaml b/helm/kagent/templates/controller-service.yaml index 54933c355..417927d91 100644 --- a/helm/kagent/templates/controller-service.yaml +++ b/helm/kagent/templates/controller-service.yaml @@ -12,5 +12,9 @@ spec: targetPort: {{ .Values.controller.service.ports.targetPort }} protocol: TCP name: controller + - port: 8000 + targetPort: 8000 + protocol: TCP + name: grafana-mcp selector: {{- include "kagent.controller.selectorLabels" . 
| nindent 4 }} diff --git a/helm/kagent/templates/engine-deployment.yaml b/helm/kagent/templates/engine-deployment.yaml deleted file mode 100644 index afafa1eff..000000000 --- a/helm/kagent/templates/engine-deployment.yaml +++ /dev/null @@ -1,62 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ include "kagent.fullname" . }}-engine - namespace: {{ include "kagent.namespace" . }} - labels: - {{- include "kagent.labels" . | nindent 4 }} -spec: - replicas: {{ .Values.engine.replicas }} - selector: - matchLabels: - {{- include "kagent.engine.selectorLabels" . | nindent 6 }} - template: - metadata: - annotations: - {{- with .Values.engine.podAnnotations | default .Values.podAnnotations }} - {{- toYaml . | nindent 8 }} - {{- end }} - labels: - {{- include "kagent.engine.selectorLabels" . | nindent 8 }} - spec: - {{- with .Values.engine.imagePullSecrets | default .Values.imagePullSecrets }} - imagePullSecrets: - {{- toYaml . | nindent 8 }} - {{- end }} - securityContext: - {{- toYaml (.Values.engine.podSecurityContext | default .Values.podSecurityContext) | nindent 8 }} - serviceAccountName: {{ include "kagent.fullname" . }}-engine - containers: - - name: engine - securityContext: - {{- toYaml (.Values.engine.securityContext | default .Values.securityContext) | nindent 12 }} - image: "{{ .Values.engine.image.registry }}/{{ .Values.engine.image.repository }}:{{ coalesce .Values.tag .Values.engine.image.tag .Chart.Version }}" - imagePullPolicy: {{ .Values.engine.image.pullPolicy | default .Values.imagePullPolicy }} - env: - - name: LOG_LEVEL - value: {{ .Values.engine.loglevel | quote }} - - name: OTEL_TRACING_ENABLED - value: {{ .Values.otel.tracing.enabled | quote }} - - name: OTEL_EXPORTER_OTLP_ENDPOINT - value: {{ .Values.otel.tracing.exporter.otlp.endpoint | quote }} - - name: OTEL_EXPORTER_OTLP_TRACES_TIMEOUT - value: {{ .Values.otel.tracing.exporter.otlp.timeout | quote }} - - name: OTEL_EXPORTER_OTLP_TRACES_INSECURE - value: {{ .Values.otel.tracing.exporter.otlp.insecure | quote }} - - name: AUTOGEN_DISABLE_RUNTIME_TRACING - value: "true" - {{- with .Values.engine.env }} - {{- toYaml . | nindent 12 }} - {{- end }} - ports: - - name: http - containerPort: {{ .Values.engine.service.ports.targetPort }} - protocol: TCP - resources: - {{- toYaml .Values.engine.resources | nindent 12 }} - readinessProbe: - httpGet: - path: /api/version - port: http - initialDelaySeconds: 15 - periodSeconds: 15 diff --git a/helm/kagent/templates/engine-service.yaml b/helm/kagent/templates/engine-service.yaml deleted file mode 100644 index 3291c069c..000000000 --- a/helm/kagent/templates/engine-service.yaml +++ /dev/null @@ -1,16 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: {{ include "kagent.fullname" . }}-engine - namespace: {{ include "kagent.namespace" . }} - labels: - {{- include "kagent.engine.labels" . | nindent 4 }} -spec: - type: {{ .Values.engine.service.type }} - ports: - - port: {{ .Values.engine.service.ports.port }} - targetPort: {{ .Values.engine.service.ports.targetPort }} - protocol: TCP - name: engine - selector: - {{- include "kagent.engine.selectorLabels" . | nindent 4 }} diff --git a/helm/kagent/templates/engine-serviceaccount.yaml b/helm/kagent/templates/engine-serviceaccount.yaml deleted file mode 100644 index 85be7b3ed..000000000 --- a/helm/kagent/templates/engine-serviceaccount.yaml +++ /dev/null @@ -1,7 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - name: {{ include "kagent.fullname" . }}-engine - namespace: {{ include "kagent.namespace" . 
}} - labels: - {{- include "kagent.labels" . | nindent 4 }} diff --git a/helm/kagent/templates/toolserver-grafana.yaml b/helm/kagent/templates/toolserver-grafana.yaml index 652805518..cc0f0f039 100644 --- a/helm/kagent/templates/toolserver-grafana.yaml +++ b/helm/kagent/templates/toolserver-grafana.yaml @@ -16,19 +16,9 @@ metadata: namespace: kagent spec: config: - stdio: - command: /app/python/bin/mcp-grafana - args: - - -t - - stdio - - debug - readTimeoutSeconds: 30 - envFrom: - - name: "GRAFANA_URL" - value: {{ .Values.tools.grafana.url | quote }} - - name: "GRAFANA_API_KEY" - valueFrom: - type: Secret - key: "apiKey" - valueRef: {{ include "kagent.fullname" . }}-mcp-grafana - description: "Toolserver for Official Grafana MCP Server" + type: sse + sse: + url: {{ printf "http://kagent.%s.svc.cluster.local:%v/sse" .Release.Namespace 8000 }} + sseReadTimeout: 5m0s + timeout: 30s + description: "KAgent Grafana MCP" diff --git a/helm/kagent/templates/toolserver-kagent.yaml b/helm/kagent/templates/toolserver-kagent.yaml index 87d1ce980..610684b0d 100644 --- a/helm/kagent/templates/toolserver-kagent.yaml +++ b/helm/kagent/templates/toolserver-kagent.yaml @@ -12,4 +12,4 @@ spec: url: "http://{{ include "kagent.fullname" . }}-tools.{{ include "kagent.namespace" . }}.svc.cluster.local:8084/mcp" timeout: 30s sseReadTimeout: 5m0s - description: "Oficial KAgent tool server" + description: "Official KAgent tool server" diff --git a/helm/kagent/templates/ui-deployment.yaml b/helm/kagent/templates/ui-deployment.yaml index 8a76b4f70..2c8ddf01a 100644 --- a/helm/kagent/templates/ui-deployment.yaml +++ b/helm/kagent/templates/ui-deployment.yaml @@ -44,9 +44,14 @@ spec: protocol: TCP resources: {{- toYaml .Values.ui.resources | nindent 12 }} + startupProbe: + httpGet: + path: /health + port: http + periodSeconds: 1 + initialDelaySeconds: 1 readinessProbe: httpGet: - path: / + path: /health port: http - initialDelaySeconds: 15 - periodSeconds: 15 + periodSeconds: 30 \ No newline at end of file diff --git a/helm/kagent/tests/engine-deployment_test.yaml b/helm/kagent/tests/engine-deployment_test.yaml deleted file mode 100644 index 4503946ab..000000000 --- a/helm/kagent/tests/engine-deployment_test.yaml +++ /dev/null @@ -1,87 +0,0 @@ -suite: test engine deployment -templates: - - engine-deployment.yaml - - engine-configmap.yaml -tests: - - it: should render engine deployment with default values - template: engine-deployment.yaml - asserts: - - isKind: - of: Deployment - - equal: - path: metadata.name - value: RELEASE-NAME-engine - - equal: - path: spec.replicas - value: 1 - - hasDocuments: - count: 1 - - - it: should render engine deployment with custom replica count - template: engine-deployment.yaml - set: - engine: - replicas: 2 - asserts: - - equal: - path: spec.replicas - value: 2 - - - it: should have correct engine container image - template: engine-deployment.yaml - asserts: - - equal: - path: spec.template.spec.containers[0].name - value: engine - - matchRegex: - path: spec.template.spec.containers[0].image - pattern: "^cr\\.kagent\\.dev/kagent-dev/kagent/app:.+" - - - it: should use global tag when set - template: engine-deployment.yaml - set: - tag: "v1.0.0" - asserts: - - equal: - path: spec.template.spec.containers[0].image - value: cr.kagent.dev/kagent-dev/kagent/app:v1.0.0 - - - it: should have correct engine resources - template: engine-deployment.yaml - asserts: - - equal: - path: spec.template.spec.containers[0].resources.requests.cpu - value: 100m - - equal: - path: 
spec.template.spec.containers[0].resources.requests.memory - value: 256Mi - - equal: - path: spec.template.spec.containers[0].resources.limits.cpu - value: 1000m - - equal: - path: spec.template.spec.containers[0].resources.limits.memory - value: 1Gi - - - it: should have correct service account name - template: engine-deployment.yaml - asserts: - - equal: - path: spec.template.spec.serviceAccountName - value: RELEASE-NAME-engine - - - it: should have correct container port - template: engine-deployment.yaml - asserts: - - equal: - path: spec.template.spec.containers[0].ports[0].containerPort - value: 8081 - - - it: should have readiness probe for engine container - template: engine-deployment.yaml - asserts: - - equal: - path: spec.template.spec.containers[0].readinessProbe.httpGet.path - value: /api/version - - equal: - path: spec.template.spec.containers[0].readinessProbe.httpGet.port - value: http diff --git a/helm/kagent/tests/engine-service_test.yaml b/helm/kagent/tests/engine-service_test.yaml deleted file mode 100644 index 8add615a6..000000000 --- a/helm/kagent/tests/engine-service_test.yaml +++ /dev/null @@ -1,63 +0,0 @@ -suite: test engine service -templates: - - engine-service.yaml -tests: - - it: should render engine service with default values - asserts: - - isKind: - of: Service - - equal: - path: metadata.name - value: RELEASE-NAME-engine - - equal: - path: spec.type - value: ClusterIP - - hasDocuments: - count: 1 - - - it: should have correct port configuration - asserts: - - equal: - path: spec.ports[0].name - value: engine - - equal: - path: spec.ports[0].port - value: 8081 - - equal: - path: spec.ports[0].targetPort - value: 8081 - - equal: - path: spec.ports[0].protocol - value: TCP - - - it: should have correct selector labels - asserts: - - equal: - path: spec.selector["app.kubernetes.io/name"] - value: kagent - - equal: - path: spec.selector["app.kubernetes.io/instance"] - value: RELEASE-NAME - - equal: - path: spec.selector["app.kubernetes.io/component"] - value: engine - - - it: should have correct metadata labels - asserts: - - equal: - path: metadata.labels["app.kubernetes.io/name"] - value: kagent - - equal: - path: metadata.labels["app.kubernetes.io/instance"] - value: RELEASE-NAME - - equal: - path: metadata.labels["app.kubernetes.io/managed-by"] - value: Helm - - isNotEmpty: - path: metadata.labels["helm.sh/chart"] - - - it: should be in correct namespace - asserts: - - equal: - path: metadata.namespace - value: NAMESPACE \ No newline at end of file diff --git a/helm/kagent/tests/rbac_test.yaml b/helm/kagent/tests/rbac_test.yaml index efa8980d4..26f3e76ca 100644 --- a/helm/kagent/tests/rbac_test.yaml +++ b/helm/kagent/tests/rbac_test.yaml @@ -1,7 +1,6 @@ suite: test rbac templates: - controller-serviceaccount.yaml - - engine-serviceaccount.yaml - ui-serviceaccount.yaml - rbac/clusterrole.yaml - rbac/clusterrolebinding.yaml @@ -17,17 +16,6 @@ tests: - hasDocuments: count: 1 - - it: should render engine serviceaccount - template: engine-serviceaccount.yaml - asserts: - - isKind: - of: ServiceAccount - - equal: - path: metadata.name - value: RELEASE-NAME-engine - - hasDocuments: - count: 1 - - it: should render ui serviceaccount template: ui-serviceaccount.yaml asserts: @@ -39,17 +27,6 @@ tests: - hasDocuments: count: 1 - - it: should render engine serviceaccount - template: engine-serviceaccount.yaml - asserts: - - isKind: - of: ServiceAccount - - equal: - path: metadata.name - value: RELEASE-NAME-engine - - hasDocuments: - count: 1 - - it: should render ui 
serviceaccount template: ui-serviceaccount.yaml asserts: @@ -147,7 +124,6 @@ tests: namespaceOverride: "custom-namespace" templates: - controller-serviceaccount.yaml - - engine-serviceaccount.yaml - ui-serviceaccount.yaml asserts: - equal: @@ -157,7 +133,6 @@ tests: - it: should have correct labels on all resources templates: - controller-serviceaccount.yaml - - engine-serviceaccount.yaml - ui-serviceaccount.yaml - rbac/clusterrole.yaml - rbac/clusterrolebinding.yaml diff --git a/helm/kagent/tests/toolserver_test.yaml b/helm/kagent/tests/toolserver_test.yaml index 1ec279150..23bbe5607 100644 --- a/helm/kagent/tests/toolserver_test.yaml +++ b/helm/kagent/tests/toolserver_test.yaml @@ -14,7 +14,7 @@ tests: value: NAMESPACE - equal: path: spec.description - value: "Oficial KAgent tool server" + value: "Official KAgent tool server" - hasDocuments: count: 1 diff --git a/helm/kagent/values.yaml b/helm/kagent/values.yaml index 1d2d7d8f2..d673a1180 100644 --- a/helm/kagent/values.yaml +++ b/helm/kagent/values.yaml @@ -40,6 +40,21 @@ tolerations: [] # -- Node labels to match for `Pod` [scheduling](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/). nodeSelector: {} +# ============================================================================== +# DATABASE CONFIGURATION +# ============================================================================== + +database: + type: sqlite + sqlite: + databaseName: kagent.db + postgres: + url: postgres://postgres:kagent@pgsql-postgresql.kagent.svc.cluster.local:5432/postgres + +# ============================================================================== +# CONTROLLER CONFIGURATION +# ============================================================================== + controller: replicas: 1 loglevel: "info" @@ -69,34 +84,10 @@ controller: port: 8083 targetPort: 8083 env: {} - database: - type: sqlite - sqlite: - databaseName: kagent.db - postgres: - url: postgres://postgres:kagent@pgsql-postgresql.kagent.svc.cluster.local:5432/postgres - -engine: - replicas: 1 - loglevel: debug - image: - registry: cr.kagent.dev - repository: kagent-dev/kagent/app - tag: "" # Will default to global, then Chart version - pullPolicy: IfNotPresent - resources: - requests: - cpu: 100m - memory: 256Mi - limits: - cpu: 1000m - memory: 1Gi - service: - type: ClusterIP - ports: - port: 8081 - targetPort: 8081 - env: {} + +# ============================================================================== +# UI CONFIGURATION +# ============================================================================== ui: replicas: 1 diff --git a/helm/tools/querydoc/templates/deployment.yaml b/helm/tools/querydoc/templates/deployment.yaml index 6de8e7118..66c36b3fc 100644 --- a/helm/tools/querydoc/templates/deployment.yaml +++ b/helm/tools/querydoc/templates/deployment.yaml @@ -40,3 +40,14 @@ spec: - name: http containerPort: {{ .Values.service.port }} protocol: TCP + startupProbe: + httpGet: + path: /health + port: http + periodSeconds: 1 + initialDelaySeconds: 1 + readinessProbe: + httpGet: + path: /health + port: http + periodSeconds: 30 \ No newline at end of file diff --git a/python/.python-version b/python/.python-version index 7eebfafa0..86f8c02eb 100644 --- a/python/.python-version +++ b/python/.python-version @@ -1 +1 @@ -3.12.11 +3.13.5 diff --git a/python/Dockerfile b/python/Dockerfile index cecac911e..8ac78016d 100644 --- a/python/Dockerfile +++ b/python/Dockerfile @@ -6,11 +6,11 @@ ENV LANG=C.UTF-8 ENV LC_ALL=C.UTF-8 RUN --mount=type=cache,target=/var/cache/apk,rw 
\ apk update && apk add \ - curl openssl bash git ca-certificates uv nodejs-22 npm + curl openssl bash git ca-certificates uv libstdc++ ### STAGE 2: python FROM base-os AS python-os -ARG TOOLS_PYTHON_VERSION=3.12 +ARG TOOLS_PYTHON_VERSION=3.13 ENV PYTHONDONTWRITEBYTECODE=1 ENV PYTHONUNBUFFERED=1 @@ -30,49 +30,42 @@ RUN addgroup -g 1001 pythongroup && \ USER python WORKDIR /app/python -### STAGE 3 : download tools -FROM mcp/grafana:latest AS mcp-grafana -ARG TOOLS_GRAFANA_MCP_VERSION -LABEL org.opencontainers.image.source="mcp/grafana${TOOLS_GRAFANA_MCP_VERSION}" - -### STAGE 4: final +### STAGE 3: final FROM python-os AS final ARG TOOLS_PYTHON_VERSION WORKDIR /app/python -ENV PATH=$PATH:/app/python/bin +ENV PATH=$PATH:/app/python/bin:/app/python/.venv/bin COPY --chown=python:pythongroup pyproject.toml . COPY --chown=python:pythongroup .python-version . COPY --chown=python:pythongroup uv.lock . -COPY --chown=python:pythongroup src src +COPY --chown=python:pythongroup packages packages COPY --chown=python:pythongroup README.md . +ARG VERSION + # Install dependencies RUN echo "Installing dependencies..." \ && uv venv --python=python$TOOLS_PYTHON_VERSION \ - && uv sync --locked --refresh \ - && uv pip install setuptools wheel \ - && find .venv -name "*playwright*" -name "*node*" -type d -exec rm -rf {} + 2>/dev/null || true \ - && uv run kagent-engine --help \ + && uv sync --locked --refresh + +RUN uv version ${VERSION%%-*} --package kagent \ + && uv build --package kagent \ && uv cache prune # Offline mode ENV UV_OFFLINE=1 # Test if the tool is working and fetch all dependencies -RUN uv run kagent-engine --help - - # Grafana MCP -COPY --from=mcp-grafana /app/mcp-grafana /app/python/bin/mcp-grafana +RUN kagent --help -EXPOSE 8081 -ARG VERSION +EXPOSE 8080 LABEL org.opencontainers.image.source=https://github.com/kagent-dev/kagent LABEL org.opencontainers.image.description="Kagent app is the apiserver for running agents." 
LABEL org.opencontainers.image.authors="Kagent Creators 🤖" LABEL org.opencontainers.image.version="$VERSION" -CMD ["uv", "run", "kagent-engine", "--host", "0.0.0.0", "--port", "8081"] +CMD ["kagent", "--host", "0.0.0.0", "--port", "8080"] \ No newline at end of file diff --git a/python/packages/kagent-adk/.python-version b/python/packages/kagent-adk/.python-version new file mode 100644 index 000000000..86f8c02eb --- /dev/null +++ b/python/packages/kagent-adk/.python-version @@ -0,0 +1 @@ +3.13.5 diff --git a/python/src/autogenstudio/eval/__init__.py b/python/packages/kagent-adk/README.md similarity index 100% rename from python/src/autogenstudio/eval/__init__.py rename to python/packages/kagent-adk/README.md diff --git a/python/packages/kagent-adk/pyproject.toml b/python/packages/kagent-adk/pyproject.toml new file mode 100644 index 000000000..06b3fa7dc --- /dev/null +++ b/python/packages/kagent-adk/pyproject.toml @@ -0,0 +1,44 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "kagent-adk" +version = "0.3.0" +description = "kagent-adk is an sdk for integrating adk agents with kagent" +readme = "README.md" +requires-python = ">=3.12.11" +dependencies = [ + "anyio>=4.9.0", + "openai>=1.72.0", + "mcp>=1.12.0", + "protobuf>=6.31.1", + "opentelemetry-api>=1.32.0", + "opentelemetry-sdk>=1.32.0", + "opentelemetry-exporter-otlp-proto-grpc>=1.32.0", + "opentelemetry-instrumentation-openai>= 0.39.0", + "opentelemetry-instrumentation-httpx >= 0.52.0", + "anthropic[vertex]>=0.49.0", + "fastapi>=0.115.1", + "litellm>=1.74.3", + "google-adk>=1.8.0", + "google-genai>=1.21.1", + "google-auth>=2.40.2", + "httpx>=0.25.0", + "pydantic>=2.5.0", + "typing-extensions>=4.8.0", + "jsonref>=1.1.0", + "a2a-sdk>=0.2.16", +] + +[project.optional-dependencies] +test = [ + "pytest>=8.3.5", + "pytest-asyncio>=0.25.3", +] + +[tool.hatch.build.targets.wheel] +packages = ["src/kagent_adk"] + +[tool.ruff] +extend = "../../pyproject.toml" \ No newline at end of file diff --git a/python/packages/kagent-adk/src/kagent_adk/__init__.py b/python/packages/kagent-adk/src/kagent_adk/__init__.py new file mode 100644 index 000000000..0ba063aa3 --- /dev/null +++ b/python/packages/kagent-adk/src/kagent_adk/__init__.py @@ -0,0 +1,8 @@ +import importlib.metadata + +from .a2a import KAgentApp +from .models import AgentConfig + +__version__ = importlib.metadata.version("kagent_adk") + +__all__ = ["KAgentApp", "AgentConfig"] diff --git a/python/packages/kagent-adk/src/kagent_adk/_agent_executor.py b/python/packages/kagent-adk/src/kagent_adk/_agent_executor.py new file mode 100644 index 000000000..b9a6af5b0 --- /dev/null +++ b/python/packages/kagent-adk/src/kagent_adk/_agent_executor.py @@ -0,0 +1,261 @@ +from __future__ import annotations + +import inspect +import logging +import uuid +from datetime import datetime, timezone +from typing import Any, Awaitable, Callable, Optional + +from a2a.server.agent_execution import AgentExecutor +from a2a.server.agent_execution.context import RequestContext +from a2a.server.events.event_queue import EventQueue +from a2a.types import ( + Artifact, + Message, + Role, + TaskArtifactUpdateEvent, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, + TextPart, +) +from google.adk.a2a.converters.event_converter import convert_event_to_a2a_events +from google.adk.a2a.converters.request_converter import convert_a2a_request_to_adk_run_args +from google.adk.a2a.converters.utils import _get_adk_metadata_key +from google.adk.a2a.executor.task_result_aggregator import 
TaskResultAggregator +from google.adk.runners import Runner +from google.adk.utils.feature_decorator import experimental +from pydantic import BaseModel +from typing_extensions import override + +logger = logging.getLogger("google_adk." + __name__) + + +@experimental +class A2aAgentExecutorConfig(BaseModel): + """Configuration for the A2aAgentExecutor.""" + + pass + + +# This class is a copy of the A2aAgentExecutor class in the ADK sdk, +# with the following changes: +# - The runner is ALWAYS a callable that returns a Runner instance +# - The runner is cleaned up at the end of the execution +@experimental +class A2aAgentExecutor(AgentExecutor): + """An AgentExecutor that runs an ADK Agent against an A2A request and + publishes updates to an event queue. + """ + + def __init__( + self, + *, + runner: Callable[..., Runner | Awaitable[Runner]], + config: Optional[A2aAgentExecutorConfig] = None, + ): + super().__init__() + self._runner = runner + self._config = config + + async def _resolve_runner(self) -> Runner: + """Resolve the runner, handling cases where it's a callable that returns a Runner.""" + if callable(self._runner): + # Call the function to get the runner + result = self._runner() + + # Handle async callables + if inspect.iscoroutine(result): + resolved_runner = await result + else: + resolved_runner = result + + # Ensure we got a Runner instance + if not isinstance(resolved_runner, Runner): + raise TypeError(f"Callable must return a Runner instance, got {type(resolved_runner)}") + + # Cache the resolved runner for future calls + # self._runner = resolved_runner + return resolved_runner + + raise TypeError( + f"Runner must be a Runner instance or a callable that returns a Runner, got {type(self._runner)}" + ) + + @override + async def cancel(self, context: RequestContext, event_queue: EventQueue): + """Cancel the execution.""" + # TODO: Implement proper cancellation logic if needed + raise NotImplementedError("Cancellation is not supported") + + @override + async def execute( + self, + context: RequestContext, + event_queue: EventQueue, + ): + """Executes an A2A request and publishes updates to the event queue + specified. 
It runs as follows: + * Takes the input from the A2A request + * Converts the input to ADK input content and runs the ADK agent + * Collects output events of the underlying ADK Agent + * Converts the ADK output events into A2A task updates + * Publishes the updates back to the A2A server via the event queue + """ + if not context.message: + raise ValueError("A2A request must have a message") + + # for a new task, create a task submitted event + if not context.current_task: + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + status=TaskStatus( + state=TaskState.submitted, + message=context.message, + timestamp=datetime.now(timezone.utc).isoformat(), + ), + context_id=context.context_id, + final=False, + ) + ) + + # Handle the request and publish updates to the event queue + runner = await self._resolve_runner() + try: + await self._handle_request(context, event_queue, runner) + except Exception as e: + logger.error("Error handling A2A request: %s", e, exc_info=True) + # Publish failure event + try: + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + status=TaskStatus( + state=TaskState.failed, + timestamp=datetime.now(timezone.utc).isoformat(), + message=Message( + message_id=str(uuid.uuid4()), + role=Role.agent, + parts=[TextPart(text=str(e))], + ), + ), + context_id=context.context_id, + final=True, + ) + ) + except Exception as enqueue_error: + logger.error("Failed to publish failure event: %s", enqueue_error, exc_info=True) + finally: + await runner.close() + + async def _handle_request( + self, + context: RequestContext, + event_queue: EventQueue, + runner: Runner, + ): + # Convert the a2a request to ADK run args + run_args = convert_a2a_request_to_adk_run_args(context) + + # ensure the session exists + session = await self._prepare_session(context, run_args, runner) + + # create invocation context + invocation_context = runner._new_invocation_context( + session=session, + new_message=run_args["new_message"], + run_config=run_args["run_config"], + ) + + # publish the task working event + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + status=TaskStatus( + state=TaskState.working, + timestamp=datetime.now(timezone.utc).isoformat(), + ), + context_id=context.context_id, + final=False, + metadata={ + _get_adk_metadata_key("app_name"): runner.app_name, + _get_adk_metadata_key("user_id"): run_args["user_id"], + _get_adk_metadata_key("session_id"): run_args["session_id"], + }, + ) + ) + + task_result_aggregator = TaskResultAggregator() + async for adk_event in runner.run_async(**run_args): + for a2a_event in convert_event_to_a2a_events( + adk_event, invocation_context, context.task_id, context.context_id + ): + task_result_aggregator.process_event(a2a_event) + await event_queue.enqueue_event(a2a_event) + + # publish the task result event - this is final + if ( + task_result_aggregator.task_state == TaskState.working + and task_result_aggregator.task_status_message is not None + and task_result_aggregator.task_status_message.parts + ): + # if the task is still working properly, publish the artifact update event as + # the final result according to the a2a protocol.
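+ # the artifact update below carries the aggregated result parts; a completed TaskStatusUpdateEvent with final=True follows it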
+ await event_queue.enqueue_event( + TaskArtifactUpdateEvent( + task_id=context.task_id, + last_chunk=True, + context_id=context.context_id, + artifact=Artifact( + artifact_id=str(uuid.uuid4()), + parts=task_result_aggregator.task_status_message.parts, + ), + ) + ) + # publish the final status update event + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + status=TaskStatus( + state=TaskState.completed, + timestamp=datetime.now(timezone.utc).isoformat(), + ), + context_id=context.context_id, + final=True, + ) + ) + else: + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + status=TaskStatus( + state=task_result_aggregator.task_state, + timestamp=datetime.now(timezone.utc).isoformat(), + message=task_result_aggregator.task_status_message, + ), + context_id=context.context_id, + final=True, + ) + ) + + async def _prepare_session(self, context: RequestContext, run_args: dict[str, Any], runner: Runner): + session_id = run_args["session_id"] + # create a new session if one does not exist + user_id = run_args["user_id"] + session = await runner.session_service.get_session( + app_name=runner.app_name, + user_id=user_id, + session_id=session_id, + ) + if session is None: + session = await runner.session_service.create_session( + app_name=runner.app_name, + user_id=user_id, + state={}, + session_id=session_id, + ) + # Update run_args with the new session_id + run_args["session_id"] = session.id + + return session diff --git a/python/src/adk/kagent_session_service.py b/python/packages/kagent-adk/src/kagent_adk/_session_service.py similarity index 69% rename from python/src/adk/kagent_session_service.py rename to python/packages/kagent-adk/src/kagent_adk/_session_service.py index 0ab6ec4d0..ef82b598f 100644 --- a/python/src/adk/kagent_session_service.py +++ b/python/packages/kagent-adk/src/kagent_adk/_session_service.py @@ -14,19 +14,19 @@ logger = logging.getLogger("kagent." + __name__) -class KagentSessionService(BaseSessionService): +class KAgentSessionService(BaseSessionService): """A session service implementation that uses the Kagent API. This service integrates with the Kagent server to manage session state and persistence through HTTP API calls. """ - def __init__(self, base_url: str): + def __init__(self, client: httpx.AsyncClient): super().__init__() - self.client = httpx.AsyncClient(base_url=base_url.rstrip("/")) + self.client = client async def _get_user_id(self) -> str: """Get the default user ID.
Override this method to implement custom user ID logic.""" - return "default-user" + return "admin@kagent.dev" @override async def create_session( @@ -43,7 +43,7 @@ async def create_session( "agent_ref": app_name, # Use app_name as agent reference } if session_id: - request_data["name"] = session_id + request_data["id"] = session_id # Make API call to create session response = await self.client.post( @@ -72,24 +72,49 @@ async def get_session( config: Optional[GetSessionConfig] = None, ) -> Optional[Session]: try: + url = f"/api/sessions/{session_id}?user_id={user_id}" + if config: + if config.after_timestamp: + # TODO: implement + # url += f"&after={config.after_timestamp}" + pass + if config.num_recent_events: + url += f"&limit={config.num_recent_events}" + else: + url += "&limit=-1" + else: + # return all + url += "&limit=-1" + # Make API call to get session - response = await self.client.get( - f"/api/sessions/{session_id}?user_id={user_id}", + response: httpx.Response = await self.client.get( + url, headers={"X-User-ID": user_id}, ) + if response.status_code == 404: + return None response.raise_for_status() data = response.json() if not data.get("data"): return None - session_data = data["data"] + if not data.get("data").get("session"): + return None + session_data = data["data"]["session"] + + events_data = data["data"]["events"] + + events: list[Event] = [] + for event_data in events_data: + events.append(Event.model_validate_json(event_data["data"])) # Convert to ADK Session format return Session( id=session_data["id"], user_id=session_data["user_id"], - app_name="todo", + events=events, + app_name=app_name, state={}, # TODO: restore State ) except httpx.HTTPStatusError as e: @@ -100,7 +125,7 @@ async def get_session( @override async def list_sessions(self, *, app_name: str, user_id: str) -> ListSessionsResponse: # Make API call to list sessions - response = await self.client.get("/api/sessions", headers={"X-User-ID": user_id}) + response = await self.client.get(f"/api/sessions?user_id={user_id}", headers={"X-User-ID": user_id}) response.raise_for_status() data = response.json() @@ -121,7 +146,7 @@ def list_sessions_sync(self, *, app_name: str, user_id: str) -> ListSessionsResp async def delete_session(self, *, app_name: str, user_id: str, session_id: str) -> None: # Make API call to delete session response = await self.client.delete( - f"/api/sessions/{session_id}", + f"/api/sessions/{session_id}?user_id={user_id}", headers={"X-User-ID": user_id}, ) response.raise_for_status() @@ -130,9 +155,8 @@ async def delete_session(self, *, app_name: str, user_id: str, session_id: str) async def append_event(self, session: Session, event: Event) -> Event: # Convert ADK Event to JSON format event_data = { - "type": event.__class__.__name__, - "data": (event.model_dump() if hasattr(event, "model_dump") else event.__dict__), - "task_id": event.invocation_id, + "id": event.id, + "data": event.model_dump_json(), } # Make API call to append event to session @@ -143,4 +167,9 @@ async def append_event(self, session: Session, event: Event) -> Event: ) response.raise_for_status() + # TODO: potentially pull and update the session from the server + # Update the in-memory session. 
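+ # keep the in-memory session's timestamp in sync with the new event before delegating to the base class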
+ session.last_update_time = event.timestamp + await super().append_event(session=session, event=event) + return event diff --git a/python/packages/kagent-adk/src/kagent_adk/_task_store.py b/python/packages/kagent-adk/src/kagent_adk/_task_store.py new file mode 100644 index 000000000..a42244693 --- /dev/null +++ b/python/packages/kagent-adk/src/kagent_adk/_task_store.py @@ -0,0 +1,30 @@ +from typing import override + +import httpx +from a2a.server.tasks import TaskStore +from a2a.types import Task + + +class KAgentTaskStore(TaskStore): + client: httpx.AsyncClient + + def __init__(self, client: httpx.AsyncClient): + self.client = client + + @override + async def save(self, task: Task) -> None: + response = await self.client.post("/api/tasks", json=task.model_dump()) + response.raise_for_status() + + @override + async def get(self, task_id: str) -> Task | None: + response = await self.client.get(f"/api/tasks/{task_id}") + if response.status_code == 404: + return None + response.raise_for_status() + return Task.model_validate(response.json()) + + @override + async def delete(self, task_id: str) -> None: + response = await self.client.delete(f"/api/tasks/{task_id}") + response.raise_for_status() diff --git a/python/packages/kagent-adk/src/kagent_adk/a2a.py b/python/packages/kagent-adk/src/kagent_adk/a2a.py new file mode 100644 index 000000000..695e2bbc7 --- /dev/null +++ b/python/packages/kagent-adk/src/kagent_adk/a2a.py @@ -0,0 +1,179 @@ +#! /usr/bin/env python3 +import faulthandler +import inspect +import logging +import os +import sys +from contextlib import asynccontextmanager +from typing import Awaitable, Callable, override + +import httpx +from a2a.auth.user import User +from a2a.server.agent_execution import RequestContext, SimpleRequestContextBuilder +from a2a.server.apps import A2AStarletteApplication +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers import DefaultRequestHandler +from a2a.server.tasks import TaskStore +from a2a.types import AgentCard, MessageSendParams, Task +from fastapi import FastAPI, Request +from fastapi.responses import PlainTextResponse +from google.adk.agents import BaseAgent +from google.adk.runners import Runner +from google.adk.sessions import InMemorySessionService +from google.genai import types + +from ._agent_executor import A2aAgentExecutor +from ._session_service import KAgentSessionService +from ._task_store import KAgentTaskStore + +# --- Constants --- +USER_ID = "admin@kagent.dev" + +# --- Configure Logging --- +logger = logging.getLogger(__name__) + + +class KAgentUser(User): + def __init__(self, user_id: str): + self.user_id = user_id + + @property + def is_authenticated(self) -> bool: + return False + + @property + def user_name(self) -> str: + return self.user_id + + +class KAgentRequestContextBuilder(SimpleRequestContextBuilder): + """ + A request context builder that will be used to hack in the user_id for now. 
+ """ + + def __init__(self, user_id: str, task_store: TaskStore): + super().__init__(task_store=task_store) + self.user_id = user_id + + async def build( + self, + params: MessageSendParams | None = None, + task_id: str | None = None, + context_id: str | None = None, + task: Task | None = None, + context: ServerCallContext | None = None, + ) -> RequestContext: + if not context: + context = ServerCallContext(user=KAgentUser(user_id=self.user_id)) + else: + context.user = KAgentUser(user_id=self.user_id) + request_context = await super().build(params, task_id, context_id, task, context) + return request_context + + +def health_check(request: Request) -> PlainTextResponse: + return PlainTextResponse("OK") + + +def thread_dump(request: Request) -> PlainTextResponse: + import io + + buf = io.StringIO() + faulthandler.dump_traceback(file=buf) + buf.seek(0) + return PlainTextResponse(buf.read()) + + +kagent_url_override = os.getenv("KAGENT_URL") + + +class KAgentApp: + def __init__( + self, + root_agent: BaseAgent, + agent_card: AgentCard, + kagent_url: str, + app_name: str, + ): + self.root_agent = root_agent + self.kagent_url = kagent_url + self.app_name = app_name + self.agent_card = agent_card + + def build(self) -> FastAPI: + http_client = httpx.AsyncClient(base_url=kagent_url_override or self.kagent_url) + session_service = KAgentSessionService(http_client) + + def create_runner() -> Runner: + return Runner( + agent=self.root_agent, + app_name=self.app_name, + session_service=session_service, + ) + + agent_executor = A2aAgentExecutor( + runner=create_runner, + ) + + kagent_task_store = KAgentTaskStore(http_client) + + request_context_builder = KAgentRequestContextBuilder(user_id=USER_ID, task_store=kagent_task_store) + request_handler = DefaultRequestHandler( + agent_executor=agent_executor, + task_store=kagent_task_store, + request_context_builder=request_context_builder, + ) + + a2a_app = A2AStarletteApplication( + agent_card=self.agent_card, + http_handler=request_handler, + ) + + faulthandler.enable() + app = FastAPI() + + # Health check/readiness probe + app.add_route("/health", methods=["GET"], route=health_check) + app.add_route("/thread_dump", methods=["GET"], route=thread_dump) + a2a_app.add_routes_to_app(app) + + return app + + async def test(self, task: str): + session_service = InMemorySessionService() + SESSION_ID = "12345" + USER_ID = "admin" + await session_service.create_session( + app_name=self.app_name, + session_id=SESSION_ID, + user_id=USER_ID, + ) + if isinstance(self.root_agent, Callable): + agent_factory = self.root_agent + root_agent = agent_factory() + else: + root_agent = self.root_agent + + runner = Runner( + agent=root_agent, + app_name=self.app_name, + session_service=session_service, + ) + + logger.info(f"\n>>> User Query: {task}") + + # Prepare the user's message in ADK format + content = types.Content(role="user", parts=[types.Part(text=task)]) + # Key Concept: run_async executes the agent logic and yields Events. + # We iterate through events to find the final answer. + async for event in runner.run_async( + user_id=USER_ID, + session_id=SESSION_ID, + new_message=content, + ): + # You can uncomment the line below to see *all* events during execution + # print(f" [Event] Author: {event.author}, Type: {type(event).__name__}, Final: {event.is_final_response()}, Content: {event.content}") + + # Key Concept: is_final_response() marks the concluding message for the turn. 
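+ # every event is simply logged as JSON here; this test helper does not filter on is_final_response()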
+ jsn = event.model_dump_json() + logger.info(f" [Event] {jsn}") diff --git a/python/packages/kagent-adk/src/kagent_adk/models.py b/python/packages/kagent-adk/src/kagent_adk/models.py new file mode 100644 index 000000000..533b427c0 --- /dev/null +++ b/python/packages/kagent-adk/src/kagent_adk/models.py @@ -0,0 +1,110 @@ +import logging +from typing import Literal, Self, Union + +from a2a.types import AgentCard +from google.adk.agents import Agent +from google.adk.agents.llm_agent import ToolUnion +from google.adk.agents.run_config import RunConfig, StreamingMode +from google.adk.models.anthropic_llm import Claude as ClaudeLLM +from google.adk.models.google_llm import Gemini as GeminiLLM +from google.adk.models.lite_llm import LiteLlm +from google.adk.tools.agent_tool import AgentTool +from google.adk.tools.mcp_tool import MCPToolset, SseConnectionParams, StreamableHTTPConnectionParams +from pydantic import BaseModel, Field + +logger = logging.getLogger(__name__) + + +class HttpMcpServerConfig(BaseModel): + params: StreamableHTTPConnectionParams + tools: list[str] = Field(default_factory=list) + + +class SseMcpServerConfig(BaseModel): + params: SseConnectionParams + tools: list[str] = Field(default_factory=list) + + +class BaseLLM(BaseModel): + model: str + + +class OpenAI(BaseLLM): + base_url: str | None = None + + type: Literal["openai"] + + +class AzureOpenAI(BaseLLM): + type: Literal["azure_openai"] + + +class Anthropic(BaseLLM): + base_url: str | None = None + + type: Literal["anthropic"] + + +class GeminiVertexAI(BaseLLM): + type: Literal["gemini_vertex_ai"] + + +class GeminiAnthropic(BaseLLM): + type: Literal["gemini_anthropic"] + + +class Ollama(BaseLLM): + type: Literal["ollama"] + + +class Gemini(BaseLLM): + type: Literal["gemini"] + + +class AgentConfig(BaseModel): + kagent_url: str # The URL of the KAgent server + agent_card: AgentCard + name: str + model: Union[OpenAI, Anthropic, GeminiVertexAI, GeminiAnthropic, Ollama, AzureOpenAI, Gemini] = Field( + discriminator="type" + ) + description: str + instruction: str + http_tools: list[HttpMcpServerConfig] | None = None # tools, always MCP + sse_tools: list[SseMcpServerConfig] | None = None # tools, always MCP + agents: list[Self] | None = None # nested sub-agent configs + + def to_agent(self) -> Agent: + mcp_toolsets: list[ToolUnion] = [] + if self.http_tools: + for http_tool in self.http_tools: # add http tools + mcp_toolsets.append(MCPToolset(connection_params=http_tool.params, tool_filter=http_tool.tools)) + if self.sse_tools: + for sse_tool in self.sse_tools: # add sse tools + mcp_toolsets.append(MCPToolset(connection_params=sse_tool.params, tool_filter=sse_tool.tools)) + if self.agents: + for agent in self.agents: # Add sub agents as tools + mcp_toolsets.append(AgentTool(agent.to_agent())) + if self.model.type == "openai": + model = LiteLlm(model=f"openai/{self.model.model}", base_url=self.model.base_url) + elif self.model.type == "anthropic": + model = LiteLlm(model=f"anthropic/{self.model.model}", base_url=self.model.base_url) + elif self.model.type == "gemini_vertex_ai": + model = GeminiLLM(model=self.model.model) + elif self.model.type == "gemini_anthropic": + model = ClaudeLLM(model=self.model.model) + elif self.model.type == "ollama": + model = LiteLlm(model=f"ollama_chat/{self.model.model}") + elif self.model.type == "azure_openai": + model = LiteLlm(model=f"azure/{self.model.model}") + elif self.model.type == "gemini": + model = self.model.model + else: + raise ValueError(f"Invalid model type: {self.model.type}") + return Agent(
+ name=self.name, + model=model, + description=self.description, + instruction=self.instruction, + tools=mcp_toolsets, + ) diff --git a/python/packages/kagent/.python-version b/python/packages/kagent/.python-version new file mode 100644 index 000000000..86f8c02eb --- /dev/null +++ b/python/packages/kagent/.python-version @@ -0,0 +1 @@ +3.13.5 diff --git a/python/src/autogenstudio/utils/__init__.py b/python/packages/kagent/README.md similarity index 100% rename from python/src/autogenstudio/utils/__init__.py rename to python/packages/kagent/README.md diff --git a/python/packages/kagent/pyproject.toml b/python/packages/kagent/pyproject.toml new file mode 100644 index 000000000..4e31cb986 --- /dev/null +++ b/python/packages/kagent/pyproject.toml @@ -0,0 +1,31 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.metadata] +allow-direct-references = true + +[project] +name = "kagent" +version = "0.3.0" +description = "kagent is a tool for building and deploying agent-based applications." +readme = "README.md" +requires-python = ">=3.12.11" +dependencies = [ + "typer>=0.15.0", + "protobuf>=6.31.1", + "opentelemetry-api>=1.32.0", + "opentelemetry-sdk>=1.32.0", + "opentelemetry-exporter-otlp-proto-grpc>=1.32.0", + "opentelemetry-instrumentation-openai>= 0.39.0", + "opentelemetry-instrumentation-httpx >= 0.52.0", + "fastapi>=0.115.1", + "aiofiles>=24.1.0", + "kagent-adk>=0.3.0", +] + +[tool.hatch.build.targets.wheel] +packages = ["src/kagent"] + +[project.scripts] +kagent = "kagent.cli:run" \ No newline at end of file diff --git a/python/packages/kagent/src/kagent/cli.py b/python/packages/kagent/src/kagent/cli.py new file mode 100644 index 000000000..0fef09fb1 --- /dev/null +++ b/python/packages/kagent/src/kagent/cli.py @@ -0,0 +1,84 @@ +import asyncio +import json +import logging +import os +from typing import Annotated + +import aiofiles +import typer +import uvicorn +from kagent_adk import AgentConfig, KAgentApp +from opentelemetry import trace +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor +from opentelemetry.instrumentation.openai import OpenAIInstrumentor +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor + +logger = logging.getLogger(__name__) + +app = typer.Typer() + + +@app.command() +def static( + host: str = "127.0.0.1", + port: int = 8080, + workers: int = 1, + filepath: str = "/config/config.json", + reload: Annotated[bool, typer.Option("--reload")] = False, +): + tracing_enabled = os.getenv("OTEL_TRACING_ENABLED", "false").lower() == "true" + if tracing_enabled: + logging.info("Enabling tracing") + tracer_provider = TracerProvider(resource=Resource({"service.name": "kagent"})) + processor = BatchSpanProcessor(OTLPSpanExporter()) + tracer_provider.add_span_processor(processor) + trace.set_tracer_provider(tracer_provider) + HTTPXClientInstrumentor().instrument() + OpenAIInstrumentor().instrument() + + with open(filepath, "r") as f: + config = json.load(f) + agent_config = AgentConfig.model_validate(config) + root_agent = agent_config.to_agent() + + app = KAgentApp(root_agent, agent_config.agent_card, agent_config.kagent_url, agent_config.name) + + uvicorn.run( + app.build, + host=host, + port=port, + workers=workers, + reload=reload, + ) + + +async def test_agent(filepath: str, task: str): + async with aiofiles.open(filepath, 
"r") as f: + content = await f.read() + config = json.loads(content) + agent_config = AgentConfig.model_validate(config) + agent = agent_config.to_agent() + + app = KAgentApp(agent, agent_config.agent_card, agent_config.kagent_url, agent_config.name) + await app.test(task) + + +@app.command() +def test( + task: Annotated[str, typer.Option("--task", help="The task to test the agent with")], + filepath: Annotated[str, typer.Option("--filepath", help="The path to the agent config file")], +): + asyncio.run(test_agent(filepath, task)) + + +def run(): + logging.basicConfig(level=logging.INFO) + logging.info("Starting KAgent") + app() + + +if __name__ == "__main__": + run() diff --git a/python/pyproject.toml b/python/pyproject.toml index 3cffdd114..698cb71c8 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,96 +1,16 @@ -[build-system] -requires = [ - "setuptools>=67.0.0", - "wheel" -] -build-backend = "setuptools.build_meta" - -[project] -name = "kagent" -version = "0.3.0" -description = "kagent is a tool for building and deploying agent-based applications." -readme = "README.md" -requires-python = ">=3.12.11" -dependencies = [ - "autogen-core @ git+https://github.com/Microsoft/autogen@c5b893d3f814185c326c8ff95767d2375d95818d#subdirectory=python/packages/autogen-core", - "autogen-agentchat @ git+https://github.com/Microsoft/autogen@c5b893d3f814185c326c8ff95767d2375d95818d#subdirectory=python/packages/autogen-agentchat", - "autogen-ext[anthropic,azure,mcp,ollama,openai] @ git+https://github.com/Microsoft/autogen@c5b893d3f814185c326c8ff95767d2375d95818d#subdirectory=python/packages/autogen-ext", - "openai>=1.72.0", - "tiktoken==0.8.0", - "python-dotenv>=1.1.0", - "pyyaml>=6.0.2", - "mcp>=1.8.1", - "sqlite-vec>=0.1.7a2", - "typer>=0.15.0", - "onnxruntime>=1.21.1", - "numpy>=2.2.4", - "opentelemetry-api>=1.32.0", - "opentelemetry-sdk>=1.32.0", - "opentelemetry-exporter-otlp-proto-grpc>=1.32.0", - "opentelemetry-instrumentation-openai>= 0.39.0", - "opentelemetry-instrumentation-httpx >= 0.52.0", - "anthropic[vertex]>=0.49.0", - "pinecone>=6.0.2", - "loguru>=0.7.3", - "sqlalchemy>=2.0.40", - "sqlmodel>=0.0.24", - "alembic>=1.11.0", - "fastapi>=0.103.1", - "click>=8.0.6", - "pandas>=2.2.3", - "html2text>=2025.4.15", - "bs4>=0.0.2", - "magika>=0.6.2", - "markitdown>=0.1.1", - "websockets>=11.0", - "litellm>=1.74.3", - "google-adk>=1.6.1", - "google-genai>=1.18.0", - "google-auth>=2.40.2", - "h11>=0.16.0", - "protobuf >= 5.29.5", - # Additional dependencies that were missing during build - "aiofiles>=24.1.0", - "httpx>=0.25.0", - "pydantic>=2.5.0", - "typing-extensions>=4.8.0", - "jsonref>=1.1.0", - "pillow>=10.0.0", - "ollama>=0.3.0", - "azure-ai-inference>=1.0.0b9", - "azure-ai-projects>=1.0.0b11", - "azure-core>=1.30.0", - "azure-identity>=1.15.0", - "azure-search-documents>=11.4.0", - "azure-storage-blob>=12.19.0", - "isodate>=0.6.1", - "anyio>=4.0.0", - "sniffio>=1.3.0", - "distro>=1.8.0", - "jiter>=0.4.0", - "idna>=3.4", - "a2a-sdk>=0.2.12", -] +[tool.uv.workspace] +members = ["packages/*"] -[project.optional-dependencies] -jupyter-executor = [ - "ipykernel>=6.29.5", - "nbclient>=0.10.2", -] -test = [ +[dependency-groups] +dev = [ "pytest>=8.3.5", "pytest-asyncio>=0.25.3", -] -lint = [ "ruff>=0.11.5", ] -[project.scripts] -kagent-engine = "kagent.cli:run" -tool_gen = "kagent.tools.utils.tool_gen:main" - [tool.uv.sources] kagent = { workspace = true } +kagent-adk = { workspace = true } [tool.ruff] line-length = 120 @@ -109,7 +29,7 @@ select = ["E", "F", "W", "B", "Q", "I", 
"ASYNC", "T20"] ignore = ["F401", "E501", "B008", "ASYNC109"] [tool.setuptools.packages.find] -where = ["src"] +where = ["packages"] [tool.setuptools.package-data] "*" = ["*.json", "*.yaml", "*.yml", "*.txt", "*.md"] diff --git a/python/src/adk/kagent_task_store.py b/python/src/adk/kagent_task_store.py deleted file mode 100644 index 070ca9846..000000000 --- a/python/src/adk/kagent_task_store.py +++ /dev/null @@ -1,20 +0,0 @@ -import httpx -from a2a.server.tasks import TaskStore -from a2a.types import Task - - -class KAgentTaskStore(TaskStore): - client: httpx.AsyncClient - - def __init__(self, base_url: str): - self.client = httpx.AsyncClient(base_url=base_url) - - async def save(self, task: Task) -> None: - await self.client.post("/tasks", json=task.model_dump()) - - async def get(self, task_id: str) -> Task | None: - response = await self.client.get(f"/tasks/{task_id}") - return Task.model_validate(response.json()) - - async def delete(self, task_id: str) -> None: - await self.client.delete(f"/tasks/{task_id}") diff --git a/python/src/autogenstudio/__init__.py b/python/src/autogenstudio/__init__.py deleted file mode 100644 index 534108254..000000000 --- a/python/src/autogenstudio/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .teammanager import TeamManager -from .version import __version__ - -__all__ = ["TeamManager", "__version__"] diff --git a/python/src/autogenstudio/cli.py b/python/src/autogenstudio/cli.py deleted file mode 100644 index fc708a5be..000000000 --- a/python/src/autogenstudio/cli.py +++ /dev/null @@ -1,108 +0,0 @@ -import os -import warnings -from typing import Optional - -import typer -import uvicorn -from typing_extensions import Annotated - -from .version import VERSION - -app = typer.Typer() - -# Ignore deprecation warnings from websockets -warnings.filterwarnings("ignore", message="websockets.legacy is deprecated*") -warnings.filterwarnings("ignore", message="websockets.server.WebSocketServerProtocol is deprecated*") - - -def get_env_file_path(): - app_dir = os.path.join(os.path.expanduser("~"), ".autogenstudio") - if not os.path.exists(app_dir): - os.makedirs(app_dir, exist_ok=True) - return os.path.join(app_dir, "temp_env_vars.env") - - -@app.command() -def ui( - host: str = "127.0.0.1", - port: int = 8081, - workers: int = 1, - reload: Annotated[bool, typer.Option("--reload")] = False, -): - """ - Run the AutoGen Studio UI. - - Args: - host (str, optional): Host to run the UI on. Defaults to 127.0.0.1 (localhost). - port (int, optional): Port to run the UI on. Defaults to 8081. - workers (int, optional): Number of workers to run the UI with. Defaults to 1. - reload (bool, optional): Whether to reload the UI on code changes. Defaults to False. - docs (bool, optional): Whether to generate API docs. Defaults to False. - appdir (str, optional): Path to the AutoGen Studio app directory. Defaults to None. - database_uri (str, optional): Database URI to connect to. Defaults to None. - auth_config (str, optional): Path to authentication configuration YAML. Defaults to None. - upgrade_database (bool, optional): Whether to upgrade the database. Defaults to False. - """ - - uvicorn.run( - "autogenstudio.web.app:app", - host=host, - port=port, - workers=workers, - reload=reload, - ) - - -@app.command() -def serve( - team: str = "", - host: str = "127.0.0.1", - port: int = 8084, - workers: int = 1, - reload: Annotated[bool, typer.Option("--reload")] = False, - docs: bool = False, -): - """ - Serve an API Endpoint based on an AutoGen Studio workflow json file. 
- - Args: - team (str): Path to the team json file. - host (str, optional): Host to run the UI on. Defaults to 127.0.0.1 (localhost). - port (int, optional): Port to run the UI on. Defaults to 8084 - workers (int, optional): Number of workers to run the UI with. Defaults to 1. - reload (bool, optional): Whether to reload the UI on code changes. Defaults to False. - docs (bool, optional): Whether to generate API docs. Defaults to False. - - """ - - os.environ["AUTOGENSTUDIO_API_DOCS"] = str(docs) - os.environ["AUTOGENSTUDIO_TEAM_FILE"] = team - - # validate the team file - if not os.path.exists(team): - raise ValueError(f"Team file not found: {team}") - - uvicorn.run( - "autogenstudio.web.serve:app", - host=host, - port=port, - workers=workers, - reload=reload, - ) - - -@app.command() -def version(): - """ - Print the version of the AutoGen Studio UI CLI. - """ - - typer.echo(f"AutoGen Studio CLI version: {VERSION}") - - -def run(): - app() - - -if __name__ == "__main__": - app() diff --git a/python/src/autogenstudio/datamodel/__init__.py b/python/src/autogenstudio/datamodel/__init__.py deleted file mode 100644 index c93e1d494..000000000 --- a/python/src/autogenstudio/datamodel/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -from .types import ( - LLMCallEventMessage, - MessageConfig, - Response, - TeamResult, -) - -__all__ = [ - "MessageConfig", - "TeamResult", - "Response", - "LLMCallEventMessage", -] diff --git a/python/src/autogenstudio/datamodel/eval.py b/python/src/autogenstudio/datamodel/eval.py deleted file mode 100644 index 4a1c3ec7d..000000000 --- a/python/src/autogenstudio/datamodel/eval.py +++ /dev/null @@ -1,82 +0,0 @@ -# datamodel/eval.py -from datetime import datetime -from enum import Enum -from typing import Any, Dict, List, Optional, Sequence -from uuid import UUID, uuid4 - -from autogen_agentchat.base import TaskResult -from autogen_core import Image -from pydantic import BaseModel -from sqlmodel import Field - - -class EvalTask(BaseModel): - """Definition of a task to be evaluated.""" - - task_id: UUID | str = Field(default_factory=uuid4) - input: str | Sequence[str | Image] - name: str = "" - description: str = "" - expected_outputs: Optional[List[Any]] = None - metadata: Dict[str, Any] = {} - - -class EvalRunResult(BaseModel): - """Result of an evaluation run.""" - - result: TaskResult | None = None - status: bool = False - start_time: Optional[datetime] = Field(default=datetime.now()) - end_time: Optional[datetime] = None - error: Optional[str] = None - - -class EvalDimensionScore(BaseModel): - """Score for a single evaluation dimension.""" - - dimension: str - score: float - reason: str - max_value: float - min_value: float - - -class EvalScore(BaseModel): - """Composite score from evaluation.""" - - overall_score: Optional[float] = None - dimension_scores: List[EvalDimensionScore] = [] - reason: Optional[str] = None - max_value: float = 10.0 - min_value: float = 0.0 - metadata: Dict[str, Any] = {} - - -class EvalJudgeCriteria(BaseModel): - """Criteria for judging evaluation results.""" - - dimension: str - prompt: str - max_value: float = 10.0 - min_value: float = 0.0 - metadata: Dict[str, Any] = {} - - -class EvalRunStatus(str, Enum): - """Status of an evaluation run.""" - - PENDING = "pending" - RUNNING = "running" - COMPLETED = "completed" - FAILED = "failed" - CANCELED = "canceled" - - -class EvalResult(BaseModel): - """Result of an evaluation run.""" - - task_id: UUID | str - # runner_id: UUID | str - status: EvalRunStatus = EvalRunStatus.PENDING - start_time: 
Optional[datetime] = Field(default=datetime.now()) - end_time: Optional[datetime] = None diff --git a/python/src/autogenstudio/datamodel/types.py b/python/src/autogenstudio/datamodel/types.py deleted file mode 100644 index f010908d6..000000000 --- a/python/src/autogenstudio/datamodel/types.py +++ /dev/null @@ -1,46 +0,0 @@ -# from dataclasses import Field -from datetime import datetime -from typing import Any, Dict, List, Literal, Optional, Sequence - -from autogen_agentchat.base import TaskResult -from autogen_agentchat.messages import ChatMessage, TextMessage -from autogen_core import ComponentModel -from autogen_core.models import UserMessage -from autogen_ext.models.openai import OpenAIChatCompletionClient -from pydantic import BaseModel, ConfigDict, SecretStr - - -class MessageConfig(BaseModel): - source: str - content: str | ChatMessage | Sequence[ChatMessage] | None - message_type: Optional[str] = "text" - - -class TeamResult(BaseModel): - task_result: TaskResult - usage: str - duration: float - - -class LLMCallEventMessage(TextMessage): - source: str = "llm_call_event" - - def to_text(self) -> str: - return self.content - - def to_model_text(self) -> str: - return self.content - - def to_model_message(self) -> UserMessage: - raise NotImplementedError("This message type is not supported.") - - type: Literal["LLMCallEventMessage"] = "LLMCallEventMessage" - - -# web request/response data models - - -class Response(BaseModel): - message: str - status: bool - data: Optional[Any] = None diff --git a/python/src/autogenstudio/eval/judges.py b/python/src/autogenstudio/eval/judges.py deleted file mode 100644 index e98b800a8..000000000 --- a/python/src/autogenstudio/eval/judges.py +++ /dev/null @@ -1,267 +0,0 @@ -import asyncio -from abc import ABC, abstractmethod -from typing import Any, Dict, List, Optional, Tuple - -from autogen_core import CancellationToken, Component, ComponentBase -from autogen_core.models import ChatCompletionClient, UserMessage -from loguru import logger -from pydantic import BaseModel -from typing_extensions import Self - -from ..datamodel.eval import EvalDimensionScore, EvalJudgeCriteria, EvalRunResult, EvalScore, EvalTask - - -class BaseEvalJudgeConfig(BaseModel): - """Base configuration for evaluation judges.""" - - name: str = "Base Judge" - description: str = "" - metadata: Dict[str, Any] = {} - - -class BaseEvalJudge(ABC, ComponentBase[BaseEvalJudgeConfig]): - """Abstract base class for evaluation judges.""" - - component_type = "eval_judge" - - def __init__(self, name: str = "Base Judge", description: str = "", metadata: Optional[Dict[str, Any]] = None): - self.name = name - self.description = description - self.metadata = metadata or {} - - @abstractmethod - async def judge( - self, - task: EvalTask, - result: EvalRunResult, - criteria: List[EvalJudgeCriteria], - cancellation_token: Optional[CancellationToken] = None, - ) -> EvalScore: - """Judge the result of an evaluation run.""" - pass - - def _to_config(self) -> BaseEvalJudgeConfig: - """Convert the judge configuration to a configuration object for serialization.""" - return BaseEvalJudgeConfig(name=self.name, description=self.description, metadata=self.metadata) - - -class LLMEvalJudgeConfig(BaseEvalJudgeConfig): - """Configuration for LLMEvalJudge.""" - - model_client: Any # ComponentModel - - -class LLMEvalJudge(BaseEvalJudge, Component[LLMEvalJudgeConfig]): - """Judge that uses an LLM to evaluate results.""" - - component_config_schema = LLMEvalJudgeConfig - component_type = "eval_judge" - 
component_provider_override = "autogenstudio.eval.judges.LLMEvalJudge" - - def __init__( - self, - model_client: ChatCompletionClient, - name: str = "LLM Judge", - description: str = "Evaluates results using an LLM", - metadata: Optional[Dict[str, Any]] = None, - ): - super().__init__(name, description, metadata) - self.model_client = model_client - - async def judge( - self, - task: EvalTask, - result: EvalRunResult, - criteria: List[EvalJudgeCriteria], - cancellation_token: Optional[CancellationToken] = None, - ) -> EvalScore: - """Judge the result using an LLM.""" - # Create a score object - score = EvalScore(max_value=10.0) - - # Judge each dimension in parallel - dimension_score_tasks = [] - for criterion in criteria: - dimension_score_tasks.append(self._judge_dimension(task, result, criterion, cancellation_token)) - - dimension_scores = await asyncio.gather(*dimension_score_tasks) - score.dimension_scores = dimension_scores - - # Calculate overall score (average of dimension scores) - valid_scores = [ds.score for ds in dimension_scores if ds.score is not None] - if valid_scores: - score.overall_score = sum(valid_scores) / len(valid_scores) - - return score - - async def _judge_dimension( - self, - task: EvalTask, - result: EvalRunResult, - criterion: EvalJudgeCriteria, - cancellation_token: Optional[CancellationToken] = None, - ) -> EvalDimensionScore: - """Judge a specific dimension.""" - # Format task and result for the LLM - task_description = self._format_task(task) - result_description = result.model_dump() - - # Create the prompt - prompt = f""" - You are evaluating the quality of a system response to a task. - Task: {task_description}Response: {result_description} - Evaluation criteria: {criterion.dimension} - {criterion.prompt} - Score the response on a scale from {criterion.min_value} to {criterion.max_value}. - First, provide a detailed explanation of your evaluation. - Then, give your final score as a single number between 0 and {criterion.max_value}. - Format your answer should be a json for the EvalDimensionScore class: - {{ - "dimension": "{criterion.dimension}", - "reason": "", - "score": - }} - Please ensure the score is a number between {criterion.min_value} and {criterion.max_value}. - If you cannot evaluate the response, please return a score of null. - If the response is not relevant, please return a score of 0. - If the response is perfect, please return a score of {criterion.max_value}. - If the response is not relevant, please return a score of 0. - If the response is perfect, please return a score of {criterion.max_value}. 
- """ - - # Get judgment from LLM - model_input = [] - text_message = UserMessage(content=prompt, source="user") - model_input.append(text_message) - - # Run with the model client in the same format as used in runners - model_result = await self.model_client.create( - messages=model_input, - cancellation_token=cancellation_token, - json_output=EvalDimensionScore, - ) - - # Extract content from the response - model_response = model_result.content if isinstance(model_result.content, str) else str(model_result.content) - - try: - # validate response string as EvalDimensionScore - model_response = EvalDimensionScore.model_validate_json(model_response) - return model_response - except Exception as e: - logger.warning(f"Failed to parse LLM response: {e}", model_result.content) - return EvalDimensionScore( - dimension=criterion.dimension, - reason="Failed to parse response", - score=0.0, - max_value=criterion.max_value, - min_value=criterion.min_value, - ) - - def _format_task(self, task: EvalTask) -> str: - """Format the task for the LLM.""" - task_parts = [] - - if task.description: - task_parts.append(task.description) - if isinstance(task.input, str): - task_parts.append(task.input) - elif isinstance(task.input, list): - task_parts.append("\n".join(str(x) for x in task.input if isinstance(x, str))) - - return "\n".join(task_parts) - - def _parse_judgment(self, judgment_text: str, max_value: float) -> Tuple[str, Optional[float]]: - """Parse judgment text to extract explanation and score.""" - explanation = "" - score = None - - # Simple parsing - could be improved with regex - lines = judgment_text.split("\n") - for line in lines: - if line.strip().lower().startswith("explanation:"): - explanation = line.split(":", 1)[1].strip() - elif line.strip().lower().startswith("score:"): - try: - score_str = line.split(":", 1)[1].strip() - score = float(score_str) - # Ensure score is within bounds - score = min(max(score, 0), max_value) - except (ValueError, IndexError): - pass - - return explanation, score - - def _to_config(self) -> LLMEvalJudgeConfig: - """Convert to configuration object including model client configuration.""" - base_config = super()._to_config() - return LLMEvalJudgeConfig( - name=base_config.name, - description=base_config.description, - metadata=base_config.metadata, - model_client=self.model_client.dump_component(), - ) - - @classmethod - def _from_config(cls, config: LLMEvalJudgeConfig) -> Self: - """Create from configuration object with serialized model client.""" - model_client = ChatCompletionClient.load_component(config.model_client) - return cls( - model_client=model_client, name=config.name, description=config.description, metadata=config.metadata - ) - - -# # Usage example -# async def example_usage(): -# # Create a model client -# from autogen_ext.models import OpenAIChatCompletionClient - -# model_client = OpenAIChatCompletionClient( -# model="gpt-4", -# api_key="your-api-key" -# ) - -# # Create a judge -# llm_judge = LLMEvalJudge(model_client=model_client) - -# # Serialize the judge to a ComponentModel -# judge_config = llm_judge.dump_component() -# print(f"Serialized judge: {judge_config}") - -# # Deserialize back to a LLMEvalJudge -# deserialized_judge = LLMEvalJudge.load_component(judge_config) - -# # Create criteria for evaluation -# criteria = [ -# EvalJudgeCriteria( -# dimension="relevance", -# prompt="Evaluate how relevant the response is to the query.", -# min_value=0, -# max_value=10 -# ), -# EvalJudgeCriteria( -# dimension="accuracy", -# prompt="Evaluate the 
factual accuracy of the response.", -# min_value=0, -# max_value=10 -# ) -# ] - -# # Create a mock task and result -# task = EvalTask( -# id="task-123", -# name="Sample Task", -# description="A sample task for evaluation", -# input="What is the capital of France?" -# ) - -# result = EvalRunResult( -# status=True, -# result={ -# "messages": [{"content": "The capital of France is Paris.", "source": "model"}] -# } -# ) - -# # Run the evaluation -# score = await deserialized_judge.judge(task, result, criteria) -# print(f"Evaluation score: {score}") diff --git a/python/src/autogenstudio/eval/orchestrator.py b/python/src/autogenstudio/eval/orchestrator.py deleted file mode 100644 index ec5c71124..000000000 --- a/python/src/autogenstudio/eval/orchestrator.py +++ /dev/null @@ -1,789 +0,0 @@ -import asyncio -import uuid -from datetime import datetime -from pdb import run -from typing import Any, Dict, List, Optional, TypedDict, Union - -from loguru import logger -from pydantic import BaseModel - -from ..database.db_manager import DatabaseManager -from ..datamodel.db import EvalCriteriaDB, EvalRunDB, EvalTaskDB -from ..datamodel.eval import EvalJudgeCriteria, EvalRunResult, EvalRunStatus, EvalScore, EvalTask -from .judges import BaseEvalJudge -from .runners import BaseEvalRunner - - -class DimensionScore(TypedDict): - score: Optional[float] - reason: Optional[str] - - -class RunEntry(TypedDict): - id: str - name: str - task_name: str - runner_type: str - overall_score: Optional[float] - scores: List[Optional[float]] - reasons: Optional[List[Optional[str]]] - - -class TabulatedResults(TypedDict): - dimensions: List[str] - runs: List[RunEntry] - - -class EvalOrchestrator: - """ - Orchestrator for evaluation runs. - - This class manages the lifecycle of evaluation tasks, criteria, and runs. - It can operate with or without a database manager for persistence. - """ - - def __init__(self, db_manager: Optional[DatabaseManager] = None): - """ - Initialize the orchestrator. - - Args: - db_manager: Optional database manager for persistence. - If None, data is stored in memory only. - """ - self._db_manager = db_manager - - # In-memory storage (used when db_manager is None) - self._tasks: Dict[str, EvalTask] = {} - self._criteria: Dict[str, EvalJudgeCriteria] = {} - self._runs: Dict[str, Dict[str, Any]] = {} - - # Active runs tracking - self._active_runs: Dict[str, asyncio.Task] = {} - - # ----- Task Management ----- - - async def create_task(self, task: EvalTask) -> str: - """ - Create a new evaluation task. - - Args: - task: The evaluation task to create - - Returns: - Task ID - """ - if not task.task_id: - task.task_id = str(uuid.uuid4()) - - if self._db_manager: - # Store in database - task_db = EvalTaskDB(name=task.name, description=task.description, config=task) - response = self._db_manager.upsert(task_db) - if not response.status: - logger.error(f"Failed to store task: {response.message}") - raise RuntimeError(f"Failed to store task: {response.message}") - task_id = str(response.data.get("id")) if response.data else str(task.task_id) - else: - # Store in memory - task_id = str(task.task_id) - self._tasks[task_id] = task - - return task_id - - async def get_task(self, task_id: str) -> Optional[EvalTask]: - """ - Retrieve an evaluation task by ID. 
- - Args: - task_id: The ID of the task to retrieve - - Returns: - The task if found, None otherwise - """ - if self._db_manager: - # Retrieve from database - response = self._db_manager.get(EvalTaskDB, filters={"id": int(task_id) if task_id.isdigit() else task_id}) - - if response.status and response.data and len(response.data) > 0: - task_data = response.data[0] - return ( - task_data.get("config") - if isinstance(task_data.get("config"), EvalTask) - else EvalTask.model_validate(task_data.get("config")) - ) - else: - # Retrieve from memory - return self._tasks.get(task_id) - - return None - - async def list_tasks(self) -> List[EvalTask]: - """ - List all available evaluation tasks. - - Returns: - List of evaluation tasks - """ - if self._db_manager: - # Retrieve from database - response = self._db_manager.get(EvalTaskDB) - - tasks = [] - if response.status and response.data: - for task_data in response.data: - config = task_data.get("config") - if config: - if isinstance(config, EvalTask): - tasks.append(config) - else: - tasks.append(EvalTask.model_validate(config)) - return tasks - else: - # Retrieve from memory - return list(self._tasks.values()) - - # ----- Criteria Management ----- - - async def create_criteria(self, criteria: EvalJudgeCriteria) -> str: - """ - Create new evaluation criteria. - - Args: - criteria: The evaluation criteria to create - - Returns: - Criteria ID - """ - criteria_id = str(uuid.uuid4()) - - if self._db_manager: - # Store in database - criteria_db = EvalCriteriaDB(name=criteria.dimension, description=criteria.prompt, config=criteria) - response = self._db_manager.upsert(criteria_db) - if not response.status: - logger.error(f"Failed to store criteria: {response.message}") - raise RuntimeError(f"Failed to store criteria: {response.message}") - criteria_id = str(response.data.get("id")) if response.data else criteria_id - else: - # Store in memory - self._criteria[criteria_id] = criteria - - return criteria_id - - async def get_criteria(self, criteria_id: str) -> Optional[EvalJudgeCriteria]: - """ - Retrieve evaluation criteria by ID. - - Args: - criteria_id: The ID of the criteria to retrieve - - Returns: - The criteria if found, None otherwise - """ - if self._db_manager: - # Retrieve from database - response = self._db_manager.get( - EvalCriteriaDB, filters={"id": int(criteria_id) if criteria_id.isdigit() else criteria_id} - ) - - if response.status and response.data and len(response.data) > 0: - criteria_data = response.data[0] - return ( - criteria_data.get("config") - if isinstance(criteria_data.get("config"), EvalJudgeCriteria) - else EvalJudgeCriteria.model_validate(criteria_data.get("config")) - ) - else: - # Retrieve from memory - return self._criteria.get(criteria_id) - - return None - - async def list_criteria(self) -> List[EvalJudgeCriteria]: - """ - List all available evaluation criteria. 
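Criteria follow the same create/get/list pattern. A sketch of registering one criterion, reusing the EvalJudgeCriteria fields shown in the removed judges.py example (dimension, prompt, min_value, max_value); the orchestrator argument is assumed to be the instance from the previous sketch:

```python
from autogenstudio.datamodel.eval import EvalJudgeCriteria


async def register_relevance_criterion(orchestrator) -> str:
    criterion = EvalJudgeCriteria(
        dimension="relevance",
        prompt="Evaluate how relevant the response is to the query.",
        min_value=0,
        max_value=10,
    )
    # Returns a UUID in memory mode, or the database row id when a DatabaseManager is configured.
    return await orchestrator.create_criteria(criterion)
```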
- - Returns: - List of evaluation criteria - """ - if self._db_manager: - # Retrieve from database - response = self._db_manager.get(EvalCriteriaDB) - - criteria_list = [] - if response.status and response.data: - for criteria_data in response.data: - config = criteria_data.get("config") - if config: - if isinstance(config, EvalJudgeCriteria): - criteria_list.append(config) - else: - criteria_list.append(EvalJudgeCriteria.model_validate(config)) - return criteria_list - else: - # Retrieve from memory - return list(self._criteria.values()) - - # ----- Run Management ----- - - async def create_run( - self, - task: Union[str, EvalTask], - runner: BaseEvalRunner, - judge: BaseEvalJudge, - criteria: List[Union[str, EvalJudgeCriteria]], - name: str = "", - description: str = "", - ) -> str: - """ - Create a new evaluation run configuration. - - Args: - task: The task to evaluate (ID or task object) - runner: The runner to use for evaluation - judge: The judge to use for evaluation - criteria: List of criteria to use for evaluation (IDs or criteria objects) - name: Name for the run - description: Description for the run - - Returns: - Run ID - """ - # Resolve task - task_obj = None - if isinstance(task, str): - task_obj = await self.get_task(task) - if not task_obj: - raise ValueError(f"Task not found: {task}") - else: - task_obj = task - - # Resolve criteria - criteria_objs = [] - for criterion in criteria: - if isinstance(criterion, str): - criterion_obj = await self.get_criteria(criterion) - if not criterion_obj: - raise ValueError(f"Criteria not found: {criterion}") - criteria_objs.append(criterion_obj) - else: - criteria_objs.append(criterion) - - # Generate run ID - run_id = str(uuid.uuid4()) - - # Create run configuration - runner_config = runner.dump_component() if hasattr(runner, "dump_component") else runner._to_config() - judge_config = judge.dump_component() if hasattr(judge, "dump_component") else judge._to_config() - - if self._db_manager: - # Store in database - run_db = EvalRunDB( - name=name or f"Run {run_id}", - description=description, - task_id=int(task) if isinstance(task, str) and task.isdigit() else None, - runner_config=runner_config.model_dump(), - judge_config=judge_config.model_dump(), - criteria_configs=criteria_objs, - status=EvalRunStatus.PENDING, - ) - response = self._db_manager.upsert(run_db) - if not response.status: - logger.error(f"Failed to store run: {response.message}") - raise RuntimeError(f"Failed to store run: {response.message}") - run_id = str(response.data.get("id")) if response.data else run_id - else: - # Store in memory - self._runs[run_id] = { - "task": task_obj, - "runner_config": runner_config, - "judge_config": judge_config, - "criteria_configs": [c.model_dump() for c in criteria_objs], - "status": EvalRunStatus.PENDING, - "created_at": datetime.now(), - "run_result": None, - "score_result": None, - "name": name or f"Run {run_id}", - "description": description, - } - - return run_id - - async def start_run(self, run_id: str) -> None: - """ - Start an evaluation run. 
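create_run accepts either stored IDs or concrete objects for the task and criteria, and start_run schedules _execute_run on the event loop. A sketch of wiring a run with the ModelEvalRunner and LLMEvalJudge that this diff also removes; the model name, API key, and the OpenAI client import path are placeholders/assumptions:

```python
from autogen_ext.models.openai import OpenAIChatCompletionClient

from autogenstudio.eval.judges import LLMEvalJudge
from autogenstudio.eval.runners import ModelEvalRunner


async def launch_baseline_run(orchestrator, task_id: str, criteria_id: str) -> str:
    model_client = OpenAIChatCompletionClient(model="gpt-4o-mini", api_key="...")
    run_id = await orchestrator.create_run(
        task=task_id,                                  # may also be an EvalTask instance
        runner=ModelEvalRunner(model_client=model_client),
        judge=LLMEvalJudge(model_client=model_client),
        criteria=[criteria_id],                        # IDs or EvalJudgeCriteria objects
        name="gpt-4o-mini baseline",
    )
    await orchestrator.start_run(run_id)               # executes in the background via asyncio.create_task
    return run_id
```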
- - Args: - run_id: The ID of the run to start - """ - # Check if run is already active - if run_id in self._active_runs: - logger.warning(f"Run {run_id} is already active") - return - - # Start the run asynchronously - run_task = asyncio.create_task(self._execute_run(run_id)) - self._active_runs[run_id] = run_task - - # Update run status - await self._update_run_status(run_id, EvalRunStatus.RUNNING) - - async def _execute_run(self, run_id: str) -> None: - """ - Execute an evaluation run. - - Args: - run_id: The ID of the run to execute - """ - try: - # Get run configuration - run_config = await self._get_run_config(run_id) - if not run_config: - raise ValueError(f"Run not found: {run_id}") - - # Get task - task = run_config.get("task") - if not task: - raise ValueError(f"Task not found for run: {run_id}") - - # Initialize runner - runner_config = run_config.get("runner_config") - runner = BaseEvalRunner.load_component(runner_config) if runner_config else None - - # Initialize judge - judge_config = run_config.get("judge_config") - judge = BaseEvalJudge.load_component(judge_config) if judge_config else None - - if not runner or not judge: - raise ValueError(f"Runner or judge not found for run: {run_id}") - - # Initialize criteria - criteria_configs = run_config.get("criteria_configs") - criteria = [] - if criteria_configs: - criteria = [ - EvalJudgeCriteria.model_validate(c) if not isinstance(c, EvalJudgeCriteria) else c - for c in criteria_configs - ] - - # Execute runner - logger.info(f"Starting runner for run {run_id}") - start_time = datetime.now() - run_result = await runner.run(task) - - # Update run result - await self._update_run_result(run_id, run_result) - - if not run_result.status: - logger.error(f"Runner failed for run {run_id}: {run_result.error}") - await self._update_run_status(run_id, EvalRunStatus.FAILED) - return - - # Execute judge - logger.info(f"Starting judge for run {run_id}") - score_result = await judge.judge(task, run_result, criteria) - - # Update score result - await self._update_score_result(run_id, score_result) - - # Update run status - end_time = datetime.now() - await self._update_run_completed(run_id, start_time, end_time) - - logger.info(f"Run {run_id} completed successfully") - - except Exception as e: - logger.exception(f"Error executing run {run_id}: {str(e)}") - await self._update_run_error(run_id, str(e)) - finally: - # Remove from active runs - if run_id in self._active_runs: - del self._active_runs[run_id] - - async def get_run_status(self, run_id: str) -> Optional[EvalRunStatus]: - """ - Get the status of an evaluation run. - - Args: - run_id: The ID of the run - - Returns: - The run status if found, None otherwise - """ - run_config = await self._get_run_config(run_id) - return run_config.get("status") if run_config else None - - async def get_run_result(self, run_id: str) -> Optional[EvalRunResult]: - """ - Get the result of an evaluation run. - - Args: - run_id: The ID of the run - - Returns: - The run result if found, None otherwise - """ - run_config = await self._get_run_config(run_id) - if not run_config: - return None - - run_result = run_config.get("run_result") - if not run_result: - return None - - return run_result if isinstance(run_result, EvalRunResult) else EvalRunResult.model_validate(run_result) - - async def get_run_score(self, run_id: str) -> Optional[EvalScore]: - """ - Get the score of an evaluation run. 
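Because start_run returns immediately, callers were expected to poll the status accessors. A small sketch that waits for a terminal status and then reads the score, using only the EvalRunStatus values that appear in the removed module:

```python
import asyncio
from typing import Optional

from autogenstudio.datamodel.eval import EvalRunStatus, EvalScore

TERMINAL = {EvalRunStatus.COMPLETED, EvalRunStatus.FAILED, EvalRunStatus.CANCELED}


async def wait_for_score(orchestrator, run_id: str, poll_seconds: float = 1.0) -> Optional[EvalScore]:
    while True:
        status = await orchestrator.get_run_status(run_id)
        if status is None or status in TERMINAL:
            break
        await asyncio.sleep(poll_seconds)
    return await orchestrator.get_run_score(run_id)  # None unless the judge produced a score
```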
- - Args: - run_id: The ID of the run - - Returns: - The run score if found, None otherwise - """ - run_config = await self._get_run_config(run_id) - if not run_config: - return None - - score_result = run_config.get("score_result") - if not score_result: - return None - - return score_result if isinstance(score_result, EvalScore) else EvalScore.model_validate(score_result) - - async def list_runs(self) -> List[Dict[str, Any]]: - """ - List all available evaluation runs. - - Returns: - List of run configurations - """ - if self._db_manager: - # Retrieve from database - response = self._db_manager.get(EvalRunDB) - - runs = [] - if response.status and response.data: - for run_data in response.data: - runs.append( - { - "id": run_data.get("id"), - "name": run_data.get("name"), - "status": run_data.get("status"), - "created_at": run_data.get("created_at"), - "updated_at": run_data.get("updated_at"), - } - ) - return runs - else: - # Retrieve from memory - return [ - { - "id": run_id, - "name": run_config.get("name"), - "status": run_config.get("status"), - "created_at": run_config.get("created_at"), - "updated_at": run_config.get("updated_at", run_config.get("created_at")), - } - for run_id, run_config in self._runs.items() - ] - - async def cancel_run(self, run_id: str) -> bool: - """ - Cancel an active evaluation run. - - Args: - run_id: The ID of the run to cancel - - Returns: - True if the run was cancelled, False otherwise - """ - # Check if run is active - if run_id not in self._active_runs: - logger.warning(f"Run {run_id} is not active") - return False - - # Cancel the run task - try: - self._active_runs[run_id].cancel() - await self._update_run_status(run_id, EvalRunStatus.CANCELED) - del self._active_runs[run_id] - return True - except Exception as e: - logger.error(f"Failed to cancel run {run_id}: {str(e)}") - return False - - # ----- Helper Methods ----- - - async def _get_run_config(self, run_id: str) -> Optional[Dict[str, Any]]: - """ - Get the configuration of an evaluation run. - - Args: - run_id: The ID of the run - - Returns: - The run configuration if found, None otherwise - """ - if self._db_manager: - # Retrieve from database - response = self._db_manager.get(EvalRunDB, filters={"id": int(run_id) if run_id.isdigit() else run_id}) - - if response.status and response.data and len(response.data) > 0: - run_data = response.data[0] - - # Get task - task = None - if run_data.get("task_id"): - task_response = self._db_manager.get(EvalTaskDB, filters={"id": run_data.get("task_id")}) - if task_response.status and task_response.data and len(task_response.data) > 0: - task_data = task_response.data[0] - task = ( - task_data.get("config") - if isinstance(task_data.get("config"), EvalTask) - else EvalTask.model_validate(task_data.get("config")) - ) - - return { - "task": task, - "runner_config": run_data.get("runner_config"), - "judge_config": run_data.get("judge_config"), - "criteria_configs": run_data.get("criteria_configs"), - "status": run_data.get("status"), - "run_result": run_data.get("run_result"), - "score_result": run_data.get("score_result"), - "name": run_data.get("name"), - "description": run_data.get("description"), - "created_at": run_data.get("created_at"), - "updated_at": run_data.get("updated_at"), - } - else: - # Retrieve from memory - return self._runs.get(run_id) - - return None - - async def _update_run_status(self, run_id: str, status: EvalRunStatus) -> None: - """ - Update the status of an evaluation run. 
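The database branches above repeat the same id coercion (integer primary keys in the DB, UUID strings in memory). A small hypothetical helper capturing that pattern, not part of the original module:

```python
from typing import Union


def coerce_id(raw_id: str) -> Union[int, str]:
    """Database rows use integer primary keys; in-memory runs use UUID strings."""
    return int(raw_id) if raw_id.isdigit() else raw_id


# e.g. db_manager.get(EvalRunDB, filters={"id": coerce_id(run_id)})
```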
- - Args: - run_id: The ID of the run - status: The new status - """ - if self._db_manager: - # Update in database - response = self._db_manager.get(EvalRunDB, filters={"id": int(run_id) if run_id.isdigit() else run_id}) - - if response.status and response.data and len(response.data) > 0: - run_data = response.data[0] - run_db = EvalRunDB.model_validate(run_data) - run_db.status = status - run_db.updated_at = datetime.now() - self._db_manager.upsert(run_db) - else: - # Update in memory - if run_id in self._runs: - self._runs[run_id]["status"] = status - self._runs[run_id]["updated_at"] = datetime.now() - - async def _update_run_result(self, run_id: str, run_result: EvalRunResult) -> None: - """ - Update the result of an evaluation run. - - Args: - run_id: The ID of the run - run_result: The run result - """ - if self._db_manager: - # Update in database - response = self._db_manager.get(EvalRunDB, filters={"id": int(run_id) if run_id.isdigit() else run_id}) - - if response.status and response.data and len(response.data) > 0: - run_data = response.data[0] - run_db = EvalRunDB.model_validate(run_data) - run_db.run_result = run_result - run_db.updated_at = datetime.now() - self._db_manager.upsert(run_db) - else: - # Update in memory - if run_id in self._runs: - self._runs[run_id]["run_result"] = run_result - self._runs[run_id]["updated_at"] = datetime.now() - - async def _update_score_result(self, run_id: str, score_result: EvalScore) -> None: - """ - Update the score of an evaluation run. - - Args: - run_id: The ID of the run - score_result: The score result - """ - if self._db_manager: - # Update in database - response = self._db_manager.get(EvalRunDB, filters={"id": int(run_id) if run_id.isdigit() else run_id}) - - if response.status and response.data and len(response.data) > 0: - run_data = response.data[0] - run_db = EvalRunDB.model_validate(run_data) - run_db.score_result = score_result - run_db.updated_at = datetime.now() - self._db_manager.upsert(run_db) - else: - # Update in memory - if run_id in self._runs: - self._runs[run_id]["score_result"] = score_result - self._runs[run_id]["updated_at"] = datetime.now() - - async def _update_run_completed(self, run_id: str, start_time: datetime, end_time: datetime) -> None: - """ - Update a run as completed. - - Args: - run_id: The ID of the run - start_time: The start time - end_time: The end time - """ - if self._db_manager: - # Update in database - response = self._db_manager.get(EvalRunDB, filters={"id": int(run_id) if run_id.isdigit() else run_id}) - - if response.status and response.data and len(response.data) > 0: - run_data = response.data[0] - run_db = EvalRunDB.model_validate(run_data) - run_db.status = EvalRunStatus.COMPLETED - run_db.start_time = start_time - run_db.end_time = end_time - run_db.updated_at = datetime.now() - self._db_manager.upsert(run_db) - else: - # Update in memory - if run_id in self._runs: - self._runs[run_id]["status"] = EvalRunStatus.COMPLETED - self._runs[run_id]["start_time"] = start_time - self._runs[run_id]["end_time"] = end_time - self._runs[run_id]["updated_at"] = datetime.now() - - async def _update_run_error(self, run_id: str, error_message: str) -> None: - """ - Update a run with an error. 
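The four _update_run_* helpers differ only in which columns they touch. A hypothetical consolidation sketch (it assumes it lives inside EvalOrchestrator, so EvalRunDB comes from the module's existing imports; this refactor is not part of the deleted code):

```python
from datetime import datetime
from typing import Any


async def _update_run_fields(self, run_id: str, **fields: Any) -> None:
    """Apply the same write path used by the helpers above for any set of columns."""
    if self._db_manager:
        filters = {"id": int(run_id) if run_id.isdigit() else run_id}
        response = self._db_manager.get(EvalRunDB, filters=filters)
        if response.status and response.data:
            run_db = EvalRunDB.model_validate(response.data[0])
            for key, value in fields.items():
                setattr(run_db, key, value)
            run_db.updated_at = datetime.now()
            self._db_manager.upsert(run_db)
    elif run_id in self._runs:
        self._runs[run_id].update(fields)
        self._runs[run_id]["updated_at"] = datetime.now()


# e.g. await self._update_run_fields(run_id, status=EvalRunStatus.FAILED, error_message=str(e))
```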
- - Args: - run_id: The ID of the run - error_message: The error message - """ - if self._db_manager: - # Update in database - response = self._db_manager.get(EvalRunDB, filters={"id": int(run_id) if run_id.isdigit() else run_id}) - - if response.status and response.data and len(response.data) > 0: - run_data = response.data[0] - run_db = EvalRunDB.model_validate(run_data) - run_db.status = EvalRunStatus.FAILED - run_db.error_message = error_message - run_db.end_time = datetime.now() - run_db.updated_at = datetime.now() - self._db_manager.upsert(run_db) - else: - # Update in memory - if run_id in self._runs: - self._runs[run_id]["status"] = EvalRunStatus.FAILED - self._runs[run_id]["error_message"] = error_message - self._runs[run_id]["end_time"] = datetime.now() - self._runs[run_id]["updated_at"] = datetime.now() - - async def tabulate_results(self, run_ids: List[str], include_reasons: bool = False) -> TabulatedResults: - """ - Generate a tabular representation of evaluation results across runs. - - This method collects scores across different runs and organizes them by - dimension, making it easy to create visualizations like radar charts. - - Args: - run_ids: List of run IDs to include in the tabulation - include_reasons: Whether to include scoring reasons in the output - - Returns: - A dictionary with structured data suitable for visualization - """ - result: TabulatedResults = {"dimensions": [], "runs": []} - - # Parallelize fetching of run configs and scores - fetch_tasks = [] - for run_id in run_ids: - fetch_tasks.append(self._get_run_config(run_id)) - fetch_tasks.append(self.get_run_score(run_id)) - - # Wait for all fetches to complete - fetch_results = await asyncio.gather(*fetch_tasks) - - # Process fetched data - dimensions_set = set() - run_data = {} - - for i in range(0, len(fetch_results), 2): - run_id = run_ids[i // 2] - run_config = fetch_results[i] - score = fetch_results[i + 1] - - # Store run data for later processing - run_data[run_id] = (run_config, score) - - # Collect dimensions - if score and score.dimension_scores: - for dim_score in score.dimension_scores: - dimensions_set.add(dim_score.dimension) - - # Convert dimensions to sorted list - result["dimensions"] = sorted(list(dimensions_set)) - - # Process each run's data - for run_id, (run_config, score) in run_data.items(): - if not run_config or not score: - continue - - # Determine runner type - runner_type = "unknown" - if run_config.get("runner_config"): - runner_config = run_config.get("runner_config") - if runner_config is not None and "provider" in runner_config: - if "ModelEvalRunner" in runner_config["provider"]: - runner_type = "model" - elif "TeamEvalRunner" in runner_config["provider"]: - runner_type = "team" - - # Get task name - task = run_config.get("task") - task_name = task.name if task else "Unknown Task" - - # Create run entry - run_entry: RunEntry = { - "id": run_id, - "name": run_config.get("name", f"Run {run_id}"), - "task_name": task_name, - "runner_type": runner_type, - "overall_score": score.overall_score, - "scores": [], - "reasons": [] if include_reasons else None, - } - - # Build dimension lookup map for O(1) access - dim_map = {ds.dimension: ds for ds in score.dimension_scores} - - # Populate scores aligned with dimensions - for dim in result["dimensions"]: - dim_score = dim_map.get(dim) - if dim_score: - run_entry["scores"].append(dim_score.score) - if include_reasons: - run_entry["reasons"].append(dim_score.reason) # type: ignore - else: - run_entry["scores"].append(None) - if 
include_reasons: - run_entry["reasons"].append(None) # type: ignore - - result["runs"].append(run_entry) - - return result diff --git a/python/src/autogenstudio/eval/runners.py b/python/src/autogenstudio/eval/runners.py deleted file mode 100644 index bbb1f4813..000000000 --- a/python/src/autogenstudio/eval/runners.py +++ /dev/null @@ -1,201 +0,0 @@ -from abc import ABC, abstractmethod -from datetime import datetime -from typing import Any, Dict, Optional, Sequence, Type, Union - -from autogen_agentchat.base import TaskResult, Team -from autogen_agentchat.messages import ChatMessage, MultiModalMessage, TextMessage -from autogen_core import CancellationToken, Component, ComponentBase, ComponentModel, Image -from autogen_core.models import ChatCompletionClient, UserMessage -from pydantic import BaseModel -from typing_extensions import Self - -from ..datamodel.eval import EvalRunResult, EvalTask - - -class BaseEvalRunnerConfig(BaseModel): - """Base configuration for evaluation runners.""" - - name: str - description: str = "" - metadata: Dict[str, Any] = {} - - -class BaseEvalRunner(ABC, ComponentBase[BaseEvalRunnerConfig]): - """Base class for evaluation runners that defines the interface for running evaluations. - - This class provides the core interface that all evaluation runners must implement. - Subclasses should implement the run method to define how a specific evaluation is executed. - """ - - component_type = "eval_runner" - - def __init__(self, name: str, description: str = "", metadata: Optional[Dict[str, Any]] = None): - self.name = name - self.description = description - self.metadata = metadata or {} - - @abstractmethod - async def run(self, task: EvalTask, cancellation_token: Optional[CancellationToken] = None) -> EvalRunResult: - """Run the evaluation on the provided task and return a result. - - Args: - task: The task to evaluate - cancellation_token: Optional token to cancel the evaluation - - Returns: - EvaluationResult: The result of the evaluation - """ - pass - - def _to_config(self) -> BaseEvalRunnerConfig: - """Convert the runner configuration to a configuration object for serialization.""" - return BaseEvalRunnerConfig(name=self.name, description=self.description, metadata=self.metadata) - - -class ModelEvalRunnerConfig(BaseEvalRunnerConfig): - """Configuration for ModelEvalRunner.""" - - model_client: ComponentModel - - -class ModelEvalRunner(BaseEvalRunner, Component[ModelEvalRunnerConfig]): - """Evaluation runner that uses a single LLM to process tasks. - - This runner sends the task directly to a model client and returns the response. 
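For context, implementing the BaseEvalRunner interface defined above only requires an async run method that returns an EvalRunResult. A minimal illustrative subclass that echoes the task input back (it deliberately skips the Component serialization hooks that ModelEvalRunner and TeamEvalRunner wire up):

```python
from datetime import datetime
from typing import Optional

from autogen_agentchat.base import TaskResult
from autogen_agentchat.messages import TextMessage
from autogen_core import CancellationToken

from autogenstudio.datamodel.eval import EvalRunResult, EvalTask
from autogenstudio.eval.runners import BaseEvalRunner


class EchoEvalRunner(BaseEvalRunner):
    """Returns the task input unchanged; useful as a no-op baseline."""

    def __init__(self) -> None:
        super().__init__(name="Echo Runner", description="Echoes the task input")

    async def run(self, task: EvalTask, cancellation_token: Optional[CancellationToken] = None) -> EvalRunResult:
        text = task.input if isinstance(task.input, str) else "\n".join(str(x) for x in task.input)
        result = TaskResult(messages=[TextMessage(content=text, source="echo")])
        return EvalRunResult(result=result, status=True, start_time=datetime.now(), end_time=datetime.now())
```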
- """ - - component_config_schema = ModelEvalRunnerConfig - component_type = "eval_runner" - component_provider_override = "autogenstudio.eval.runners.ModelEvalRunner" - - def __init__( - self, - model_client: ChatCompletionClient, - name: str = "Model Runner", - description: str = "Evaluates tasks using a single LLM", - metadata: Optional[Dict[str, Any]] = None, - ): - super().__init__(name, description, metadata) - self.model_client = model_client - - async def run(self, task: EvalTask, cancellation_token: Optional[CancellationToken] = None) -> EvalRunResult: - """Run the task with the model client and return the result.""" - # Create initial result object - result = EvalRunResult() - - try: - model_input = [] - if isinstance(task.input, str): - text_message = UserMessage(content=task.input, source="user") - model_input.append(text_message) - elif isinstance(task.input, list): - message_content = [x for x in task.input] - model_input.append(UserMessage(content=message_content, source="user")) - # Run with the model - model_result = await self.model_client.create(messages=model_input, cancellation_token=cancellation_token) - - model_response = model_result.content if isinstance(model_result, str) else model_result.model_dump() - - task_result = TaskResult( - messages=[TextMessage(content=str(model_response), source="model")], - ) - result = EvalRunResult(result=task_result, status=True, start_time=datetime.now(), end_time=datetime.now()) - - except Exception as e: - result = EvalRunResult(status=False, error=str(e), end_time=datetime.now()) - - return result - - def _to_config(self) -> ModelEvalRunnerConfig: - """Convert to configuration object including model client configuration.""" - base_config = super()._to_config() - return ModelEvalRunnerConfig( - name=base_config.name, - description=base_config.description, - metadata=base_config.metadata, - model_client=self.model_client.dump_component(), - ) - - @classmethod - def _from_config(cls, config: ModelEvalRunnerConfig) -> Self: - """Create from configuration object with serialized model client.""" - model_client = ChatCompletionClient.load_component(config.model_client) - return cls( - name=config.name, - description=config.description, - metadata=config.metadata, - model_client=model_client, - ) - - -class TeamEvalRunnerConfig(BaseEvalRunnerConfig): - """Configuration for TeamEvalRunner.""" - - team: ComponentModel - - -class TeamEvalRunner(BaseEvalRunner, Component[TeamEvalRunnerConfig]): - """Evaluation runner that uses a team of agents to process tasks. - - This runner creates and runs a team based on a team configuration. 
- """ - - component_config_schema = TeamEvalRunnerConfig - component_type = "eval_runner" - component_provider_override = "autogenstudio.eval.runners.TeamEvalRunner" - - def __init__( - self, - team: Union[Team, ComponentModel], - name: str = "Team Runner", - description: str = "Evaluates tasks using a team of agents", - metadata: Optional[Dict[str, Any]] = None, - ): - super().__init__(name, description, metadata) - self._team = team if isinstance(team, Team) else Team.load_component(team) - - async def run(self, task: EvalTask, cancellation_token: Optional[CancellationToken] = None) -> EvalRunResult: - """Run the task with the team and return the result.""" - # Create initial result object - result = EvalRunResult() - - try: - team_task: Sequence[ChatMessage] = [] - if isinstance(task.input, str): - team_task.append(TextMessage(content=task.input, source="user")) - if isinstance(task.input, list): - for message in task.input: - if isinstance(message, str): - team_task.append(TextMessage(content=message, source="user")) - elif isinstance(message, Image): - team_task.append(MultiModalMessage(source="user", content=[message])) - - # Run task with team - team_result = await self._team.run(task=team_task, cancellation_token=cancellation_token) - - result = EvalRunResult(result=team_result, status=True, start_time=datetime.now(), end_time=datetime.now()) - - except Exception as e: - result = EvalRunResult(status=False, error=str(e), end_time=datetime.now()) - - return result - - def _to_config(self) -> TeamEvalRunnerConfig: - """Convert to configuration object including team configuration.""" - base_config = super()._to_config() - return TeamEvalRunnerConfig( - name=base_config.name, - description=base_config.description, - metadata=base_config.metadata, - team=self._team.dump_component(), - ) - - @classmethod - def _from_config(cls, config: TeamEvalRunnerConfig) -> Self: - """Create from configuration object with serialized team configuration.""" - return cls( - team=Team.load_component(config.team), - name=config.name, - description=config.description, - metadata=config.metadata, - ) diff --git a/python/src/autogenstudio/teammanager/__init__.py b/python/src/autogenstudio/teammanager/__init__.py deleted file mode 100644 index 7f202c739..000000000 --- a/python/src/autogenstudio/teammanager/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .teammanager import TeamManager - -__all__ = ["TeamManager"] diff --git a/python/src/autogenstudio/teammanager/teammanager.py b/python/src/autogenstudio/teammanager/teammanager.py deleted file mode 100644 index b0247b31d..000000000 --- a/python/src/autogenstudio/teammanager/teammanager.py +++ /dev/null @@ -1,182 +0,0 @@ -import asyncio -import json -import logging -import os -import time -from pathlib import Path -from typing import AsyncGenerator, Callable, List, Optional, Sequence, Union - -import aiofiles -import yaml -from autogen_agentchat.agents import UserProxyAgent -from autogen_agentchat.base import TaskResult -from autogen_agentchat.messages import BaseAgentEvent, BaseChatMessage -from autogen_agentchat.teams import BaseGroupChat -from autogen_core import EVENT_LOGGER_NAME, CancellationToken, ComponentModel -from autogen_core.logging import LLMCallEvent -from opentelemetry import trace - -from ..datamodel.types import LLMCallEventMessage, TeamResult -from ..web.managers.run_context import RunContext - -logger = logging.getLogger(__name__) - - -class RunEventLogger(logging.Handler): - """Event logger that queues LLMCallEvents for streaming""" - - def 
__init__(self): - super().__init__() - self.events = asyncio.Queue() - - def emit(self, record: logging.LogRecord): - if isinstance(record.msg, LLMCallEvent): - self.events.put_nowait(LLMCallEventMessage(content=str(record.msg))) - - -class TeamManager: - """Manages team operations including loading configs and running teams""" - - def __init__(self): - self._team: Optional[BaseGroupChat] = None - self._run_context = RunContext() - - @staticmethod - async def load_from_file(path: Union[str, Path]) -> dict: - """Load team configuration from JSON/YAML file""" - path = Path(path) - if not path.exists(): - raise FileNotFoundError(f"Config file not found: {path}") - - async with aiofiles.open(path) as f: - content = await f.read() - if path.suffix == ".json": - return json.loads(content) - elif path.suffix in (".yml", ".yaml"): - return yaml.safe_load(content) - raise ValueError(f"Unsupported file format: {path.suffix}") - - @staticmethod - async def load_from_directory(directory: Union[str, Path]) -> List[dict]: - """Load all team configurations from a directory""" - directory = Path(directory) - configs = [] - valid_extensions = {".json", ".yaml", ".yml"} - excluded_files = {"tools.json"} - - for path in directory.iterdir(): - if path.is_file() and path.suffix.lower() in valid_extensions and path.name.lower() not in excluded_files: - try: - config = await TeamManager.load_from_file(path) - configs.append(config) - except Exception as e: - logger.error(f"Failed to load {path}: {e}") - - return configs - - async def _create_team( - self, - team_config: Union[str, Path, dict, ComponentModel], - input_func: Optional[Callable] = None, - state: Optional[dict] = None, - ) -> BaseGroupChat: - """Create team instance from config""" - if isinstance(team_config, (str, Path)): - config = await self.load_from_file(team_config) - elif isinstance(team_config, dict): - config = team_config - else: - config = team_config.model_dump() - - self._team = BaseGroupChat.load_component(config) - - if state: - await self._team.load_state(state) - - for agent in self._team._participants: - if hasattr(agent, "input_func") and isinstance(agent, UserProxyAgent) and input_func: - agent.input_func = input_func - - return self._team - - async def run_stream( - self, - task: str | BaseChatMessage | Sequence[BaseChatMessage] | None, - team_config: Union[str, Path, dict, ComponentModel], - state: Optional[dict] = None, - input_func: Optional[Callable] = None, - cancellation_token: Optional[CancellationToken] = None, - attributes: Optional[dict] = None, - ) -> AsyncGenerator[Union[BaseAgentEvent | BaseChatMessage | LLMCallEvent, BaseChatMessage, TeamResult], None]: - """Stream team execution results""" - start_time = time.time() - team = None - - # Setup logger correctly - logger = logging.getLogger(EVENT_LOGGER_NAME) - logger.setLevel(logging.INFO) - llm_event_logger = RunEventLogger() - logger.handlers = [llm_event_logger] # Replace all handlers - - try: - team = await self._create_team(team_config, input_func, state) - tracer = trace.get_tracer("autogen-core") - with tracer.start_as_current_span("run_stream", attributes=attributes): - async for message in team.run_stream(task=task, cancellation_token=cancellation_token): - if cancellation_token and cancellation_token.is_cancelled(): - break - - if isinstance(message, TaskResult): - yield TeamResult(task_result=message, usage="", duration=time.time() - start_time) - else: - if hasattr(message, "metadata"): - timestamp = time.time() - message.metadata["duration"] = 
str(timestamp - start_time) - message.metadata["created_at"] = str(timestamp) - yield message - - # Check for any LLM events - while not llm_event_logger.events.empty(): - event = await llm_event_logger.events.get() - if hasattr(event, "metadata"): - timestamp = time.time() - event.metadata["duration"] = str(timestamp - start_time) - event.metadata["created_at"] = str(timestamp) - yield event - finally: - # Cleanup - remove our handler - if llm_event_logger in logger.handlers: - logger.handlers.remove(llm_event_logger) - - # Ensure cleanup happens - if team and hasattr(team, "_participants"): - for agent in team._participants: - if hasattr(agent, "close"): - await agent.close() - - async def run( - self, - task: str | BaseChatMessage | Sequence[BaseChatMessage] | None, - team_config: Union[str, Path, dict, ComponentModel], - state: Optional[dict] = None, - input_func: Optional[Callable] = None, - cancellation_token: Optional[CancellationToken] = None, - attributes: Optional[dict] = None, - ) -> TeamResult: - """Run team synchronously""" - start_time = time.time() - team = None - - try: - team = await self._create_team(team_config, input_func, state) - tracer = trace.get_tracer("autogen-core") - with tracer.start_as_current_span("run", attributes=attributes): - result = await team.run(task=task, cancellation_token=cancellation_token) - - return TeamResult(task_result=result, usage="", duration=time.time() - start_time) - - finally: - if team and hasattr(team, "_participants"): - for agent in team._participants: - if hasattr(agent, "close"): - await agent.close() diff --git a/python/src/autogenstudio/toolmanager/__init__.py b/python/src/autogenstudio/toolmanager/__init__.py deleted file mode 100644 index fc9f66f2f..000000000 --- a/python/src/autogenstudio/toolmanager/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .toolmanager import ToolManager - -__all__ = ["ToolManager"] diff --git a/python/src/autogenstudio/toolmanager/toolmanager.py b/python/src/autogenstudio/toolmanager/toolmanager.py deleted file mode 100644 index 703757865..000000000 --- a/python/src/autogenstudio/toolmanager/toolmanager.py +++ /dev/null @@ -1,28 +0,0 @@ -import json -import logging -from pathlib import Path -from typing import Union - -import aiofiles -import yaml - -logger = logging.getLogger(__name__) - - -class ToolManager: - """Manages loading tool configs from file/folder to populate the DB.""" - - @staticmethod - async def load_from_file(path: Union[str, Path]) -> dict: - """Load tool configuration from JSON/YAML file""" - path = Path(path) - if not path.exists(): - raise FileNotFoundError(f"Config file not found: {path}") - - async with aiofiles.open(path) as f: - content = await f.read() - if path.suffix == ".json": - return json.loads(content) - elif path.suffix in (".yml", ".yaml"): - return yaml.safe_load(content) - raise ValueError(f"Unsupported file format: {path.suffix}") diff --git a/python/src/autogenstudio/toolservermanager/__init__.py b/python/src/autogenstudio/toolservermanager/__init__.py deleted file mode 100644 index 58a017531..000000000 --- a/python/src/autogenstudio/toolservermanager/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .toolserver_manager import ToolServerManager - -__all__ = ["ToolServerManager"] diff --git a/python/src/autogenstudio/toolservermanager/toolserver_manager.py b/python/src/autogenstudio/toolservermanager/toolserver_manager.py deleted file mode 100644 index 6d36f3d97..000000000 --- a/python/src/autogenstudio/toolservermanager/toolserver_manager.py +++ /dev/null @@ 
-1,36 +0,0 @@ -from typing import Union - -from autogen_core import Component, ComponentModel - -from kagent.tool_servers import ToolServer - - -class ToolServerManager: - """ToolServerManager manages tool servers and tool discovery from those servers.""" - - async def _create_tool_server( - self, - tool_server_config: Union[dict, ComponentModel], - ) -> ToolServer: - """Create a tool server from the given configuration.""" - if not tool_server_config: - raise Exception("Tool server config is required") - - if isinstance(tool_server_config, dict): - config = tool_server_config - else: - config = tool_server_config.model_dump() - - try: - server = ToolServer.load_component(config) - return server - except Exception as e: - raise Exception(f"Failed to create tool server: {e}") from e - - async def discover_tools(self, tool_server_config: Union[dict, ComponentModel]) -> list[Component]: - """Discover tools from the given tool server.""" - try: - server = await self._create_tool_server(tool_server_config) - return await server.discover_tools() - except Exception as e: - raise Exception(f"Failed to discover tools: {e}") from e diff --git a/python/src/autogenstudio/utils/utils.py b/python/src/autogenstudio/utils/utils.py deleted file mode 100644 index d3b8e56cd..000000000 --- a/python/src/autogenstudio/utils/utils.py +++ /dev/null @@ -1,71 +0,0 @@ -import base64 -from typing import Sequence - -from autogen_agentchat.messages import ChatMessage, MultiModalMessage, TextMessage -from autogen_core import Image -from autogen_core.models import UserMessage -from loguru import logger - - -def construct_task(query: str, files: list[dict] | None = None) -> Sequence[ChatMessage]: - """ - Construct a task from a query string and list of files. - Returns a list of ChatMessage objects suitable for processing by the agent system. 
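The construct_task helper being removed here expects file dicts with name, type, and base64-encoded content, matching what the web frontend sends. A sketch of calling it directly; the import path is assumed from the deleted file location:

```python
import base64

from autogenstudio.utils.utils import construct_task

notes_b64 = base64.b64encode(b"Paris is the capital of France.").decode("ascii")

messages = construct_task(
    query="Summarize the attached notes.",
    files=[{"name": "notes.txt", "type": "text/plain", "content": notes_b64}],
)
# -> [TextMessage(source="user", content="Summarize..."),
#     TextMessage(source="user", content="Paris is the capital of France.",
#                 metadata={"filename": "notes.txt"})]
```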
- - Args: - query: The text query from the user - files: List of file objects with properties name, content, and type - - Returns: - List of BaseChatMessage objects (TextMessage, MultiModalMessage) - """ - if files is None: - files = [] - - messages = [] - - # Add the user's text query as a TextMessage - if query: - messages.append(TextMessage(source="user", content=query)) - - # Process each file based on its type - for file in files: - try: - if file.get("type", "").startswith("image/"): - # Handle image file using from_base64 method - # The content is already base64 encoded according to the convertFilesToBase64 function - image = Image.from_base64(file["content"]) - messages.append( - MultiModalMessage( - source="user", content=[image], metadata={"filename": file.get("name", "unknown.img")} - ) - ) - elif file.get("type", "").startswith("text/"): - # Handle text file as TextMessage - text_content = base64.b64decode(file["content"]).decode("utf-8") - messages.append( - TextMessage( - source="user", content=text_content, metadata={"filename": file.get("name", "unknown.txt")} - ) - ) - else: - # Log unsupported file types but still try to process based on best guess - logger.warning(f"Potentially unsupported file type: {file.get('type')} for file {file.get('name')}") - if file.get("type", "").startswith("application/"): - # Try to treat as text if it's an application type (like JSON) - text_content = base64.b64decode(file["content"]).decode("utf-8") - messages.append( - TextMessage( - source="user", - content=text_content, - metadata={ - "filename": file.get("name", "unknown.file"), - "filetype": file.get("type", "unknown"), - }, - ) - ) - except Exception as e: - logger.error(f"Error processing file {file.get('name')}: {str(e)}") - # Continue processing other files even if one fails - - return messages diff --git a/python/src/autogenstudio/validation/__init__.py b/python/src/autogenstudio/validation/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/python/src/autogenstudio/validation/component_test_service.py b/python/src/autogenstudio/validation/component_test_service.py deleted file mode 100644 index 1b6b07b04..000000000 --- a/python/src/autogenstudio/validation/component_test_service.py +++ /dev/null @@ -1,191 +0,0 @@ -# api/validator/test_service.py -import asyncio -from typing import Any, Dict, List, Optional - -from autogen_core import ComponentModel -from autogen_core.models import ChatCompletionClient, UserMessage -from pydantic import BaseModel - - -class ComponentTestResult(BaseModel): - status: bool - message: str - data: Optional[Any] = None - logs: List[str] = [] - - -class ComponentTestRequest(BaseModel): - component: ComponentModel - model_client: Optional[Dict[str, Any]] = None - timeout: Optional[int] = 30 - - -class ComponentTestService: - @staticmethod - async def test_agent( - component: ComponentModel, model_client: Optional[ChatCompletionClient] = None - ) -> ComponentTestResult: - """Test an agent component with a simple message""" - try: - from autogen_agentchat.agents import AssistantAgent - from autogen_agentchat.messages import TextMessage - from autogen_core import CancellationToken - - # If model_client is provided, use it; otherwise, use the component's model (if applicable) - agent_config = component.config or {} - - # Try to load the agent - try: - # Construct the agent with the model client if provided - if model_client: - agent_config["model_client"] = model_client - - agent = AssistantAgent(name=agent_config.get("name", 
"assistant"), **agent_config) - - logs = ["Agent component loaded successfully"] - except Exception as e: - return ComponentTestResult( - status=False, - message=f"Failed to initialize agent: {str(e)}", - logs=[f"Agent initialization error: {str(e)}"], - ) - - # Test the agent with a simple message - test_question = "What is 2+2? Keep it brief." - try: - response = await agent.on_messages( - [TextMessage(content=test_question, source="user")], - cancellation_token=CancellationToken(), - ) - - # Check if we got a valid response - status = response and response.chat_message is not None - - if status: - logs.append( - f"Agent responded with: {response.chat_message.to_text()} to the question : {test_question}" - ) - else: - logs.append("Agent did not return a valid response") - - return ComponentTestResult( - status=status, - message="Agent test completed successfully" if status else "Agent test failed - no valid response", - data=response.chat_message.model_dump() if status else None, - logs=logs, - ) - except Exception as e: - return ComponentTestResult( - status=False, - message=f"Error during agent response: {str(e)}", - logs=logs + [f"Agent response error: {str(e)}"], - ) - - except Exception as e: - return ComponentTestResult( - status=False, message=f"Error testing agent component: {str(e)}", logs=[f"Exception: {str(e)}"] - ) - - @staticmethod - async def test_model( - component: ComponentModel, model_client: Optional[ChatCompletionClient] = None - ) -> ComponentTestResult: - """Test a model component with a simple prompt""" - try: - # Use the component itself as a model client - model = ChatCompletionClient.load_component(component) - - # Prepare a simple test message - test_question = "What is 2+2? Give me only the answer." - messages = [UserMessage(content=test_question, source="user")] - - # Try to get a response - response = await model.create(messages=messages) - - # Test passes if we got a response with content - status = response and response.content is not None - - logs = ["Model component loaded successfully"] - if status: - logs.append(f"Model responded with: {response.content} (Query:{test_question})") - else: - logs.append("Model did not return a valid response") - - return ComponentTestResult( - status=status, - message="Model test completed successfully" if status else "Model test failed - no valid response", - data=response.model_dump() if status else None, - logs=logs, - ) - except Exception as e: - return ComponentTestResult( - status=False, message=f"Error testing model component: {str(e)}", logs=[f"Exception: {str(e)}"] - ) - - @staticmethod - async def test_tool(component: ComponentModel) -> ComponentTestResult: - """Test a tool component with sample inputs""" - # Placeholder for tool test logic - return ComponentTestResult( - status=True, message="Tool test not yet implemented", logs=["Tool component loaded successfully"] - ) - - @staticmethod - async def test_team( - component: ComponentModel, model_client: Optional[ChatCompletionClient] = None - ) -> ComponentTestResult: - """Test a team component with a simple task""" - # Placeholder for team test logic - return ComponentTestResult( - status=True, message="Team test not yet implemented", logs=["Team component loaded successfully"] - ) - - @staticmethod - async def test_termination(component: ComponentModel) -> ComponentTestResult: - """Test a termination component with sample message history""" - # Placeholder for termination test logic - return ComponentTestResult( - status=True, - message="Termination test not 
yet implemented", - logs=["Termination component loaded successfully"], - ) - - @classmethod - async def test_component( - cls, - component: ComponentModel, - timeout: int = 60, - model_client: Optional[ChatCompletionClient] = None, # noqa: ASYNC109 - ) -> ComponentTestResult: - """Test a component based on its type with appropriate test inputs""" - try: - # Get component type - component_type = component.component_type - - # Select test method based on component type - test_method = { - "agent": cls.test_agent, - "model": cls.test_model, - "tool": cls.test_tool, - "team": cls.test_team, - "termination": cls.test_termination, - }.get(component_type or "unknown") - - if not test_method: - return ComponentTestResult(status=False, message=f"Unknown component type: {component_type}") - - # Determine if the test method accepts a model_client parameter - accepts_model_client = component_type in ["agent", "model", "team"] - - # Run test with timeout - try: - if accepts_model_client: - result = await asyncio.wait_for(test_method(component, model_client), timeout=timeout) - else: - result = await asyncio.wait_for(test_method(component), timeout=timeout) - return result - except asyncio.TimeoutError: - return ComponentTestResult(status=False, message=f"Component test exceeded the {timeout}s timeout") - - except Exception as e: - return ComponentTestResult(status=False, message=f"Error testing component: {str(e)}") diff --git a/python/src/autogenstudio/validation/validation_service.py b/python/src/autogenstudio/validation/validation_service.py deleted file mode 100644 index b4a8c60cb..000000000 --- a/python/src/autogenstudio/validation/validation_service.py +++ /dev/null @@ -1,165 +0,0 @@ -# validation/validation_service.py -import importlib -from calendar import c -from typing import Any, Dict, List, Optional - -from autogen_core import ComponentModel, is_component_class -from pydantic import BaseModel - - -class ValidationRequest(BaseModel): - component: ComponentModel - - -class ValidationError(BaseModel): - field: str - error: str - suggestion: Optional[str] = None - - -class ValidationResponse(BaseModel): - is_valid: bool - errors: List[ValidationError] = [] - warnings: List[ValidationError] = [] - - -class ValidationService: - @staticmethod - def validate_provider(provider: str) -> Optional[ValidationError]: - """Validate that the provider exists and can be imported""" - try: - if provider in ["azure_openai_chat_completion_client", "AzureOpenAIChatCompletionClient"]: - provider = "autogen_ext.models.openai.AzureOpenAIChatCompletionClient" - elif provider in ["openai_chat_completion_client", "OpenAIChatCompletionClient"]: - provider = "autogen_ext.models.openai.OpenAIChatCompletionClient" - - module_path, class_name = provider.rsplit(".", maxsplit=1) - module = importlib.import_module(module_path) - component_class = getattr(module, class_name) - - if not is_component_class(component_class): - return ValidationError( - field="provider", - error=f"Class {provider} is not a valid component class", - suggestion="Ensure the class inherits from Component and implements required methods", - ) - return None - except ImportError: - return ValidationError( - field="provider", - error=f"Could not import provider {provider}", - suggestion="Check that the provider module is installed and the path is correct", - ) - except Exception as e: - return ValidationError( - field="provider", - error=f"Error validating provider: {str(e)}", - suggestion="Check the provider string format and class implementation", - ) 
- - @staticmethod - def validate_component_type(component: ComponentModel) -> Optional[ValidationError]: - """Validate the component type""" - if not component.component_type: - return ValidationError( - field="component_type", - error="Component type is missing", - suggestion="Add a component_type field to the component configuration", - ) - - @staticmethod - def validate_config_schema(component: ComponentModel) -> List[ValidationError]: - """Validate the component configuration against its schema""" - errors = [] - try: - # Convert to ComponentModel for initial validation - model = component.model_copy(deep=True) - - # Get the component class - provider = model.provider - module_path, class_name = provider.rsplit(".", maxsplit=1) - module = importlib.import_module(module_path) - component_class = getattr(module, class_name) - - # Validate against component's schema - if hasattr(component_class, "component_config_schema"): - try: - component_class.component_config_schema.model_validate(model.config) - except Exception as e: - errors.append( - ValidationError( - field="config", - error=f"Config validation failed: {str(e)}", - suggestion="Check that the config matches the component's schema", - ) - ) - else: - errors.append( - ValidationError( - field="config", - error="Component class missing config schema", - suggestion="Implement component_config_schema in the component class", - ) - ) - except Exception as e: - errors.append( - ValidationError( - field="config", - error=f"Schema validation error: {str(e)}", - suggestion="Check the component configuration format", - ) - ) - return errors - - @staticmethod - def validate_instantiation(component: ComponentModel) -> Optional[ValidationError]: - """Validate that the component can be instantiated""" - try: - model = component.model_copy(deep=True) - # Attempt to load the component - module_path, class_name = model.provider.rsplit(".", maxsplit=1) - module = importlib.import_module(module_path) - component_class = getattr(module, class_name) - component_class.load_component(model) - return None - except Exception as e: - return ValidationError( - field="instantiation", - error=f"Failed to instantiate component: {str(e)}", - suggestion="Check that the component can be properly instantiated with the given config", - ) - - @classmethod - def validate(cls, component: ComponentModel) -> ValidationResponse: - """Validate a component configuration""" - errors = [] - warnings = [] - - # Check provider - if provider_error := cls.validate_provider(component.provider): - errors.append(provider_error) - - # Check component type - if type_error := cls.validate_component_type(component): - errors.append(type_error) - - # Validate schema - schema_errors = cls.validate_config_schema(component) - errors.extend(schema_errors) - - # Only attempt instantiation if no errors so far - if not errors: - if inst_error := cls.validate_instantiation(component): - errors.append(inst_error) - - # Check for version warnings - if not component.version: - warnings.append( - ValidationError( - field="version", - error="Component version not specified", - suggestion="Consider adding a version to ensure compatibility", - ) - ) - - return ValidationResponse(is_valid=len(errors) == 0, errors=errors, warnings=warnings) diff --git a/python/src/autogenstudio/version.py b/python/src/autogenstudio/version.py deleted file mode 100644 index 41da5f2f6..000000000 --- a/python/src/autogenstudio/version.py +++ /dev/null @@ -1,3 +0,0 @@ -VERSION = "0.4.2" -__version__ = VERSION -APP_NAME = 
"autogenstudio" diff --git a/python/src/autogenstudio/web/__init__.py b/python/src/autogenstudio/web/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/python/src/autogenstudio/web/app.py b/python/src/autogenstudio/web/app.py deleted file mode 100644 index 335c7e8a9..000000000 --- a/python/src/autogenstudio/web/app.py +++ /dev/null @@ -1,108 +0,0 @@ -# api/app.py -import os -from contextlib import asynccontextmanager -from typing import AsyncGenerator - -# import logging -from fastapi import FastAPI -from fastapi.middleware.cors import CORSMiddleware -from fastapi.staticfiles import StaticFiles -from loguru import logger - -from ..version import VERSION -from .config import settings -from .routes import ( - invoke, - models, - tool_servers, - validation, -) - -# Initialize application - -# Create FastAPI application -app = FastAPI(debug=True) - -# Create API router with version and documentation -api = FastAPI( - root_path="/api", - title="AutoGen Studio API", - version=VERSION, - description="AutoGen Studio is a low-code tool for building and testing multi-agent workflows.", - docs_url="/docs" if settings.API_DOCS else None, -) - -# Include all routers with their prefixes -api.include_router( - validation.router, - prefix="/validate", - tags=["validation"], - responses={404: {"description": "Not found"}}, -) - -api.include_router( - tool_servers.router, - prefix="/toolservers", - tags=["tool servers"], - responses={404: {"description": "Not found"}}, -) - -api.include_router( - models.router, - prefix="/models", - tags=["models"], - responses={404: {"description": "Not found"}}, -) - -api.include_router( - invoke.router, - prefix="/invoke", - tags=["invoke"], - responses={404: {"description": "Not found"}}, -) - -# Version endpoint - - -@api.get("/version") -async def get_version(): - """Get API version""" - return { - "status": True, - "message": "Version retrieved successfully", - "data": {"version": VERSION}, - } - - -# Health check endpoint - - -@api.get("/health") -async def health_check(): - """API health check endpoint""" - return { - "status": True, - "message": "Service is healthy", - } - - -app.mount("/api", api) -# Error handlers - - -@app.exception_handler(500) -async def internal_error_handler(request, exc): - logger.error(f"Internal error: {str(exc)}") - return { - "status": False, - "message": "Internal server error", - "detail": "Internal server error", - } - - -def create_app() -> FastAPI: - """ - Factory function to create and configure the FastAPI application. - Useful for testing and different deployment scenarios. 
- """ - return app diff --git a/python/src/autogenstudio/web/config.py b/python/src/autogenstudio/web/config.py deleted file mode 100644 index 9408bdcd5..000000000 --- a/python/src/autogenstudio/web/config.py +++ /dev/null @@ -1,18 +0,0 @@ -# api/config.py - -from pydantic_settings import BaseSettings - - -class Settings(BaseSettings): - DATABASE_URI: str = "sqlite:///./autogen04202.db" - API_DOCS: bool = False - CLEANUP_INTERVAL: int = 300 # 5 minutes - SESSION_TIMEOUT: int = 3600 # 1 hour - CONFIG_DIR: str = "configs" # Default config directory relative to app_root - DEFAULT_USER_ID: str = "admin@kagent.dev" - UPGRADE_DATABASE: bool = False - - model_config = {"env_prefix": "AUTOGENSTUDIO_"} - - -settings = Settings() diff --git a/python/src/autogenstudio/web/managers/__init__.py b/python/src/autogenstudio/web/managers/__init__.py deleted file mode 100644 index 5fe553360..000000000 --- a/python/src/autogenstudio/web/managers/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# from .connection import WebSocketManager diff --git a/python/src/autogenstudio/web/managers/run_context.py b/python/src/autogenstudio/web/managers/run_context.py deleted file mode 100644 index 08ad4f63a..000000000 --- a/python/src/autogenstudio/web/managers/run_context.py +++ /dev/null @@ -1,23 +0,0 @@ -from contextlib import contextmanager -from contextvars import ContextVar -from typing import Any, ClassVar, Generator - - -class RunContext: - RUN_CONTEXT_VAR: ClassVar[ContextVar] = ContextVar("RUN_CONTEXT_VAR") - - @classmethod - @contextmanager - def populate_context(cls, run_id) -> Generator[None, Any, None]: - token = RunContext.RUN_CONTEXT_VAR.set(run_id) - try: - yield - finally: - RunContext.RUN_CONTEXT_VAR.reset(token) - - @classmethod - def current_run_id(cls) -> str: - try: - return cls.RUN_CONTEXT_VAR.get() - except LookupError as e: - raise RuntimeError("Error getting run id") from e diff --git a/python/src/autogenstudio/web/routes/__init__.py b/python/src/autogenstudio/web/routes/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/python/src/autogenstudio/web/routes/invoke.py b/python/src/autogenstudio/web/routes/invoke.py deleted file mode 100644 index 1ec96c371..000000000 --- a/python/src/autogenstudio/web/routes/invoke.py +++ /dev/null @@ -1,173 +0,0 @@ -import json -import logging -from typing import Any, List, Sequence - -from autogen_agentchat.base import TaskResult -from autogen_agentchat.messages import ( - BaseChatMessage, - ChatMessage, - HandoffMessage, - MemoryQueryEvent, - MessageFactory, - ModelClientStreamingChunkEvent, - StopMessage, - TextMessage, - ToolCallExecutionEvent, - ToolCallRequestEvent, - ToolCallSummaryMessage, -) -from fastapi import APIRouter -from fastapi.responses import StreamingResponse -from pydantic import BaseModel - -from autogenstudio.datamodel import Response, TeamResult -from autogenstudio.datamodel.types import LLMCallEventMessage -from autogenstudio.teammanager import TeamManager - -router = APIRouter() -team_manager = TeamManager() -logger = logging.getLogger(__name__) - -message_factory = MessageFactory() - - -class InvokeTaskRequest(BaseModel): - task: str - team_config: dict - messages: List[dict] | None = None - - -@router.post("/") -async def invoke(request: InvokeTaskRequest): - response = Response(message="Task successfully completed", status=True, data=None) - try: - previous_messages = _convert_message_config_to_chat_message(request.messages or []) - task = _prepare_task_with_history(request.task, previous_messages) - result_message: 
TeamResult = await team_manager.run(task=task, team_config=request.team_config) - # remove the previous messages from the result messages - result_message.task_result.messages = result_message.task_result.messages[len(previous_messages) :] - formatted_result = format_team_result(result_message) - response.data = formatted_result - except Exception as e: - response.message = str(e) - response.status = False - return response - - -def format_team_result(team_result: TeamResult) -> dict: - """ - Format the result from TeamResult to a dictionary. - """ - formatted_result = { - "task_result": format_task_result(team_result.task_result), - "usage": team_result.usage, - "duration": team_result.duration, - } - return formatted_result - - -def format_task_result(task_result: TaskResult) -> dict: - """ - Format the result from TeamResult to a dictionary. - """ - formatted_result = { - "messages": [format_message(message) for message in task_result.messages], - "stop_reason": task_result.stop_reason, - } - return formatted_result - - -def format_message(message: Any) -> dict: - """Format message for sse transmission - - Args: - message: Message to format - - Returns: - Optional[dict]: Formatted message or None if formatting fails - """ - - try: - if isinstance( - message, - ( - ModelClientStreamingChunkEvent, - TextMessage, - StopMessage, - HandoffMessage, - ToolCallRequestEvent, - ToolCallExecutionEvent, - LLMCallEventMessage, - MemoryQueryEvent, - ToolCallSummaryMessage, - ), - ): - return message.model_dump(exclude={"created_at"}) - - elif isinstance(message, TeamResult): - return format_team_result(message) - - return {"type": "unknown", "data": f"received unknown message type {type(message)}"} - - except Exception as e: - logger.error(f"Message formatting error: {e}") - return {"type": "error", "data": str(e)} - - -@router.post("/stream") -async def stream(request: InvokeTaskRequest): - logger.info(f"Invoking task with streaming: {request.task}") - - async def event_generator(): - try: - previous_messages = _convert_message_config_to_chat_message(request.messages or []) - num_previous_messages = len(previous_messages) - task = _prepare_task_with_history(request.task, previous_messages) - async for event in team_manager.run_stream(task=task, team_config=request.team_config): - if num_previous_messages > 0: - num_previous_messages -= 1 - continue - if isinstance(event, TeamResult): - yield f"event: task_result\ndata: {json.dumps(format_message(event))}\n\n" - else: - yield f"event: event\ndata: {json.dumps(format_message(event))}\n\n" - except Exception as e: - logger.error(f"Error during SSE stream generation: {e}", exc_info=True) - error_payload = {"type": "error", "data": {"message": str(e), "details": type(e).__name__}} - try: - yield f"data: {json.dumps(error_payload)}\n\n" - except Exception as yield_err: # pylint: disable=broad-except - logger.error(f"Error yielding error message to client: {yield_err}", exc_info=True) - - return StreamingResponse(event_generator(), media_type="text/event-stream") - - -def _convert_message_config_to_chat_message(raw_messages: list[dict]) -> list[BaseChatMessage]: - """Convert MessageConfig to appropriate BaseChatMessage type using MessageFactory""" - - messages = [] - for message_config in raw_messages: - message = message_factory.create(message_config) - if isinstance(message, BaseChatMessage): - messages.append(message) - - return messages - - -def _prepare_task_with_history( - task: str | BaseChatMessage | Sequence[BaseChatMessage] | None, - 
previous_messages: Sequence[BaseChatMessage], -) -> str | BaseChatMessage | Sequence[BaseChatMessage] | None: - """Combine previous messages with current task for team execution""" - if not previous_messages: - return task - - # If we have previous messages, combine them with the current task - if isinstance(task, str): - return list(previous_messages) + [TextMessage(source="user", content=task)] - elif isinstance(task, ChatMessage): - return list(previous_messages) + [task] - elif isinstance(task, list): - return list(previous_messages) + list(task) - else: - return list(previous_messages) diff --git a/python/src/autogenstudio/web/routes/models.py b/python/src/autogenstudio/web/routes/models.py deleted file mode 100644 index 821577f22..000000000 --- a/python/src/autogenstudio/web/routes/models.py +++ /dev/null @@ -1,88 +0,0 @@ -from typing import List - -from autogen_ext.models.anthropic._model_info import _MODEL_INFO as anthropic_models -from autogen_ext.models.ollama._model_info import _MODEL_INFO as ollama_models -from autogen_ext.models.openai._model_info import _MODEL_INFO as openai_models -from autogen_ext.models.openai._model_info import _MODEL_POINTERS -from fastapi import APIRouter -from pydantic import BaseModel - -from kagent.models.vertexai._model_info import _MODEL_INFO as vertexai_models - -router = APIRouter() - - -class ModelInfo(BaseModel): - name: str - function_calling: bool - - -class ListModelsResponse(BaseModel): - anthropic: List[ModelInfo] - ollama: List[ModelInfo] - openAI: List[ModelInfo] - azureOpenAI: List[ModelInfo] - vertexAI: List[ModelInfo] - - -@router.get("/") -async def list_models() -> ListModelsResponse: - # Build Ollama models - response_ollama = [] - for model_name, model_data in ollama_models.items(): - response_ollama.append( - ModelInfo( - name=model_name, - function_calling=model_data["function_calling"], - ) - ) - - # Build Anthropic models - final_anthropic_models_map = {} - for model_name, model_data in anthropic_models.items(): - final_anthropic_models_map[model_name] = {"function_calling": model_data["function_calling"]} - - for short_name, long_name_target in _MODEL_POINTERS.items(): - if short_name.startswith("claude-"): - if long_name_target in anthropic_models: - properties = anthropic_models[long_name_target] - final_anthropic_models_map[short_name] = {"function_calling": properties["function_calling"]} - - response_anthropic = [ - ModelInfo(name=name, function_calling=props["function_calling"]) - for name, props in final_anthropic_models_map.items() - ] - - # Build OpenAI models - final_openai_models_map = {} - for model_name, model_data in openai_models.items(): - final_openai_models_map[model_name] = {"function_calling": model_data["function_calling"]} - - for short_name, long_name_target in _MODEL_POINTERS.items(): - if not short_name.startswith("claude-"): - if long_name_target in openai_models: - properties = openai_models[long_name_target] - final_openai_models_map[short_name] = {"function_calling": properties["function_calling"]} - - response_openai = [ - ModelInfo(name=name, function_calling=props["function_calling"]) - for name, props in final_openai_models_map.items() - ] - - # Build VertexAI models - response_vertexai = [] - for model_name, model_data in vertexai_models.items(): - response_vertexai.append( - ModelInfo( - name=model_name, - function_calling=model_data["function_calling"], - ) - ) - - return ListModelsResponse( - anthropic=response_anthropic, - ollama=response_ollama, - openAI=response_openai, - 
azureOpenAI=response_openai,
-        vertexAI=response_vertexai,
-    )
diff --git a/python/src/autogenstudio/web/routes/tool_servers.py b/python/src/autogenstudio/web/routes/tool_servers.py
deleted file mode 100644
index de71c053c..000000000
--- a/python/src/autogenstudio/web/routes/tool_servers.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from typing import Dict, List
-
-from autogen_core import (
-    ComponentModel,
-)
-from fastapi import APIRouter, HTTPException
-from pydantic import BaseModel
-
-from ...toolservermanager import ToolServerManager
-
-router = APIRouter()
-
-
-class GetServerToolsRequest(BaseModel):
-    server: ComponentModel
-
-
-class NamedTool(BaseModel):
-    name: str
-    component: Dict
-
-
-class GetServerToolsResponse(BaseModel):
-    tools: List[NamedTool]
-
-
-@router.post("/")
-async def get_server_tools(
-    request: GetServerToolsRequest,
-) -> GetServerToolsResponse:
-    # First check if server exists
-
-    tsm = ToolServerManager()
-    tools_dict: List[NamedTool] = []
-    try:
-        tools = await tsm.discover_tools(request.server)
-        for tool in tools:
-            # Generate a unique identifier for the tool from its component
-            component_data = tool.dump_component().model_dump()
-
-            # Check if the tool already exists based on id/name
-            component_config = component_data.get("config", {})
-            tool_config = component_config.get("tool", {})
-            tool_name = tool_config.get("name", None)
-            tools_dict.append(NamedTool(name=tool_name, component=component_data))
-
-    except Exception as e:
-        raise HTTPException(status_code=400, detail=f"Failed to get server tools: {str(e)}") from e
-
-    return GetServerToolsResponse(tools=tools_dict)
diff --git a/python/src/autogenstudio/web/routes/validation.py b/python/src/autogenstudio/web/routes/validation.py
deleted file mode 100644
index 9041db76a..000000000
--- a/python/src/autogenstudio/web/routes/validation.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# api/routes/validation.py
-
-from fastapi import APIRouter
-
-from ...validation.component_test_service import ComponentTestRequest, ComponentTestResult, ComponentTestService
-from ...validation.validation_service import ValidationError, ValidationRequest, ValidationResponse, ValidationService
-
-router = APIRouter()
-
-
-@router.post("/")
-async def validate_component(request: ValidationRequest) -> ValidationResponse:
-    """Validate a component configuration"""
-    try:
-        return ValidationService.validate(request.component)
-    except Exception as e:
-        return ValidationResponse(
-            is_valid=False, errors=[ValidationError(field="validation", error=str(e))], warnings=[]
-        )
-
-
-@router.post("/test")
-async def test_component(request: ComponentTestRequest) -> ComponentTestResult:
-    """Test a component functionality with appropriate inputs based on type"""
-    # First validate the component configuration
-    validation_result = ValidationService.validate(request.component)
-
-    # Only proceed with testing if the component is valid
-    if not validation_result.is_valid:
-        return ComponentTestResult(
-            status=False, message="Component validation failed", logs=[e.error for e in validation_result.errors]
-        )
-
-    # If validation passed, run the functional test
-    return await ComponentTestService.test_component(
-        component=request.component, timeout=request.timeout if request.timeout else 60
-    )
diff --git a/python/src/kagent/__init__.py b/python/src/kagent/__init__.py
deleted file mode 100644
index 1406ad240..000000000
--- a/python/src/kagent/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-import importlib.metadata
-
-ABOUT = "This is kagent."
- -__version__ = importlib.metadata.version("kagent") diff --git a/python/src/kagent/agents/__init__.py b/python/src/kagent/agents/__init__.py deleted file mode 100644 index a3384584e..000000000 --- a/python/src/kagent/agents/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from ._task_agent import TaskAgent - -__all__ = ["TaskAgent"] diff --git a/python/src/kagent/agents/_task_agent.py b/python/src/kagent/agents/_task_agent.py deleted file mode 100644 index 4cf2ef642..000000000 --- a/python/src/kagent/agents/_task_agent.py +++ /dev/null @@ -1,178 +0,0 @@ -from typing import Any, AsyncGenerator, List, Mapping, Sequence - -from autogen_agentchat.agents._base_chat_agent import BaseChatAgent -from autogen_agentchat.base import Response, TaskResult, Team -from autogen_agentchat.messages import ( - AgentEvent, - BaseChatMessage, - ChatMessage, - HandoffMessage, - ModelClientStreamingChunkEvent, - TextMessage, - ToolCallExecutionEvent, - ToolCallRequestEvent, - ToolCallSummaryMessage, -) -from autogen_agentchat.state import BaseState -from autogen_core import CancellationToken, Component, ComponentModel -from autogen_core.model_context import ChatCompletionContext, UnboundedChatCompletionContext -from autogen_core.models import ( - AssistantMessage, - FunctionExecutionResultMessage, -) -from pydantic import BaseModel, Field -from typing_extensions import Self - - -class TaskAgentState(BaseState): - """State for a Task agent.""" - - inner_team_state: Mapping[str, Any] = Field(default_factory=dict) - model_context_state: Mapping[str, Any] = Field(default_factory=dict) - type: str = Field(default="TaskAgentState") - - -class TaskAgentConfig(BaseModel): - """The declarative configuration for a TaskAgent.""" - - name: str - team: ComponentModel - model_context: ComponentModel | None = None - description: str | None = None - - -class TaskAgent(BaseChatAgent, Component[TaskAgentConfig]): - """An agent that uses an inner team of agents to generate responses. - - Each time the agent's :meth:`on_messages` or :meth:`on_messages_stream` - method is called, it runs the inner team of agents and then returns the - final response. It will also reset the inner team by calling - :meth:`Team.reset`. - - Args: - name (str): The name of the agent. - team (Team): The team of agents to use. - model_context (ChatCompletionContext, optional): The model context to use for preparing responses. - description (str, optional): The description of the agent. - """ - - component_config_schema = TaskAgentConfig - component_provider_override = "kagent.agents.TaskAgent" - - DEFAULT_DESCRIPTION = "An agent that uses an inner team of agents to generate responses." - """str: The default description for a TaskAgent.""" - - def __init__( - self, - name: str, - team: Team, - model_context: ChatCompletionContext | None = None, - *, - description: str = DEFAULT_DESCRIPTION, - ) -> None: - super().__init__(name=name, description=description) - self._team = team - self._model_context = model_context or UnboundedChatCompletionContext() - - @property - def produced_message_types(self) -> Sequence[type[ChatMessage]]: - return (TextMessage,) - - async def on_messages(self, messages: Sequence[ChatMessage], cancellation_token: CancellationToken) -> Response: - # Call the stream method and collect the messages. 
- response: Response | None = None - async for msg in self.on_messages_stream(messages, cancellation_token): - if isinstance(msg, Response): - response = msg - assert response is not None - return response - - async def on_messages_stream( - self, messages: Sequence[ChatMessage], cancellation_token: CancellationToken - ) -> AsyncGenerator[AgentEvent | ChatMessage | Response, None]: - # Run the team of agents. - result: TaskResult | None = None - inner_messages: List[AgentEvent | ChatMessage] = [] - count = 0 - context = await self._model_context.get_messages() - task = list(messages) - if len(context) > 0: - message = HandoffMessage( - content="Here are the relevant previous messages.", - source=self.name, - target="", - context=context, - ) - task = [message] + list(messages) - - async for inner_msg in self._team.run_stream(task=task, cancellation_token=cancellation_token): - if isinstance(inner_msg, TaskResult): - result = inner_msg - else: - count += 1 - if count <= len(task): - # Skip the task messages. - continue - yield inner_msg - if isinstance(inner_msg, ModelClientStreamingChunkEvent): - # Skip the model client streaming chunk events. - continue - - inner_messages.append(inner_msg) - assert result is not None - - text_result: TextMessage | None = None - for message in inner_messages: - if isinstance(message, TextMessage): - text_result = message - await self._model_context.add_message(AssistantMessage(content=message.content, source=message.source)) - elif isinstance(message, ToolCallSummaryMessage): - await self._model_context.add_message(AssistantMessage(content=message.content, source=message.source)) - elif isinstance(message, ToolCallExecutionEvent): - await self._model_context.add_message(FunctionExecutionResultMessage(content=message.content)) - elif isinstance(message, ToolCallRequestEvent): - await self._model_context.add_message(AssistantMessage(content=message.content, source=message.source)) - - assert text_result is not None - # Yield the final agent response. - yield Response(chat_message=text_result, inner_messages=inner_messages) - - # Reset the team. 
- await self._team.reset() - - async def on_reset(self, cancellation_token: CancellationToken) -> None: - await self._team.reset() - - async def save_state(self) -> Mapping[str, Any]: - team_state = await self._team.save_state() - model_context_state = await self._model_context.save_state() - state = TaskAgentState(inner_team_state=team_state, model_context_state=model_context_state) - return state.model_dump() - - async def load_state(self, state: Mapping[str, Any]) -> None: - task_agent_state = TaskAgentState.model_validate(state) - await self._model_context.load_state(task_agent_state.model_context_state) - await self._team.load_state(task_agent_state.inner_team_state) - - def _to_config(self) -> TaskAgentConfig: - return TaskAgentConfig( - name=self.name, - team=self._team.dump_component(), - model_context=self._model_context.dump_component(), - description=self.description, - ) - - @classmethod - def _from_config(cls, config: TaskAgentConfig) -> Self: - model_context = ( - ChatCompletionContext.load_component(config.model_context) - if config.model_context is not None - else UnboundedChatCompletionContext() - ) - team = Team.load_component(config.team) - return cls( - name=config.name, - team=team, - model_context=model_context, - description=config.description or cls.DEFAULT_DESCRIPTION, - ) diff --git a/python/src/kagent/cli.py b/python/src/kagent/cli.py deleted file mode 100644 index 423aa5ccf..000000000 --- a/python/src/kagent/cli.py +++ /dev/null @@ -1,54 +0,0 @@ -import logging - -import os -import typer -from mcp.server.fastmcp import FastMCP -from autogen_core import ROOT_LOGGER_NAME - -app = typer.Typer() - -mcp = FastMCP("KAgent") - - -@app.command() -def serve( - host: str = "127.0.0.1", - port: int = 8081, - reload: bool = False, -): - import logging - import os - - from opentelemetry import trace - from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter - from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor - from opentelemetry.instrumentation.openai import OpenAIInstrumentor - from opentelemetry.sdk.resources import Resource - from opentelemetry.sdk.trace import TracerProvider - from opentelemetry.sdk.trace.export import BatchSpanProcessor - - from autogenstudio.cli import ui - - tracing_enabled = os.getenv("OTEL_TRACING_ENABLED", "false").lower() == "true" - if tracing_enabled: - logging.info("Enabling tracing") - tracer_provider = TracerProvider(resource=Resource({"service.name": "kagent"})) - processor = BatchSpanProcessor(OTLPSpanExporter()) - tracer_provider.add_span_processor(processor) - trace.set_tracer_provider(tracer_provider) - HTTPXClientInstrumentor().instrument() - OpenAIInstrumentor().instrument() - - ui(host=host, port=port, reload=reload) - - -def run(): - LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper() - logging.basicConfig(level=LOG_LEVEL) - logger = logging.getLogger(ROOT_LOGGER_NAME) - logger.setLevel(LOG_LEVEL) - app() - - -if __name__ == "__main__": - run() diff --git a/python/src/kagent/conditions/__init__.py b/python/src/kagent/conditions/__init__.py deleted file mode 100644 index 8c5da59f1..000000000 --- a/python/src/kagent/conditions/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from ._termination import FinalTextMessageTermination - -__all__ = ["FinalTextMessageTermination"] diff --git a/python/src/kagent/conditions/_termination.py b/python/src/kagent/conditions/_termination.py deleted file mode 100644 index f83cf9de7..000000000 --- a/python/src/kagent/conditions/_termination.py +++ /dev/null @@ 
-1,62 +0,0 @@
-from typing import Sequence
-
-from autogen_agentchat.base import TerminatedException, TerminationCondition
-from autogen_agentchat.messages import BaseAgentEvent, BaseChatMessage, StopMessage, TextMessage
-from autogen_core import Component
-from pydantic import BaseModel
-from typing_extensions import Self
-
-
-class FinalTextMessageTerminationConfig(BaseModel):
-    """Configuration for the FinalTextMessageTermination termination condition."""
-
-    source: str | None = None
-    """The source of the text message to terminate the conversation."""
-
-
-class FinalTextMessageTermination(TerminationCondition, Component[FinalTextMessageTerminationConfig]):
-    """Terminate the conversation if the final message received is a :class:`~autogen_agentchat.messages.TextMessage`.
-
-    This termination condition checks whether the final message in the sequence is a TextMessage. If it is,
-    it terminates the conversation if either:
-    - No source was specified (terminates on any TextMessage)
-    - The message source matches the specified source
-
-    Args:
-        source (str | None, optional): The source name to match against incoming messages. If None, matches any source.
-            Defaults to None.
-    """
-
-    component_config_schema = FinalTextMessageTerminationConfig
-    component_provider_override = "autogen_agentchat.conditions.FinalTextMessageTermination"
-
-    def __init__(self, source: str | None = None) -> None:
-        self._terminated = False
-        self._source = source
-
-    @property
-    def terminated(self) -> bool:
-        return self._terminated
-
-    async def __call__(self, messages: Sequence[BaseAgentEvent | BaseChatMessage]) -> StopMessage | None:
-        if self._terminated:
-            raise TerminatedException("Termination condition has already been reached")
-        if len(messages) == 0:
-            return None
-        final_message = messages[-1]
-        if isinstance(final_message, TextMessage) and (self._source is None or final_message.source == self._source):
-            self._terminated = True
-            return StopMessage(
-                content=f"Text message received from '{final_message.source}'", source="FinalTextMessageTermination"
-            )
-        return None
-
-    async def reset(self) -> None:
-        self._terminated = False
-
-    def _to_config(self) -> FinalTextMessageTerminationConfig:
-        return FinalTextMessageTerminationConfig(source=self._source)
-
-    @classmethod
-    def _from_config(cls, config: FinalTextMessageTerminationConfig) -> Self:
-        return cls(source=config.source)
diff --git a/python/src/kagent/memory/__init__.py b/python/src/kagent/memory/__init__.py
deleted file mode 100644
index 0c74e582f..000000000
--- a/python/src/kagent/memory/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from ._pinecone_memory import PineconeMemory
-
-__all__ = ["PineconeMemory"]
diff --git a/python/src/kagent/memory/_pinecone_memory.py b/python/src/kagent/memory/_pinecone_memory.py
deleted file mode 100644
index b633a7b21..000000000
--- a/python/src/kagent/memory/_pinecone_memory.py
+++ /dev/null
@@ -1,170 +0,0 @@
-import asyncio
-from typing import Any, List, Optional, Sequence
-
-from autogen_core import CancellationToken, Component
-from autogen_core.memory import Memory, MemoryContent, MemoryMimeType, MemoryQueryResult, UpdateContextResult
-from autogen_core.model_context import ChatCompletionContext
-from autogen_core.models import SystemMessage
-from loguru import logger
-from pinecone import Pinecone
-from pinecone.core.openapi.db_data.model.hit import Hit
-from pinecone.data import Index
-from pydantic import BaseModel, Field, SecretStr
-from typing_extensions import Self
-
-
-class PineconeMemoryConfig(BaseModel): - api_key: SecretStr = Field(..., description="The API key for the Pinecone API") - index_host: str = Field(..., description="The host for the Pinecone index") - top_k: int = Field(default=5, description="The number of results to retrieve from Pinecone") - namespace: Optional[str] = Field(default=None, description="The Pinecone namespace to query") - record_fields: Optional[List[str]] = Field(description="The fields to retrieve from the Pinecone index") - score_threshold: float = Field( - default=0.0, - description="The score threshold of results to include in the context. Results with a score below this threshold will be ignored.", - ) - - -class PineconeMemory(Memory, Component[PineconeMemoryConfig]): - component_config_schema = PineconeMemoryConfig - component_type = "memory" - component_provider_override = "kagent.memory.PineconeMemory" - - def __init__(self, config: PineconeMemoryConfig): - self._config = config - self._pc: Pinecone | None = None - self._index: Index | None = None - self._initialize() - - def _initialize(self): - """Initialize Pinecone if not already done.""" - if self._pc is None: - try: - self._pc = Pinecone(api_key=self._config.api_key.get_secret_value(), host=self._config.index_host) - self._index = self._pc.Index(host=self._config.index_host) - except Exception as e: - logger.error(f"Failed to initialize Pinecone: {e}") - raise Exception(f"Failed to initialize Pinecone: {e}") from e - - async def update_context( - self, - model_context: ChatCompletionContext, - cancellation_token: CancellationToken | None = None, - ) -> UpdateContextResult: - """Update the context by querying Pinecone based on the last message. - - Retrieves relevant text chunks from Pinecone and adds them as a SystemMessage - to the beginning of the context. - """ - messages = await model_context.get_messages() - if not messages: - return UpdateContextResult(success=True, memories=MemoryQueryResult(results=[])) - - # Use the last message as the query basis - last_message = messages[-1] - query_text = last_message.content if isinstance(last_message.content, str) else str(last_message) - - if not query_text: - return UpdateContextResult(success=True, memories=MemoryQueryResult(results=[])) - - try: - query_results = await self.query(query_text, cancellation_token=cancellation_token) - if query_results.results: - memory_strings = [f"{i}. 
{str(memory.content)}" for i, memory in enumerate(query_results.results, 1)] - memory_context = "\nYour response should include the following memory content:\n" + "\n".join( - memory_strings - ) - - await model_context.add_message(SystemMessage(content=memory_context)) - - return UpdateContextResult(success=True, memories=query_results) - except Exception as e: - logger.error(f"Error during Pinecone update_context: {e}") - return UpdateContextResult(success=False, error=str(e), memories=MemoryQueryResult(results=[])) - - async def query( - self, - query: str | MemoryContent, - cancellation_token: CancellationToken | None = None, - **kwargs: Any, - ) -> MemoryQueryResult: - """Query the memory with a specific string or MemoryContent.""" - query_text = "" - if isinstance(query, str): - query_text = query - elif isinstance(query, MemoryContent) and query.mime_type == MemoryMimeType.TEXT: - query_text = query.content - else: - logger.error("Query must be a string or text MemoryContent.") - raise ValueError("Query must be a string or text MemoryContent.") - - if not query_text: - return MemoryQueryResult(results=[]) - - if cancellation_token and cancellation_token.is_cancelled: - logger.info("Query cancelled.") - return MemoryQueryResult(results=[]) - - try: - query_response = await asyncio.to_thread( - self._index.search, - namespace=self._config.namespace, - query={"inputs": {"text": query_text}, "top_k": self._config.top_k}, - fields=self._config.record_fields, - ) - - results: List[MemoryContent] = [] - if query_response and "result" in query_response: - for match in query_response.result.hits: - hit: Hit = match - score = hit.get("_score") - # Ignore hits with a score below the threshold - if score and score < self._config.score_threshold: - continue - - if not self._config.record_fields: - # if no record fields are specified initially, use all fields - logger.info("No record fields specified, using all fields.") - self._config.record_fields = list(hit.fields.keys()) - - for field in self._config.record_fields: - # For each hit, we get the text from record_fields, and store the remaining fields in metadata - text = hit.fields.get(field) - metadata = {k: v for k, v in hit.fields.items() if k != field} - results.append(MemoryContent(content=text, mime_type=MemoryMimeType.TEXT, metadata=metadata)) - - if len(results) == 0: - logger.warning("No results found from Pinecone query.") - - return MemoryQueryResult(results=results) - except Exception as e: - logger.error(f"Error during Pinecone query: {e}") - raise e - - async def add( - self, content: MemoryContent | Sequence[MemoryContent], cancellation_token: CancellationToken | None = None - ) -> None: - pass - - async def reset(self) -> None: - """Reset the memory by deleting all data in the specified namespace (or the whole index if no namespace).""" - pass - - async def close(self) -> None: - """Clean up Pinecone client and resources.""" - self._pc = None - self._index = None - logger.info("PineconeMemory closed.") - - async def clear(self) -> None: - """Clear the memory by deleting all data in the specified namespace (or the whole index if no namespace).""" - pass - - def _to_config(self) -> PineconeMemoryConfig: - """Serialize the memory configuration.""" - return self._config - - @classmethod - def _from_config(cls, config: PineconeMemoryConfig) -> Self: - """Deserialize the memory configuration.""" - return cls(config=config) diff --git a/python/src/kagent/models/vertexai/__init__.py b/python/src/kagent/models/vertexai/__init__.py deleted 
file mode 100644 index cbaec8fb1..000000000 --- a/python/src/kagent/models/vertexai/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from ._anthropic_vertex_client import AnthropicVertexAIChatCompletionClient -from ._gemini_vertexai_client import GeminiVertexAIChatCompletionClient -from .config import AnthropicVertexAIClientConfiguration, GeminiVertexAIClientConfiguration - -__all__ = [ - "GeminiVertexAIChatCompletionClient", - "AnthropicVertexAIChatCompletionClient", - "GeminiVertexAIClientConfiguration", - "AnthropicVertexAIClientConfiguration", -] diff --git a/python/src/kagent/models/vertexai/_anthropic_vertex_client.py b/python/src/kagent/models/vertexai/_anthropic_vertex_client.py deleted file mode 100644 index 5787ffaa2..000000000 --- a/python/src/kagent/models/vertexai/_anthropic_vertex_client.py +++ /dev/null @@ -1,128 +0,0 @@ -import inspect - -from anthropic import AsyncAnthropicVertex -from autogen_core import Component -from autogen_ext.models.anthropic import BaseAnthropicChatCompletionClient -from google.auth import load_credentials_from_dict -from typing_extensions import Any, Dict, Mapping, Optional, Self, Set, Unpack - -from ._model_info import ModelInfo, get_info -from .config import AnthropicVertexAIClientConfiguration - -# Common parameters for message creation -anthropic_message_params = { - "system", - "messages", - "max_tokens", - "temperature", - "top_p", - "top_k", - "stop_sequences", - "tools", - "tool_choice", - "stream", - "metadata", -} -disallowed_create_args = {"stream", "messages"} -required_create_args: Set[str] = {"model"} - -anthropic_init_kwargs = set(inspect.getfullargspec(AsyncAnthropicVertex.__init__).kwonlyargs) - - -def _create_args_from_config(config: Mapping[str, Any]) -> Dict[str, Any]: - create_args = {k: v for k, v in config.items() if k in anthropic_message_params or k == "model"} - create_args_keys = set(create_args.keys()) - - if not required_create_args.issubset(create_args_keys): - raise ValueError(f"Required create args are missing: {required_create_args - create_args_keys}") - - if disallowed_create_args.intersection(create_args_keys): - raise ValueError(f"Disallowed create args are present: {disallowed_create_args.intersection(create_args_keys)}") - - return create_args - - -def _anthropic_client_from_config(config: Mapping[str, Any]) -> AsyncAnthropicVertex: - # Filter config to only include valid parameters - client_config = {k: v for k, v in config.items() if k in anthropic_init_kwargs} - return AsyncAnthropicVertex(**client_config) - - -class AnthropicVertexAIChatCompletionClient( - BaseAnthropicChatCompletionClient, Component[AnthropicVertexAIClientConfiguration] -): - component_type = "model" - component_config_schema = AnthropicVertexAIClientConfiguration - component_provider_override = "kagent.models.vertexai.AnthropicVertexAIChatCompletionClient" - - def __init__(self, **kwargs: Unpack[AnthropicVertexAIClientConfiguration]): - if "model" not in kwargs: - raise ValueError("model is required for AnthropicVertexAIChatCompletionClient") - - self._raw_config: Dict[str, Any] = dict(kwargs).copy() - copied_args = dict(kwargs).copy() - - model_info: Optional[ModelInfo] = None - if "model_info" in kwargs: - model_info = kwargs["model_info"] - del copied_args["model_info"] - - if "model" in kwargs: - model_info = get_info(kwargs["model"]) - - if not model_info: - raise ValueError("model_info or model is required for AnthropicVertexAIChatCompletionClient") - - if "credentials" in kwargs: - credentials = kwargs["credentials"] - del 
copied_args["credentials"] - else: - raise ValueError("credentials is required for AnthropicVertexAIChatCompletionClient") - - if "project" in kwargs: - project = kwargs["project"] - del copied_args["project"] - else: - raise ValueError("project is required for AnthropicVertexAIChatCompletionClient") - - if "location" in kwargs: - location = kwargs["location"] - del copied_args["location"] - else: - raise ValueError("location is required for AnthropicVertexAIChatCompletionClient") - - # need to explicitly provide the scopes for the credentials, otherwise it will not work - google_creds = load_credentials_from_dict( - credentials, scopes=["https://www.googleapis.com/auth/cloud-platform"] - ) - - client = AsyncAnthropicVertex( - region=location, - project_id=project, - credentials=google_creds[0], - ) - create_args = _create_args_from_config(copied_args) - - super().__init__( - client=client, - create_args=create_args, - model_info=model_info, - ) - - def __getstate__(self) -> Dict[str, Any]: - state = self.__dict__.copy() - state["_client"] = None - return state - - def __setstate__(self, state: Dict[str, Any]) -> None: - self.__dict__.update(state) - self._client = _anthropic_client_from_config(state["_raw_config"]) - - def _to_config(self) -> AnthropicVertexAIClientConfiguration: - copied_config = self._raw_config.copy() - return AnthropicVertexAIClientConfiguration(**copied_config) - - @classmethod - def _from_config(cls, config: AnthropicVertexAIClientConfiguration) -> Self: - copied_config = config.model_copy().model_dump(exclude_none=True) - return cls(**copied_config) diff --git a/python/src/kagent/models/vertexai/_gemini_vertexai_client.py b/python/src/kagent/models/vertexai/_gemini_vertexai_client.py deleted file mode 100644 index 71d75c079..000000000 --- a/python/src/kagent/models/vertexai/_gemini_vertexai_client.py +++ /dev/null @@ -1,705 +0,0 @@ -import asyncio -import base64 -import json -import logging -import re -import warnings -from typing import ( - Any, - AsyncGenerator, - Dict, - List, - Mapping, - Optional, - Sequence, - Union, - Unpack, -) - -from autogen_core import ( - EVENT_LOGGER_NAME, - TRACE_LOGGER_NAME, - CancellationToken, - Component, - FunctionCall, -) -from autogen_core.logging import LLMCallEvent, LLMStreamEndEvent, LLMStreamStartEvent # type: ignore -from autogen_core.models import ( - AssistantMessage, - ChatCompletionClient, - CreateResult, - FinishReasons, - FunctionExecutionResultMessage, - LLMMessage, - ModelCapabilities, # type: ignore - ModelInfo, - RequestUsage, - SystemMessage, - UserMessage, - validate_model_info, -) -from autogen_core.tools import Tool, ToolSchema -from google import genai -from google.auth import load_credentials_from_dict -from google.genai import types as genai_types -from google.genai.types import Content, GenerationConfig, Part -from pydantic import BaseModel, SecretStr - -from ._model_info import get_info, get_token_limit -from .config import GeminiVertexAIClientConfiguration - - -# Name validation for Gemini tools -def normalize_gemini_name(name: str) -> str: - """Normalize names by replacing invalid characters with underscore for Gemini tools.""" - return re.sub(r"[^a-zA-Z0-9_]", "_", name)[:63] # Gemini limit seems to be 63 chars - - -def assert_valid_gemini_name(name: str) -> str: - """Ensure that configured names are valid for Gemini, raises ValueError if not.""" - if not re.match(r"^[a-zA-Z0-9_]{1,63}$", name): - raise ValueError( - f"Invalid Gemini tool/function name: {name}. 
Must be 1-63 chars, letters, numbers, or underscores." - ) - return name - - -logger = logging.getLogger(EVENT_LOGGER_NAME) -trace_logger = logging.getLogger(TRACE_LOGGER_NAME) - - -def _add_usage(usage1: RequestUsage, usage2: RequestUsage) -> RequestUsage: - return RequestUsage( - prompt_tokens=(usage1.prompt_tokens or 0) + (usage2.prompt_tokens or 0), - completion_tokens=(usage1.completion_tokens or 0) + (usage2.completion_tokens or 0), - ) - - -def _normalize_gemini_finish_reason(reason: Optional[genai_types.FinishReason]) -> FinishReasons: - if reason is None: - return "unknown" - mapping = { - genai_types.FinishReason.FINISH_REASON_UNSPECIFIED: "unknown", - genai_types.FinishReason.STOP: "stop", - genai_types.FinishReason.MAX_TOKENS: "length", - genai_types.FinishReason.SAFETY: "content_filter", - genai_types.FinishReason.RECITATION: "content_filter", # Similar to content filter - genai_types.FinishReason.OTHER: "unknown", - } - return mapping.get(reason, "unknown") - - -class GeminiVertexAIChatCompletionClient(ChatCompletionClient, Component[GeminiVertexAIClientConfiguration]): - component_type = "model" - component_config_schema = GeminiVertexAIClientConfiguration - component_provider_override = "kagent.models.vertexai.GeminiVertexAIChatCompletionClient" - - def __init__(self, **kwargs: Unpack[GeminiVertexAIClientConfiguration]): - resolved_config = GeminiVertexAIClientConfiguration(**kwargs) - - self._model_name = resolved_config.model - self._raw_config: Dict[str, Any] = resolved_config.model_dump(warnings=False) - - client_options_dict: Dict[str, Any] = {} - if not resolved_config.project or not resolved_config.location or not resolved_config.credentials: - raise ValueError("project, location, and credentials are required for Vertex AI.") - - # need to explicitly provide the scopes for the credentials, otherwise it will not work - google_creds = load_credentials_from_dict( - resolved_config.credentials, scopes=["https://www.googleapis.com/auth/cloud-platform"] - ) - client_options_dict["credentials"] = google_creds[0] - client_options_dict["project"] = resolved_config.project - client_options_dict["location"] = resolved_config.location - client_options_dict["vertexai"] = True - - self._client = genai.Client(**client_options_dict) - - if resolved_config.model_info_override: - self._model_info = resolved_config.model_info_override - else: - self._model_info = get_info(self._model_name) - validate_model_info(self._model_info) - - self._create_args = GenerationConfig( - **{ - k: v - for k, v in { - "temperature": resolved_config.temperature, - "top_p": resolved_config.top_p, - "top_k": resolved_config.top_k, - "max_output_tokens": resolved_config.max_output_tokens, - "stop_sequences": resolved_config.stop_sequences, - }.items() - if v is not None - } - ) - - self._total_usage = RequestUsage(prompt_tokens=0, completion_tokens=0) - self._actual_usage = RequestUsage(prompt_tokens=0, completion_tokens=0) - self._last_used_tools: Optional[List[genai_types.Tool]] = None - - def _convert_message_to_gemini(self, message: LLMMessage) -> Optional[Content | List[Content]]: - """Converts a single LLMMessage to Gemini Content or list of Contents.""" - parts: List[Part] = [] - role: str = "user" - - if isinstance(message, UserMessage): - role = "user" - if isinstance(message.content, str): - parts.append(Part(text=message.content if message.content.strip() else " ")) - elif isinstance(message.content, list): - for item in message.content: - if isinstance(item, str): - parts.append(Part(text=item 
if item.strip() else " ")) - # TODO: Add support for images - else: - logger.warning(f"Unsupported content type in UserMessage: {type(item)}") - return Content(parts=parts, role=role) - - elif isinstance(message, AssistantMessage): - role = "model" - if isinstance(message.content, str): - parts.append(Part(text=message.content)) - elif isinstance(message.content, list): - for func_call in message.content: - if isinstance(func_call, FunctionCall): - args = func_call.arguments - try: - args_dict = json.loads(args) if isinstance(args, str) else args - except json.JSONDecodeError: - args_dict = {"_raw_arguments": args} - logger.warning( - f"Function call arguments for {func_call.name} are not valid JSON. Passing as raw string." - ) - - parts.append( - Part( - function_call=genai_types.FunctionCall( - name=normalize_gemini_name(func_call.name), args=args_dict - ) - ) - ) - else: - logger.warning(f"Unsupported content type in AssistantMessage list: {type(func_call)}") - return Content(parts=parts, role=role) - - elif isinstance(message, FunctionExecutionResultMessage): - gemini_parts: List[Part] = [] - for result in message.content: - try: - content_value = json.loads(result.content) if isinstance(result.content, str) else result.content - except json.JSONDecodeError: - content_value = {"_raw_content": str(result.content)} - - gemini_parts.append( - Part( - function_response=genai_types.FunctionResponse( - name=normalize_gemini_name(result.name), # Name of the function that was called - response={"content": content_value}, # Gemini expects a dict, 'content' is a common key - ) - ) - ) - return Content(parts=gemini_parts, role="user") - - elif isinstance(message, SystemMessage): - return None - - def _convert_tools_to_gemini(self, tools: Sequence[Tool | ToolSchema]) -> Optional[List[genai_types.Tool]]: - if not tools: - return None - - gemini_tools: List[genai_types.FunctionDeclaration] = [] - for tool_spec in tools: - schema: ToolSchema - if isinstance(tool_spec, Tool): - schema = tool_spec.schema - else: # It's a dict (ToolSchema) - schema = tool_spec - - assert_valid_gemini_name(schema["name"]) - - parameters_schema: Optional[genai_types.Schema] = None - if "parameters" in schema and schema["parameters"]: - raw_params = schema["parameters"] - - def to_gemini_schema(json_schema_props: Dict[str, Any]) -> genai_types.Schema: - type_mapping = { - "string": genai_types.Type.STRING, - "number": genai_types.Type.NUMBER, # float/double - "integer": genai_types.Type.INTEGER, - "boolean": genai_types.Type.BOOLEAN, - "object": genai_types.Type.OBJECT, - "array": genai_types.Type.ARRAY, - } - - gemini_type = type_mapping.get(json_schema_props.get("type", "object").lower()) - if gemini_type is None: - logger.warning( - f"Unsupported schema type: {json_schema_props.get('type')}. Defaulting to STRING." 
- ) - gemini_type = genai_types.Type.STRING - - props = None - if "properties" in json_schema_props and json_schema_props["properties"]: - props = {k: to_gemini_schema(v) for k, v in json_schema_props["properties"].items()} - - items_schema = None - if ( - "items" in json_schema_props - and json_schema_props["items"] - and gemini_type == genai_types.Type.ARRAY - ): - items_schema = to_gemini_schema(json_schema_props["items"]) - - return genai_types.Schema( - type=gemini_type, - description=json_schema_props.get("description", ""), - properties=props, - required=json_schema_props.get("required", None), - items=items_schema, - enum=json_schema_props.get("enum", None), - ) - - if raw_params.get("type") == "object" and "properties" in raw_params: - parameters_schema = to_gemini_schema(raw_params) - else: - logger.warning( - f"Tool parameters for {schema['name']} are not a simple object schema, might not be fully compatible." - ) - parameters_schema = genai_types.Schema(type=genai_types.Type.OBJECT) - - gemini_tools.append( - genai_types.FunctionDeclaration( - name=normalize_gemini_name(schema["name"]), - description=schema.get("description", ""), - parameters=parameters_schema, - ) - ) - return [genai_types.Tool(function_declarations=gemini_tools)] if gemini_tools else None - - async def create( - self, - messages: Sequence[LLMMessage], - *, - tools: Sequence[Tool | ToolSchema] = [], - json_output: Optional[bool | type[BaseModel]] = None, - extra_create_args: Mapping[str, Any] = {}, - cancellation_token: Optional[CancellationToken] = None, - ) -> CreateResult: - if self._model_info.get("function_calling", False) is False and len(tools) > 0: - raise ValueError("Model does not support function calling/tools, but tools were provided.") - - final_create_args = self._create_args.model_copy() - allowed_extra_keys = { - "temperature", - "top_p", - "top_k", - "max_output_tokens", - "stop_sequences", - "response_mime_type", - "candidate_count", - } - for k, v in extra_create_args.items(): - if k in allowed_extra_keys: - setattr(final_create_args, k, v) - else: - logger.warning(f"Unsupported extra_create_arg: {k}") - - if json_output: - if self._model_info.get("json_output", False) is False and json_output is True: - logger.warning( - "Model's declared json_output capability is False, but JSON output was requested. Attempting anyway." - ) - if json_output is True: - final_create_args.response_mime_type = "application/json" - elif isinstance(json_output, type) and issubclass(json_output, BaseModel): - logger.warning( - "Pydantic model-based JSON output is not yet fully implemented for Gemini. Use json_output=True for generic JSON." 
- ) - final_create_args.response_mime_type = "application/json" - - system_instruction_content: Optional[str] = None - gemini_contents: List[Content] = [] - merged_system_message_str = "" - regular_messages: List[LLMMessage] = [] - first_system_idx = -1 - - for idx, msg in enumerate(messages): - if isinstance(msg, SystemMessage): - merged_system_message_str += (msg.content if msg.content.strip() else " ") + "\n" - if first_system_idx == -1: - first_system_idx = idx - else: - regular_messages.append(msg) - - if merged_system_message_str: - system_instruction_content = merged_system_message_str.strip() - - for i, autogen_msg in enumerate(regular_messages): - converted_msg_obj = self._convert_message_to_gemini(autogen_msg) - if isinstance(converted_msg_obj, Content): - if converted_msg_obj.role not in ["user", "model"]: - logger.warning( - f"Message role '{converted_msg_obj.role}' not 'user' or 'model'. Adjusting to 'user'." - ) - converted_msg_obj.role = "user" - - if ( - i == len(regular_messages) - 1 - and converted_msg_obj.role == "user" - and not any(p.text or p.inline_data for p in converted_msg_obj.parts if p) - ): # check if parts exist - logger.warning("Last message is an empty user message. This might cause issues.") - gemini_contents.append(converted_msg_obj) - elif isinstance(converted_msg_obj, list): - gemini_contents.extend(converted_msg_obj) - - gemini_tools_converted = self._convert_tools_to_gemini(tools) - self._last_used_tools = gemini_tools_converted - - gen_content_config = genai_types.GenerateContentConfig( - system_instruction=system_instruction_content if system_instruction_content else None, - temperature=final_create_args.temperature, - top_p=final_create_args.top_p, - top_k=final_create_args.top_k, - max_output_tokens=final_create_args.max_output_tokens, - tools=gemini_tools_converted if gemini_tools_converted else None, - ) - logger.info( - LLMCallEvent( - messages=[msg.model_dump_json() for msg in messages], - response=None, - prompt_tokens=None, - completion_tokens=None, - ) - ) - - api_task = asyncio.ensure_future( - self._client.aio.models.generate_content( - model=self._model_name, contents=gemini_contents, config=gen_content_config - ) - ) - - if cancellation_token: - cancellation_token.link_future(api_task) - - try: - response: genai_types.GenerateContentResponse = await api_task - except Exception as e: - logger.error(f"Gemini API call failed: {e}") - raise - - prompt_tokens_val = response.usage_metadata.prompt_token_count if response.usage_metadata else 0 - completion_tokens_val = response.usage_metadata.candidates_token_count if response.usage_metadata else 0 - usage = RequestUsage( - prompt_tokens=prompt_tokens_val, - completion_tokens=completion_tokens_val, - ) - self._total_usage = _add_usage(self._total_usage, usage) - self._actual_usage = _add_usage(self._actual_usage, usage) - - logger.info( - LLMCallEvent( - messages=None, - response=response.to_dict(), - prompt_tokens=usage.prompt_tokens, - completion_tokens=usage.completion_tokens, - ) - ) - - if not response.candidates: - prompt_feedback_info = response.prompt_feedback if response.prompt_feedback else "No specific feedback." - logger.warning(f"Gemini response has no candidates. 
Prompt feedback: {prompt_feedback_info}") - finish_reason_from_feedback: FinishReasons = "unknown" - if response.prompt_feedback and response.prompt_feedback.block_reason: - finish_reason_from_feedback = "content_filter" - - return CreateResult( - finish_reason=finish_reason_from_feedback, content="", usage=usage, cached=False, thought=None - ) - - candidate = response.candidates[0] - finish_reason = _normalize_gemini_finish_reason(candidate.finish_reason) - final_content: Union[str, List[FunctionCall]] - thought_content: Optional[str] = None - - function_calls_parts = [part for part in candidate.content.parts if part.function_call] - if function_calls_parts: - autogen_fcs: List[FunctionCall] = [] - for part_fc in function_calls_parts: - fc = part_fc.function_call - normalized_name = normalize_gemini_name(fc.name) - autogen_fcs.append( - FunctionCall( - id=f"call_{normalized_name}_{len(autogen_fcs)}", - name=normalized_name, - arguments=json.dumps(fc.args) if fc.args else "{}", - ) - ) - final_content = autogen_fcs - text_parts = [part.text for part in candidate.content.parts if hasattr(part, "text") and part.text] - if text_parts: - thought_content = "\n".join(text_parts).strip() - else: - all_text_parts = [part.text for part in candidate.content.parts if hasattr(part, "text") and part.text] - final_content = "".join(all_text_parts) - if final_create_args.response_mime_type == "application/json" and isinstance(final_content, str): - try: - json.loads(final_content) - except json.JSONDecodeError: - logger.warning("JSON output was requested, but the response is not valid JSON.") - - return CreateResult( - finish_reason=finish_reason, content=final_content, usage=usage, cached=False, thought=thought_content - ) - - async def create_stream( - self, - messages: Sequence[LLMMessage], - *, - tools: Sequence[Tool | ToolSchema] = [], - json_output: Optional[bool | type[BaseModel]] = None, - extra_create_args: Mapping[str, Any] = {}, - cancellation_token: Optional[CancellationToken] = None, - ) -> AsyncGenerator[Union[str, CreateResult], None]: - if self._model_info.get("function_calling", False) is False and len(tools) > 0: - raise ValueError("Model does not support function calling/tools, but tools were provided.") - - final_create_args = self._create_args.model_copy() - allowed_extra_keys = { - "temperature", - "top_p", - "top_k", - "max_output_tokens", - "stop_sequences", - "response_mime_type", - "candidate_count", - } - for k, v in extra_create_args.items(): - if k in allowed_extra_keys: - setattr(final_create_args, k, v) - else: - logger.warning(f"Unsupported extra_create_arg for stream: {k}") - - if json_output: - if self._model_info.get("json_output", False) is False and json_output is True: - logger.warning( - "Model's declared json_output capability is False, but JSON output was requested for stream. Attempting anyway." - ) - if json_output is True: - final_create_args.response_mime_type = "application/json" - elif isinstance(json_output, type) and issubclass(json_output, BaseModel): - logger.warning( - "Pydantic model-based JSON output is not yet fully implemented for Gemini stream. Use json_output=True." 
- ) - final_create_args.response_mime_type = "application/json" - - system_instruction_content: Optional[str] = None - gemini_contents: List[Content] = [] - merged_system_message_str = "" - regular_messages: List[LLMMessage] = [] - first_system_idx = -1 - - for idx, msg in enumerate(messages): - if isinstance(msg, SystemMessage): - merged_system_message_str += (msg.content if msg.content.strip() else " ") + "\n" - if first_system_idx == -1: - first_system_idx = idx - else: - regular_messages.append(msg) - if merged_system_message_str: - system_instruction_content = merged_system_message_str.strip() - - for autogen_msg in regular_messages: - converted_msg_obj = self._convert_message_to_gemini(autogen_msg) - if isinstance(converted_msg_obj, Content): - if converted_msg_obj.role not in ["user", "model"]: - converted_msg_obj.role = "user" - gemini_contents.append(converted_msg_obj) - - gemini_tools_converted = self._convert_tools_to_gemini(tools) - self._last_used_tools = gemini_tools_converted - - logger.info(LLMStreamStartEvent(messages=[msg.model_dump_json() for msg in messages])) - - gen_content_config = genai_types.GenerateContentConfig( - system_instruction=system_instruction_content if system_instruction_content else None, - temperature=final_create_args.temperature, - top_p=final_create_args.top_p, - top_k=final_create_args.top_k, - max_output_tokens=final_create_args.max_output_tokens, - tools=gemini_tools_converted if gemini_tools_converted else None, - ) - - stream_api_task = self._client.aio.models.generate_content_stream( - model=self._model_name, contents=gemini_contents, config=gen_content_config - ) - - if cancellation_token: - cancellation_token.link_future(stream_api_task) # type: ignore - - accumulated_text_parts: List[str] = [] - - final_fcs_list: List[FunctionCall] = [] - - prompt_tokens_val = 0 - completion_tokens_val = 0 - final_finish_reason: FinishReasons = "unknown" - - try: - async for chunk in await stream_api_task: - if chunk.usage_metadata: - if chunk.usage_metadata.prompt_token_count: - prompt_tokens_val = chunk.usage_metadata.prompt_token_count - if chunk.usage_metadata.candidates_token_count: - completion_tokens_val = chunk.usage_metadata.candidates_token_count - - if chunk.candidates: - candidate_chunk = chunk.candidates[0] - if candidate_chunk.finish_reason: - final_finish_reason = _normalize_gemini_finish_reason(candidate_chunk.finish_reason) - - if candidate_chunk.content: - for part in candidate_chunk.content.parts: - if hasattr(part, "text") and part.text: - yield part.text - accumulated_text_parts.append(part.text) - - if hasattr(part, "function_call") and part.function_call: - fc_chunk = part.function_call - final_fcs_list.append( - FunctionCall( - id=f"call_{normalize_gemini_name(fc_chunk.name)}_{len(final_fcs_list)}", - name=normalize_gemini_name(fc_chunk.name), - arguments=json.dumps(fc_chunk.args) if fc_chunk.args else "{}", - ) - ) - - except Exception as e: - logger.error(f"Gemini stream API call failed: {e}") - raise - - usage = RequestUsage(prompt_tokens=prompt_tokens_val, completion_tokens=completion_tokens_val) - self._total_usage = _add_usage(self._total_usage, usage) - self._actual_usage = _add_usage(self._actual_usage, usage) - - final_response_content: Union[str, List[FunctionCall]] - thought_stream: Optional[str] = None - - if final_fcs_list: - final_response_content = final_fcs_list - if accumulated_text_parts: - thought_stream = "".join(accumulated_text_parts) - else: - final_response_content = "".join(accumulated_text_parts) - if 
final_create_args.response_mime_type == "application/json" and isinstance(final_response_content, str): - try: - json.loads(final_response_content) - except json.JSONDecodeError: - logger.warning("Streamed JSON output was requested, but the final response is not valid JSON.") - - final_result_obj = CreateResult( - finish_reason=final_finish_reason, - content=final_response_content, - usage=usage, - cached=False, - thought=thought_stream, - ) - - logger.info( - LLMStreamEndEvent( - response=final_result_obj.model_dump(), - prompt_tokens=usage.prompt_tokens, - completion_tokens=usage.completion_tokens, - ) - ) - yield final_result_obj - - async def close(self) -> None: - await self._client.close() - - def actual_usage(self) -> RequestUsage: - return self._actual_usage - - def total_usage(self) -> RequestUsage: - return self._total_usage - - async def count_tokens(self, messages: Sequence[LLMMessage], *, tools: Sequence[Tool | ToolSchema] = []) -> int: - gemini_contents_for_count: List[Content] = [] - for autogen_msg in messages: - if not isinstance(autogen_msg, SystemMessage): - converted_msg_obj = self._convert_message_to_gemini(autogen_msg) - if isinstance(converted_msg_obj, Content): - gemini_contents_for_count.append(converted_msg_obj) - - # TODO: Investigate how Gemini's count_tokens accounts for tools. - if not gemini_contents_for_count: - return 0 - - try: - response = self._client.models.count_tokens(model=self._model_name, contents=gemini_contents_for_count) - return response.total_tokens - except Exception as e: - logger.warning(f"Token counting failed: {e}. Returning 0.") - return 0 - - def remaining_tokens(self, messages: Sequence[LLMMessage], *, tools: Sequence[Tool | ToolSchema] = []) -> int: - token_limit = get_token_limit(self._model_name) - if not isinstance(token_limit, int) or token_limit <= 0: - logger.warning( - f"Cannot calculate remaining tokens: token_limit not available or invalid in model_info for {self._model_name}." 
- ) - return 0 - - counted_tokens = self.count_tokens(messages, tools=tools) - return token_limit - counted_tokens - - @property - def model_info(self) -> ModelInfo: - return self._model_info - - def __getstate__(self) -> Dict[str, Any]: - state = self.__dict__.copy() - state["_client"] = None - return state - - def __setstate__(self, state: Dict[str, Any]) -> None: - self.__dict__.update(state) - resolved_config_from_raw = GeminiVertexAIClientConfiguration(**self._raw_config) - - client_options_dict: Dict[str, Any] = {} - if resolved_config_from_raw.api_key: - client_options_dict["api_key"] = resolved_config_from_raw.api_key.get_secret_value() - - if resolved_config_from_raw.vertexai: - if not resolved_config_from_raw.project or not resolved_config_from_raw.location: - raise ValueError("project and location are required for Vertex AI.") - if resolved_config_from_raw.credentials: - client_options_dict["credentials"] = resolved_config_from_raw.credentials - else: - raise ValueError("credentials are required for Vertex AI.") - - self._client = genai.Client(**client_options_dict) - - @property - def capabilities(self) -> ModelCapabilities: # type: ignore - warnings.warn( - "capabilities is deprecated, use model_info instead", - DeprecationWarning, - stacklevel=2, - ) - return self._model_info - - @classmethod - def _from_config(cls, config: GeminiVertexAIClientConfiguration) -> "GeminiVertexAIChatCompletionClient": - copied_config = config.model_copy().model_dump(exclude_none=True) - - if "api_key" in copied_config and isinstance(copied_config["api_key"], str): - copied_config["api_key"] = SecretStr(copied_config["api_key"]) - - return cls(**copied_config) - - def _to_config(self) -> GeminiVertexAIClientConfiguration: - config_data = self._raw_config.copy() - return GeminiVertexAIClientConfiguration(**config_data) diff --git a/python/src/kagent/models/vertexai/_model_info.py b/python/src/kagent/models/vertexai/_model_info.py deleted file mode 100644 index 5cdbe155c..000000000 --- a/python/src/kagent/models/vertexai/_model_info.py +++ /dev/null @@ -1,146 +0,0 @@ -from typing import Dict - -from autogen_core.models import ModelInfo - -from .types import ModelInfoDict - -# https://ai.google.dev/gemini-api/docs/models -_MODEL_INFO: ModelInfoDict = { - "gemini-2.5-flash": { - "vision": False, - "function_calling": True, - "json_output": True, - "family": "gemini-2.5-flash", - "structured_output": True, - "multiple_system_messages": False, - }, - "gemini-2.5-pro": { - "vision": False, - "function_calling": True, - "json_output": True, - "family": "gemini-2.5-pro", - "structured_output": True, - "multiple_system_messages": False, - }, - "gemini-2.5-flash-lite-preview-06-17": { - "vision": False, - "function_calling": True, - "json_output": True, - "family": "gemini-2.5-flash", - "structured_output": True, - "multiple_system_messages": False, - }, - "gemini-2.0-flash": { - "vision": False, - "function_calling": True, - "json_output": True, - "family": "gemini-2.0-flash", - "structured_output": True, - "multiple_system_messages": False, - }, - "gemini-2.0-flash-lite": { - "vision": False, - "function_calling": True, - "json_output": True, - "family": "gemini-2.0-flash", - "structured_output": True, - "multiple_system_messages": False, - }, - # Anthropic - "claude-sonnet-4@20250514": { - "vision": False, - "function_calling": True, - "json_output": True, - "family": "claude-sonnet-4", - "structured_output": True, - "multiple_system_messages": False, - }, - "claude-opus-4@20250514": { - "vision": False, 
- "function_calling": True, - "json_output": True, - "family": "claude-opus-4", - "structured_output": True, - "multiple_system_messages": False, - }, - "claude-3-7-sonnet@20250219": { - "vision": False, - "function_calling": True, - "json_output": True, - "family": "claude-3-7-sonnet", - "structured_output": True, - "multiple_system_messages": False, - }, - "claude-3-5-sonnet-v2@20241022": { - "vision": False, - "function_calling": True, - "json_output": True, - "family": "claude-3-5-sonnet-v2", - "structured_output": True, - "multiple_system_messages": False, - }, - "claude-3-5-haiku@20241022": { - "vision": False, - "function_calling": True, - "json_output": True, - "family": "claude-3-5-haiku", - "structured_output": True, - "multiple_system_messages": False, - }, - "claude-3-opus@20240229": { - "vision": False, - "function_calling": True, - "json_output": True, - "family": "claude-3-opus", - "structured_output": True, - }, - "claude-3-haiku@20240307": { - "vision": False, - "function_calling": True, - "json_output": True, - "family": "claude-3-haiku", - "structured_output": True, - "multiple_system_messages": False, - }, - "claude-3-5-sonnet@20240620": { - "vision": False, - "function_calling": True, - "json_output": True, - "family": "claude-3-5-sonnet", - "structured_output": True, - "multiple_system_messages": False, - }, -} - -# Model token limits (context window size) -_MODEL_TOKEN_LIMITS: Dict[str, int] = { - "gemini-2.5-flash": 1_048_576, - "gemini-2.5-pro": 1_048_576, - "gemini-2.5-flash-lite-preview-06-17": 1_048_576, - "gemini-2.0-flash": 1_048_576, - "gemini-2.0-flash-lite": 1_048_576, - "claude-sonnet-4@20250514": 64_000, - "claude-opus-4@20250514": 32_000, - "claude-3-7-sonnet@20250219": 200_000, - "claude-3-5-sonnet-v2@20241022": 200_000, - "claude-3-5-haiku@20241022": 200_000, - "claude-3-opus@20240229": 200_000, - "claude-3-haiku@20240307": 200_000, - "claude-3-5-sonnet@20240620": 200_000, -} - - -def get_info(model: str) -> ModelInfo: - """Get the model information for a specific model.""" - # Check for exact match first - if model in _MODEL_INFO: - return _MODEL_INFO[model] - raise KeyError(f"Model '{model}' not found in model info") - - -def get_token_limit(model: str) -> int: - """Get the token limit for a specific model.""" - # Check for exact match first - if model in _MODEL_TOKEN_LIMITS: - return _MODEL_TOKEN_LIMITS[model] - return 100000 diff --git a/python/src/kagent/models/vertexai/config/__init__.py b/python/src/kagent/models/vertexai/config/__init__.py deleted file mode 100644 index 4deed1e1e..000000000 --- a/python/src/kagent/models/vertexai/config/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -from typing import List, Optional - -from autogen_ext.models.anthropic.config import AnthropicClientConfigurationConfigModel -from pydantic import BaseModel, Field - -from ..types import ModelInfo - - -class VertexAIClientConfiguration(BaseModel): - model: str = Field(description="Name of the Vertex AI model to use, e.g., 'gemini-1.5-pro-latest'.") - credentials: Optional[dict] = Field(default=None, description="Google Cloud credentials file path.") - project: Optional[str] = Field(default=None, description="Google Cloud Project ID (required for Vertex AI).") - location: Optional[str] = Field(default=None, description="Google Cloud Project Location (required for Vertex AI).") - - -class GeminiVertexAIClientConfiguration(VertexAIClientConfiguration): - temperature: Optional[float] = Field( - default=None, ge=0.0, le=2.0, description="Controls randomness. 
Lower for less random, higher for more." - ) - top_p: Optional[float] = Field(default=None, ge=0.0, le=1.0, description="Nucleus sampling parameter.") - top_k: Optional[int] = Field(default=None, ge=0, description="Top-k sampling parameter.") - max_output_tokens: Optional[int] = Field(default=None, ge=1, description="Maximum number of tokens to generate.") - candidate_count: Optional[int] = Field(default=None, ge=1, description="Number of candidate responses to generate.") - response_mime_type: Optional[str] = Field(default=None, description="Response MIME type.") - stop_sequences: Optional[List[str]] = Field(default=None, description="Stop sequences.") - - model_info_override: Optional[ModelInfo] = Field( - default=None, description="Optional override for model capabilities and information." - ) - - -class AnthropicVertexAIClientConfiguration(VertexAIClientConfiguration, AnthropicClientConfigurationConfigModel): - pass - - -__all__ = ["GeminiVertexAIClientConfiguration", "AnthropicVertexAIClientConfiguration"] diff --git a/python/src/kagent/models/vertexai/types.py b/python/src/kagent/models/vertexai/types.py deleted file mode 100644 index fe872cff0..000000000 --- a/python/src/kagent/models/vertexai/types.py +++ /dev/null @@ -1,6 +0,0 @@ -from typing import Dict - -from autogen_core.models import ModelInfo - -# Common types used across the vertexai module -ModelInfoDict = Dict[str, ModelInfo] diff --git a/python/src/kagent/py.typed b/python/src/kagent/py.typed deleted file mode 100644 index e69de29bb..000000000 diff --git a/python/src/kagent/tool_servers/__init__.py b/python/src/kagent/tool_servers/__init__.py deleted file mode 100644 index 998ccd1b1..000000000 --- a/python/src/kagent/tool_servers/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -from ._ssemcptoolserver import SseMcpToolServer, SseMcpToolServerConfig -from ._stdiomcptoolserver import StdioMcpToolServer, StdioMcpToolServerConfig -from ._streamable_http_mcp_tool_server import StreamableHttpMcpToolServer, StreamableHttpMcpToolServerConfig -from ._tool_server import ToolServer - -__all__ = [ - "SseMcpToolServer", - "SseMcpToolServerConfig", - "StdioMcpToolServer", - "StdioMcpToolServerConfig", - "StreamableHttpMcpToolServer", - "StreamableHttpMcpToolServerConfig", - "ToolServer", -] diff --git a/python/src/kagent/tool_servers/_ssemcptoolserver.py b/python/src/kagent/tool_servers/_ssemcptoolserver.py deleted file mode 100644 index b47fb3e3d..000000000 --- a/python/src/kagent/tool_servers/_ssemcptoolserver.py +++ /dev/null @@ -1,34 +0,0 @@ -from autogen_core import Component -from autogen_ext.tools.mcp._config import SseServerParams -from autogen_ext.tools.mcp._factory import mcp_server_tools -from loguru import logger - -from ._tool_server import ToolServer - - -class SseMcpToolServerConfig(SseServerParams): - pass - - -class SseMcpToolServer(ToolServer, Component[SseMcpToolServerConfig]): - component_config_schema = SseMcpToolServerConfig - component_type = "tool_server" - component_provider_override = "kagent.tool_servers.SseMcpToolServer" - - def __init__(self, config: SseMcpToolServerConfig): - self.config = config - - async def discover_tools(self) -> list[Component]: - try: - logger.debug(f"Discovering tools from sse server: {self.config}") - tools = await mcp_server_tools(self.config) - return tools - except Exception as e: - raise Exception(f"Failed to discover tools: {e}") from e - - def _to_config(self) -> SseMcpToolServerConfig: - return SseMcpToolServerConfig(**self.config.model_dump()) - - @classmethod - def 
_from_config(cls, config: SseMcpToolServerConfig): - return cls(config) diff --git a/python/src/kagent/tool_servers/_stdiomcptoolserver.py b/python/src/kagent/tool_servers/_stdiomcptoolserver.py deleted file mode 100644 index 9ad06583c..000000000 --- a/python/src/kagent/tool_servers/_stdiomcptoolserver.py +++ /dev/null @@ -1,36 +0,0 @@ -from typing import Self - -from autogen_core import Component -from autogen_ext.tools.mcp._config import StdioServerParams -from autogen_ext.tools.mcp._factory import mcp_server_tools -from loguru import logger - -from ._tool_server import ToolServer - - -class StdioMcpToolServerConfig(StdioServerParams): - pass - - -class StdioMcpToolServer(ToolServer, Component[StdioMcpToolServerConfig]): - component_config_schema = StdioMcpToolServerConfig - component_type = "tool_server" - component_provider_override = "kagent.tool_servers.StdioMcpToolServer" - - def __init__(self, config: StdioMcpToolServerConfig): - self.config: StdioMcpToolServerConfig = config - - async def discover_tools(self) -> list[Component]: - try: - logger.debug(f"Discovering tools from stdio server: {self.config}") - tools = await mcp_server_tools(self.config) - return tools - except Exception as e: - raise Exception(f"Failed to discover tools: {e}") from e - - def _to_config(self) -> StdioMcpToolServerConfig: - return StdioMcpToolServerConfig(**self.config.model_dump()) - - @classmethod - def _from_config(cls, config: StdioMcpToolServerConfig) -> Self: - return cls(config) diff --git a/python/src/kagent/tool_servers/_streamable_http_mcp_tool_server.py b/python/src/kagent/tool_servers/_streamable_http_mcp_tool_server.py deleted file mode 100644 index 95d669912..000000000 --- a/python/src/kagent/tool_servers/_streamable_http_mcp_tool_server.py +++ /dev/null @@ -1,34 +0,0 @@ -from autogen_core import Component -from autogen_ext.tools.mcp._config import StreamableHttpServerParams -from autogen_ext.tools.mcp._factory import mcp_server_tools -from loguru import logger - -from ._tool_server import ToolServer - - -class StreamableHttpMcpToolServerConfig(StreamableHttpServerParams): - pass - - -class StreamableHttpMcpToolServer(ToolServer, Component[StreamableHttpMcpToolServerConfig]): - component_config_schema = StreamableHttpMcpToolServerConfig - component_type = "tool_server" - component_provider_override = "kagent.tool_servers.StreamableHttpMcpToolServer" - - def __init__(self, config: StreamableHttpMcpToolServerConfig): - self.config = config - - async def discover_tools(self) -> list[Component]: - try: - logger.debug(f"Discovering tools from streamable http server: {self.config}") - tools = await mcp_server_tools(self.config) - return tools - except Exception as e: - raise Exception(f"Failed to discover tools: {e}") from e - - def _to_config(self) -> StreamableHttpMcpToolServerConfig: - return StreamableHttpMcpToolServerConfig(**self.config.model_dump()) - - @classmethod - def _from_config(cls, config: StreamableHttpMcpToolServerConfig): - return cls(config) diff --git a/python/src/kagent/tool_servers/_tool_server.py b/python/src/kagent/tool_servers/_tool_server.py deleted file mode 100644 index f9040a2c3..000000000 --- a/python/src/kagent/tool_servers/_tool_server.py +++ /dev/null @@ -1,13 +0,0 @@ -from abc import ABC -from typing import Protocol - -from autogen_core import Component, ComponentBase -from pydantic import BaseModel - - -class ToolDiscovery(Protocol): - async def discover_tools(self) -> list[Component]: ... 
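# --- editor's aside, not part of the patch: a minimal sketch (under stated assumptions)
# --- of how the ToolServer/ToolDiscovery contract deleted here was consumed. Each
# --- concrete server removed in this change (SseMcpToolServer, StdioMcpToolServer,
# --- StreamableHttpMcpToolServer) wraps autogen_ext's mcp_server_tools() behind the
# --- async discover_tools() hook declared just above. The endpoint URL is a placeholder
# --- and the imports assume the pre-removal kagent.tool_servers package layout.
import asyncio

from kagent.tool_servers import SseMcpToolServer, SseMcpToolServerConfig


async def main() -> None:
    # SseMcpToolServerConfig subclasses autogen_ext's SseServerParams, so it accepts
    # the same connection fields (url, headers, timeout, ...).
    config = SseMcpToolServerConfig(url="http://localhost:8000/sse")  # placeholder URL
    server = SseMcpToolServer(config)
    tools = await server.discover_tools()  # delegates to mcp_server_tools(config)
    print(f"discovered {len(tools)} MCP tools")


asyncio.run(main())
# --- end of editor's aside.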
- - -class ToolServer(ABC, ToolDiscovery, ComponentBase[BaseModel]): - component_type = "tool_server" diff --git a/python/tests/__init__.py b/python/tests/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/python/tests/_test_result.py b/python/tests/_test_result.py deleted file mode 100644 index 0f26d3eed..000000000 --- a/python/tests/_test_result.py +++ /dev/null @@ -1,129 +0,0 @@ -import json -import logging -from datetime import datetime -from pathlib import Path -from typing import Any, Dict - -logger = logging.getLogger(__name__) - -REPORT_DIR = Path(__file__).parent / "test_results" - - -class GenerateResourceTestResult: - """Class to hold test results for reporting.""" - - results = [] - - @classmethod - def add_result( - cls, - test_name: str, - input_description: str, - resource_type: str, - expected_yaml: Dict[str, Any], - actual_yaml: Dict[str, Any], - similarity_score: float, - diff_details: str, - ): - """Add a result to the results list.""" - cls.results.append( - { - "test_name": test_name, - "input_description": input_description, - "resource_type": resource_type, - "expected_yaml": expected_yaml, - "actual_yaml": actual_yaml, - "similarity_score": similarity_score, - "diff_details": diff_details, - "timestamp": datetime.now().isoformat(), - } - ) - - @classmethod - def generate_report(cls, similarity_threshold: float): - """Generate a console-based report of test results.""" - if not cls.results: - logger.info("No test results to report.") - return - - # Calculate overall metrics - total_tests = len(cls.results) - avg_score = sum(r["similarity_score"] for r in cls.results) / total_tests if total_tests > 0 else 0 - - # Prepare a dictionary for JSON export - report_data = { - "timestamp": datetime.now().isoformat(), - "total_tests": total_tests, - "average_similarity": avg_score, - "results": cls.results, - } - - # Ensure test_results directory exists - REPORT_DIR.mkdir(exist_ok=True, parents=True) - - # Generate JSON report - timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") - json_path = REPORT_DIR / f"test_results_{timestamp}.json" - with open(json_path, "w") as f: - json.dump(report_data, f, indent=2) - - logger.info("\n" + "=" * 80) - logger.info("GENERATIVE RESOURCE TOOL - TEST RESULTS") - logger.info("=" * 80) - logger.info(f"Total Tests: {total_tests}") - logger.info(f"Average Similarity Score: {avg_score:.2%}") - logger.info("=" * 80) - - # Group results by resource type - results_by_type = {} - for result in cls.results: - resource_type = result["resource_type"] - if resource_type not in results_by_type: - results_by_type[resource_type] = [] - results_by_type[resource_type].append(result) - - for resource_type, type_results in results_by_type.items(): - logger.info(f"\nResource Type: {resource_type}") - logger.info("-" * 40) - - # Sort results by similarity score (descending) - sorted_results = sorted(type_results, key=lambda x: x["similarity_score"], reverse=True) - - for result in sorted_results: - # Determine color/status based on similarity score - if result["similarity_score"] >= 0.9: - status = "✓" # Checkmark - elif result["similarity_score"] >= 0.7: - status = "!" 
# Warning - else: - status = "✗" # Cross - - logger.info(f"{status} {result['test_name']}: {result['similarity_score']:.2%} match") - logger.info(f" Input: {result['input_description']}") - - if result["similarity_score"] < 0.9: - logger.info(" Differences:") - diff_lines = result["diff_details"].split("\n") - for line in diff_lines[:5]: # Limit to first 5 lines of diff - logger.info(f" {line}") - if len(diff_lines) > 5: - logger.info(" ...") - - # Identify and highlight failing tests - failing_tests = [r for r in cls.results if r["similarity_score"] < similarity_threshold] - if failing_tests: - logger.info("\n" + "=" * 80) - logger.info("FAILING TESTS") - logger.info("=" * 80) - for test in failing_tests: - logger.info(f"Test: {test['test_name']}") - logger.info(f"Resource Type: {test['resource_type']}") - logger.info(f"Similarity Score: {test['similarity_score']:.2%}") - logger.info(f"Input: {test['input_description']}") - logger.info("Detailed Differences:") - logger.info(test["diff_details"]) - logger.info("-" * 40) - - logger.info(f"\nFull report saved to: {json_path}") - - return report_data diff --git a/python/tests/_yaml_comparer.py b/python/tests/_yaml_comparer.py deleted file mode 100644 index 94495f62f..000000000 --- a/python/tests/_yaml_comparer.py +++ /dev/null @@ -1,124 +0,0 @@ -import difflib -from typing import Any, Dict, Tuple - -import yaml - - -class YAMLComparer: - """Class to compare YAML structures and compute similarity.""" - - @staticmethod - def _normalize_structure(yaml_dict: Dict[str, Any]) -> Dict[str, Any]: - """Normalize YAML structure to make comparison more meaningful.""" - if not isinstance(yaml_dict, dict): - return yaml_dict - - result = {} - - # Process each key-value pair - for key, value in yaml_dict.items(): - # Skip irrelevant metadata - if key == "status" or key in ["creationTimestamp", "generation"]: - continue - - # Process nested dictionaries - if isinstance(value, dict): - result[key] = YAMLComparer._normalize_structure(value) - # Process lists - elif isinstance(value, list): - # For simple lists of primitives, sort them - if all(not isinstance(item, (dict, list)) for item in value): - result[key] = sorted(value) - else: - # For lists of dictionaries, normalize each dictionary - normalized_list = [ - YAMLComparer._normalize_structure(item) if isinstance(item, dict) else item for item in value - ] - result[key] = normalized_list - else: - result[key] = value - - return result - - @staticmethod - def _convert_to_flat_dict(yaml_dict: Dict[str, Any], parent_key: str = "") -> Dict[str, Any]: - """Convert a nested YAML dict to a flat dictionary with dot-notation keys.""" - items = [] - for key, value in yaml_dict.items(): - new_key = f"{parent_key}.{key}" if parent_key else key - - if isinstance(value, dict): - items.extend(YAMLComparer._convert_to_flat_dict(value, new_key).items()) - elif isinstance(value, list): - if all(isinstance(item, dict) for item in value): - for i, item in enumerate(value): - list_key = f"{new_key}[{i}]" - items.extend(YAMLComparer._convert_to_flat_dict(item, list_key).items()) - else: - items.append((new_key, value)) - else: - items.append((new_key, value)) - - return dict(items) - - @staticmethod - def compute_similarity(expected: Dict[str, Any], actual: Dict[str, Any]) -> Tuple[float, str]: - """Compute similarity between two YAML structures and return a diff.""" - # Normalize structures - normalized_expected = YAMLComparer._normalize_structure(expected) - normalized_actual = YAMLComparer._normalize_structure(actual) - - # 
Convert to flat dictionaries - flat_expected = YAMLComparer._convert_to_flat_dict(normalized_expected) - flat_actual = YAMLComparer._convert_to_flat_dict(normalized_actual) - - # Get all unique keys - all_keys = set(flat_expected.keys()).union(set(flat_actual.keys())) - total_keys = len(all_keys) - - if total_keys == 0: - return 1.0, "Both YAMLs are empty" - - # Count matching keys - matching_keys = 0 - diff_details = [] - - for key in sorted(all_keys): - if key in flat_expected and key in flat_actual: - expected_value = flat_expected[key] - actual_value = flat_actual[key] - - if expected_value == actual_value: - matching_keys += 1 - else: - diff_details.append(f"Value mismatch for key '{key}':") - diff_details.append(f" Expected: {expected_value}") - diff_details.append(f" Actual: {actual_value}") - elif key in flat_expected: - diff_details.append(f"Key '{key}' missing in actual YAML") - diff_details.append(f" Expected value: {flat_expected[key]}") - else: # key in flat_actual - diff_details.append(f"Key '{key}' unexpected in actual YAML") - diff_details.append(f" Actual value: {flat_actual[key]}") - - # Calculate similarity score - similarity_score = matching_keys / total_keys - - # Generate diff using difflib for a more visual representation - expected_yaml_str = yaml.dump(normalized_expected, sort_keys=False) - actual_yaml_str = yaml.dump(normalized_actual, sort_keys=False) - - diff = difflib.unified_diff( - expected_yaml_str.splitlines(keepends=True), - actual_yaml_str.splitlines(keepends=True), - fromfile="expected", - tofile="actual", - ) - - diff_str = "".join(diff) - - # Combine the text diff with our detailed analysis - detailed_diff = "\n".join(diff_details) - full_diff = f"Similarity Score: {similarity_score:.2%}\n\nDetailed Differences:\n{detailed_diff}\n\nUnified Diff:\n{diff_str}" - - return similarity_score, full_diff diff --git a/python/tests/test_generate_resource.py b/python/tests/test_generate_resource.py deleted file mode 100644 index bc6a0c067..000000000 --- a/python/tests/test_generate_resource.py +++ /dev/null @@ -1,145 +0,0 @@ -import logging -import os -import warnings -from pathlib import Path - -import pytest -import yaml -from autogen_core import CancellationToken -from autogen_ext.models.openai import OpenAIChatCompletionClient - -from kagent.tools.k8s import GenerateResourceTool, GenerateResourceToolConfig, GenerateResourceToolInput, ResourceTypes - -from ._test_result import GenerateResourceTestResult -from ._yaml_comparer import YAMLComparer - -logger = logging.getLogger(__name__) - -TEST_CASES = str(Path(__file__).parent / "testcases") -SIMILARITY_THRESHOLD = 0.6 - - -def load_test_cases(file_path): - """Load test cases from a YAML file.""" - with open(file_path, "r") as f: - data = yaml.safe_load(f) - - test_cases = [] - - if "test_cases" in data: - # Get the resource type from metadata - if "metadata" not in data or "resource_type" not in data["metadata"]: - raise ValueError(f"Missing 'resource_type' in metadata section for file: {file_path}") - - resource_type = data["metadata"]["resource_type"] - - for test_case in data["test_cases"]: - # Add the resource type to each test case - test_case["resource_type"] = resource_type - test_cases.append(test_case) - - return test_cases - - raise ValueError(f"Unknown test case format in file: {file_path}") - - -def get_resource_type(resource_string: str): - for resource_type in ResourceTypes: - if resource_type.value == resource_string: - return resource_type - raise ValueError(f"No matching ResourceType found for 
{resource_string}") - - -@pytest.fixture(scope="session") -def tool_config(): - """Test fixture to create a configuration for the GenerateResourceTool.""" - # First try to get the key from environment - api_key = os.environ.get("OPENAI_API_KEY") - - if not api_key: - logger.warning("No OpenAI API key found. Tests will be skipped.") - pytest.skip("No OpenAI API key found") - - client = OpenAIChatCompletionClient(api_key=api_key, model="gpt-4o-mini", temperature=0.1) - - return GenerateResourceToolConfig(model_client=client.dump_component()) - - -# Load all test cases from the the TEST_CASES folder -test_data = [] - -for file in Path(TEST_CASES).rglob("*.yaml"): - logger.info(f"Loading test cases from: {file}") - test_data.extend([(case, file) for case in load_test_cases(file)]) - - -@pytest.fixture(scope="session", autouse=True) -def report_generation(): - """Generate test report at the end of the test session.""" - yield - GenerateResourceTestResult.generate_report(SIMILARITY_THRESHOLD) - - -@pytest.mark.asyncio -@pytest.mark.parametrize("test_case,source_file", test_data) -async def test_generate_resource(test_case, source_file, tool_config) -> None: - """Test the GenerateResourceTool with various inputs.""" - # Check if we're in the no_fail mode -- this is so we can run all tests and just print warnings - no_fail = os.environ.get("NO_FAIL", "").lower() in ["1", "true", "yes"] - - test_name = test_case["name"] - input_description = test_case["input"] - resource_type_str = test_case.get("resource_type") - expected_output = test_case["expected_output"] - - tool = GenerateResourceTool(tool_config) - result = await tool.run( - args=GenerateResourceToolInput( - resource_description=input_description, resource_type=get_resource_type(resource_type_str) - ), - cancellation_token=CancellationToken(), - ) - - try: - actual_output = yaml.safe_load(result) - similarity_score, diff_details = YAMLComparer.compute_similarity(expected_output, actual_output) - - GenerateResourceTestResult.add_result( - test_name=test_name, - input_description=input_description, - resource_type=resource_type_str, - expected_yaml=expected_output, - actual_yaml=actual_output, - similarity_score=similarity_score, - diff_details=diff_details, - ) - - # Print the summary - logger.info(f"Similarity Score: {similarity_score:.2%}") - - if not no_fail: - assert similarity_score >= SIMILARITY_THRESHOLD, ( - f"Similarity score {similarity_score:.2%} is below threshold {SIMILARITY_THRESHOLD:.2%}\n{diff_details}" - ) - else: - if similarity_score < SIMILARITY_THRESHOLD: - logger.warning( - f"\033[93mWARNING: Test '{test_name}' has low similarity score: {similarity_score:.2%}\033[0m" - ) - - except Exception as e: - GenerateResourceTestResult.add_result( - test_name=test_name, - input_description=input_description, - resource_type=resource_type_str, - expected_yaml=expected_output, - actual_yaml={"error": str(e), "raw_output": result}, - similarity_score=0.0, - diff_details=f"Failed to parse output: {str(e)}", - ) - - # but don't fail - if no_fail: - logger.warning(f"\033[93mWARNING: Test '{test_name}' encountered an error: {str(e)}\033[0m") - else: - pytest.fail(f"Failed to parse result: {str(e)}\nRaw output: {result}") diff --git a/python/tests/test_load_agents.py b/python/tests/test_load_agents.py deleted file mode 100644 index bf4bdafca..000000000 --- a/python/tests/test_load_agents.py +++ /dev/null @@ -1,46 +0,0 @@ -import json -import logging -import os -from pathlib import Path - -import pytest -from autogen_agentchat.base import 
Team - -logger = logging.getLogger(__name__) - - -# Set up the OpenAI API key as a fixture -@pytest.fixture(scope="module") -def setup_env(): - # Required this be set, but it's unused - os.environ["OPENAI_API_KEY"] = "fake" - - -# Get all agent files -def get_agent_files(): - base_path = Path(__file__).parent.parent / "agents" - files = list(base_path.glob("*.json")) - return files - - -# Create a fixture for each agent file -@pytest.fixture(params=get_agent_files()) -def agent_file(request): - return request.param - - -# Test that loads each agent file individually -@pytest.mark.skip(reason="Skipping agent loading tests") -def test_load_agent(setup_env, agent_file): - with open(agent_file, "r") as f: - agent_config = json.load(f) - Team.load_component(agent_config) - logger.info(f"Successfully loaded agent from {agent_file.name}") - - -# Alternatively, create named fixtures for each agent file -# This allows targeting specific agents in tests if needed -agent_files = get_agent_files() -for file in agent_files: - fixture_name = f"agent_{file.stem}" - globals()[fixture_name] = pytest.fixture(lambda file=file: file) diff --git a/python/tests/test_yaml_comparer.py b/python/tests/test_yaml_comparer.py deleted file mode 100644 index f8acb7205..000000000 --- a/python/tests/test_yaml_comparer.py +++ /dev/null @@ -1,60 +0,0 @@ -import pytest - -from ._yaml_comparer import YAMLComparer - - -def test_normalize_structure_removes_status_and_metadata(): - input_yaml = { - "metadata": {"creationTimestamp": "2023-01-01", "name": "foo"}, - "status": {"phase": "Running"}, - "spec": {"replicas": 3}, - } - expected = {"metadata": {"name": "foo"}, "spec": {"replicas": 3}} - result = YAMLComparer._normalize_structure(input_yaml) - assert result == expected - - -def test_normalize_structure_sorts_simple_lists(): - input_yaml = {"list": [3, 1, 2]} - expected = {"list": [1, 2, 3]} - result = YAMLComparer._normalize_structure(input_yaml) - assert result == expected - - -def test_convert_to_flat_dict(): - input_yaml = {"a": {"b": 1, "c": [2, 3]}, "d": 4} - expected = {"a.b": 1, "a.c": [2, 3], "d": 4} - result = YAMLComparer._convert_to_flat_dict(input_yaml) - assert result == expected - - -def test_compute_similarity_identical(): - yaml1 = {"a": 1, "b": {"c": 2}} - yaml2 = {"a": 1, "b": {"c": 2}} - score, diff = YAMLComparer.compute_similarity(yaml1, yaml2) - assert score == 1.0 - assert "Similarity Score: 100.00%" in diff - - -def test_compute_similarity_with_difference(): - yaml1 = {"a": 1, "b": {"c": 2}} - yaml2 = {"a": 1, "b": {"c": 3}} - score, diff = YAMLComparer.compute_similarity(yaml1, yaml2) - assert score < 1.0 - assert "Value mismatch for key 'b.c':" in diff - - -def test_compute_similarity_missing_key(): - yaml1 = {"a": 1, "b": 2} - yaml2 = {"a": 1} - score, diff = YAMLComparer.compute_similarity(yaml1, yaml2) - assert score < 1.0 - assert "Key 'b' missing in actual YAML" in diff - - -def test_compute_similarity_unexpected_key(): - yaml1 = {"a": 1} - yaml2 = {"a": 1, "b": 2} - score, diff = YAMLComparer.compute_similarity(yaml1, yaml2) - assert score < 1.0 - assert "Key 'b' unexpected in actual YAML" in diff diff --git a/python/tests/testcases/auth_policy_tests.yaml b/python/tests/testcases/auth_policy_tests.yaml deleted file mode 100644 index 4ac6910ee..000000000 --- a/python/tests/testcases/auth_policy_tests.yaml +++ /dev/null @@ -1,127 +0,0 @@ -version: "1.0" -metadata: - description: "Authorization Policy Test Cases" - resource_type: "auth_policy" - -test_cases: - - name: deny_post_8080 - input: 
"Deny requests with POST method on port 8080 on all workloads in the foo namespace" - expected_output: - apiVersion: security.istio.io/v1 - kind: AuthorizationPolicy - metadata: - name: policy - namespace: foo - spec: - action: DENY - rules: - - to: - - operation: - methods: - - POST - ports: - - "8080" - - - name: allow_get_3000 - input: "Allow GET requests on port 3000 for service-a in the bar namespace" - expected_output: - apiVersion: security.istio.io/v1 - kind: AuthorizationPolicy - metadata: - name: service-a - namespace: bar - spec: - action: ALLOW - rules: - - to: - - operation: - methods: - - GET - ports: - - "3000" - - - name: allow_nothing - input: "Create an allow nothing policy in the foo namespace" - expected_output: - apiVersion: security.istio.io/v1 - kind: AuthorizationPolicy - metadata: - name: allow-nothing - namespace: foo - spec: {} - - - name: allow_nothing_1 - input: "Deny all requests between the workloads in the foo namespace" - expected_output: - apiVersion: security.istio.io/v1 - kind: AuthorizationPolicy - metadata: - name: allow-nothing - namespace: foo - spec: {} - - - name: allow_all - input: "Allow all requests in the default namespace" - expected_output: - apiVersion: security.istio.io/v1 - kind: AuthorizationPolicy - metadata: - name: allow-all - namespace: default - spec: - rules: - - {} - - - name: deny_from_namespace - input: "Deny requests to customers from foo namespace" - expected_output: - apiVersion: security.istio.io/v1 - kind: AuthorizationPolicy - metadata: - name: policy - namespace: default - spec: - selector: - matchLabels: - app: customers - action: DENY - rules: - - from: - - source: - namespaces: - - foo - - - name: mtls_strict - input: "Enforce mutual TLS (mTLS) communication in namespace bar and deny plaintext communication" - expected_output: - apiVersion: security.istio.io/v1 - kind: AuthorizationPolicy - metadata: - name: policy - namespace: bar - spec: - action: DENY - rules: - - from: - - source: - notPrincipals: - - "*" - - - name: allow_with_headers - input: "Allow requests to the payment service only if the request header X-API-KEY is set to a specific value abc123" - expected_output: - apiVersion: security.istio.io/v1 - kind: AuthorizationPolicy - metadata: - name: policy - namespace: default - spec: - selector: - matchLabels: - app: payment - action: ALLOW - rules: - - when: - - key: request.headers[X-API-KEY] - values: - - abc123 \ No newline at end of file diff --git a/python/uv.lock b/python/uv.lock index 687011cda..3af851310 100644 --- a/python/uv.lock +++ b/python/uv.lock @@ -6,28 +6,36 @@ resolution-markers = [ "python_full_version < '3.13'", ] +[manifest] +members = [ + "kagent", + "kagent-adk", +] + +[manifest.dependency-groups] +dev = [ + { name = "pytest", specifier = ">=8.3.5" }, + { name = "pytest-asyncio", specifier = ">=0.25.3" }, + { name = "ruff", specifier = ">=0.11.5" }, +] + [[package]] name = "a2a-sdk" -version = "0.2.12" +version = "0.2.16" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "fastapi" }, - { name = "google-api-core" }, - { name = "grpcio" }, - { name = "grpcio-reflection" }, - { name = "grpcio-tools" }, { name = "httpx" }, { name = "httpx-sse" }, { name = "opentelemetry-api" }, { name = "opentelemetry-sdk" }, - { name = "protobuf" }, { name = "pydantic" }, { name = "sse-starlette" }, { name = "starlette" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1a/e0/002262293c309373b26b9278680aaaaddbfa2eeaa0b890bd75f3518f659a/a2a_sdk-0.2.12.tar.gz", hash = 
"sha256:a57273f5d23462683ac5437550a09e90a700e7a02f0bb5e5f25c6e4029c67e9e", size = 165979, upload-time = "2025-07-14T16:25:25.59Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/3b/8fd1e3fe28606712c203b968a6fe2c8e7944b6df9e65c28976c66c19286c/a2a_sdk-0.2.16.tar.gz", hash = "sha256:d9638c71674183f32fe12f8865015e91a563a90a3aa9ed43020f1b23164862b3", size = 179006, upload-time = "2025-07-21T19:51:14.107Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/f7/5d1f852d2e9a91cfe482a83c2443b3d7786d778319cd5c1f484f49e6f6ab/a2a_sdk-0.2.12-py3-none-any.whl", hash = "sha256:0e83884f94c511302dcb2a5eb50965c203eacb396e4160840245fe9efebeb93d", size = 96737, upload-time = "2025-07-14T16:25:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/a5/92/16bfbc2ef0ef037c5860ef3b13e482aeb1860b9643bf833ed522c995f639/a2a_sdk-0.2.16-py3-none-any.whl", hash = "sha256:54782eab3d0ad0d5842bfa07ff78d338ea836f1259ece51a825c53193c67c7d0", size = 103090, upload-time = "2025-07-21T19:51:12.613Z" }, ] [[package]] @@ -112,20 +120,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, ] -[[package]] -name = "alembic" -version = "1.16.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mako" }, - { name = "sqlalchemy" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9c/35/116797ff14635e496bbda0c168987f5326a6555b09312e9b817e360d1f56/alembic-1.16.2.tar.gz", hash = "sha256:e53c38ff88dadb92eb22f8b150708367db731d58ad7e9d417c9168ab516cbed8", size = 1963563, upload-time = "2025-06-16T18:05:08.566Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/dd/e2/88e425adac5ad887a087c38d04fe2030010572a3e0e627f8a6e8c33eeda8/alembic-1.16.2-py3-none-any.whl", hash = "sha256:5f42e9bd0afdbd1d5e3ad856c01754530367debdebf21ed6894e34af52b3bb03", size = 242717, upload-time = "2025-06-16T18:05:10.27Z" }, -] - [[package]] name = "annotated-types" version = "0.7.0" @@ -137,7 +131,7 @@ wheels = [ [[package]] name = "anthropic" -version = "0.57.1" +version = "0.59.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -148,9 +142,9 @@ dependencies = [ { name = "sniffio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d7/75/6261a1a8d92aed47e27d2fcfb3a411af73b1435e6ae1186da02b760565d0/anthropic-0.57.1.tar.gz", hash = "sha256:7815dd92245a70d21f65f356f33fc80c5072eada87fb49437767ea2918b2c4b0", size = 423775, upload-time = "2025-07-03T16:57:35.932Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/52daff015f5a1f24eec891b3041f5f816712fea8b5113dc76638bcbc23d8/anthropic-0.59.0.tar.gz", hash = "sha256:d710d1ef0547ebbb64b03f219e44ba078e83fc83752b96a9b22e9726b523fd8f", size = 425679, upload-time = "2025-07-23T16:23:16.901Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/cf/ca0ba77805aec6171629a8b665c7dc224dab374539c3d27005b5d8c100a0/anthropic-0.57.1-py3-none-any.whl", hash = "sha256:33afc1f395af207d07ff1bffc0a3d1caac53c371793792569c5d2f09283ea306", size = 292779, upload-time = "2025-07-03T16:57:34.636Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b1/03f680393eac04afd8f2be44ee0e39e033c40faf43dbc1c11764b07a2687/anthropic-0.59.0-py3-none-any.whl", hash = 
"sha256:cbc8b3dccef66ad6435c4fa1d317e5ebb092399a4b88b33a09dc4bf3944c3183", size = 293057, upload-time = "2025-07-23T16:23:14.934Z" }, ] [package.optional-dependencies] @@ -172,24 +166,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, ] -[[package]] -name = "appnope" -version = "0.1.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170, upload-time = "2024-02-06T09:43:11.258Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321, upload-time = "2024-02-06T09:43:09.663Z" }, -] - -[[package]] -name = "asttokens" -version = "3.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4a/e7/82da0a03e7ba5141f05cce0d302e6eed121ae055e0456ca228bf693984bc/asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7", size = 61978, upload-time = "2024-11-30T04:30:14.439Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2", size = 26918, upload-time = "2024-11-30T04:30:10.946Z" }, -] - [[package]] name = "attrs" version = "25.3.0" @@ -201,206 +177,14 @@ wheels = [ [[package]] name = "authlib" -version = "1.6.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cryptography" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a2/9d/b1e08d36899c12c8b894a44a5583ee157789f26fc4b176f8e4b6217b56e1/authlib-1.6.0.tar.gz", hash = "sha256:4367d32031b7af175ad3a323d571dc7257b7099d55978087ceae4a0d88cd3210", size = 158371, upload-time = "2025-05-23T00:21:45.011Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/84/29/587c189bbab1ccc8c86a03a5d0e13873df916380ef1be461ebe6acebf48d/authlib-1.6.0-py2.py3-none-any.whl", hash = "sha256:91685589498f79e8655e8a8947431ad6288831d643f11c55c2143ffcc738048d", size = 239981, upload-time = "2025-05-23T00:21:43.075Z" }, -] - -[[package]] -name = "autogen-agentchat" -version = "0.6.1" -source = { git = "https://github.com/Microsoft/autogen?subdirectory=python%2Fpackages%2Fautogen-agentchat&rev=c5b893d3f814185c326c8ff95767d2375d95818d#c5b893d3f814185c326c8ff95767d2375d95818d" } -dependencies = [ - { name = "autogen-core" }, -] - -[[package]] -name = "autogen-core" -version = "0.6.1" -source = { git = "https://github.com/Microsoft/autogen?subdirectory=python%2Fpackages%2Fautogen-core&rev=c5b893d3f814185c326c8ff95767d2375d95818d#c5b893d3f814185c326c8ff95767d2375d95818d" } -dependencies = [ - { name = "jsonref" }, - { name = "opentelemetry-api" }, - { name = "opentelemetry-semantic-conventions" }, - { name = "pillow" }, - { name = "protobuf" }, - { name = "pydantic" }, - { name = "typing-extensions" }, -] - -[[package]] -name = "autogen-ext" 
-version = "0.6.1" -source = { git = "https://github.com/Microsoft/autogen?subdirectory=python%2Fpackages%2Fautogen-ext&rev=c5b893d3f814185c326c8ff95767d2375d95818d#c5b893d3f814185c326c8ff95767d2375d95818d" } -dependencies = [ - { name = "autogen-core" }, -] - -[package.optional-dependencies] -anthropic = [ - { name = "anthropic" }, -] -azure = [ - { name = "azure-ai-inference" }, - { name = "azure-ai-projects" }, - { name = "azure-core" }, - { name = "azure-identity" }, - { name = "azure-search-documents" }, -] -mcp = [ - { name = "mcp" }, -] -ollama = [ - { name = "ollama" }, - { name = "tiktoken" }, -] -openai = [ - { name = "aiofiles" }, - { name = "openai" }, - { name = "tiktoken" }, -] - -[[package]] -name = "azure-ai-agents" -version = "1.0.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "azure-core" }, - { name = "isodate" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d2/e4/aad4e2288134bc2efb44fbec3a219813b43bb07b206d6cb3a227ac08735a/azure_ai_agents-1.0.2.tar.gz", hash = "sha256:b9d349f42f5b944578b9e9428d5508935071cffa8a4cbe376fff62ecf8d07c23", size = 300772, upload-time = "2025-07-01T19:38:03.504Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/82/c8/3c511d1648eaf0fed577db2cbf67f39720a165921e7152b4c731c35b1943/azure_ai_agents-1.0.2-py3-none-any.whl", hash = "sha256:6d5a541b4cb91a3a3341b2f2b5063be898fb4d840d8a9193fe643765c7910d94", size = 189807, upload-time = "2025-07-01T19:38:05.077Z" }, -] - -[[package]] -name = "azure-ai-inference" -version = "1.0.0b9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "azure-core" }, - { name = "isodate" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4e/6a/ed85592e5c64e08c291992f58b1a94dab6869f28fb0f40fd753dced73ba6/azure_ai_inference-1.0.0b9.tar.gz", hash = "sha256:1feb496bd84b01ee2691befc04358fa25d7c344d8288e99364438859ad7cd5a4", size = 182408, upload-time = "2025-02-15T00:37:28.464Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4f/0f/27520da74769db6e58327d96c98e7b9a07ce686dff582c9a5ec60b03f9dd/azure_ai_inference-1.0.0b9-py3-none-any.whl", hash = "sha256:49823732e674092dad83bb8b0d1b65aa73111fab924d61349eb2a8cdc0493990", size = 124885, upload-time = "2025-02-15T00:37:29.964Z" }, -] - -[[package]] -name = "azure-ai-projects" -version = "1.0.0b12" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "azure-ai-agents" }, - { name = "azure-core" }, - { name = "azure-storage-blob" }, - { name = "isodate" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a5/57/9a89c1978ec9ce29a3be454b83b66885982261762d7a436cad73c47c9225/azure_ai_projects-1.0.0b12.tar.gz", hash = "sha256:1a3784e4be6af3b0fc76e9e4a64158a38f6679fe3a1f8b9c33f12bc8914ae36c", size = 144358, upload-time = "2025-06-27T04:12:48.334Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/73/e4/50cd2c3bd5ab745e85a4a1bd591bf4343d6e3470580f1eadceed55fd57c0/azure_ai_projects-1.0.0b12-py3-none-any.whl", hash = "sha256:4e3d3ef275f7409ea8030e474626968848055d4b3717ff7ef03681da809c096f", size = 129783, upload-time = "2025-06-27T04:12:49.837Z" }, -] - -[[package]] -name = "azure-common" -version = "1.1.28" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3e/71/f6f71a276e2e69264a97ad39ef850dca0a04fce67b12570730cb38d0ccac/azure-common-1.1.28.zip", hash = 
"sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3", size = 20914, upload-time = "2022-02-03T19:39:44.373Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/62/55/7f118b9c1b23ec15ca05d15a578d8207aa1706bc6f7c87218efffbbf875d/azure_common-1.1.28-py2.py3-none-any.whl", hash = "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad", size = 14462, upload-time = "2022-02-03T19:39:42.417Z" }, -] - -[[package]] -name = "azure-core" -version = "1.35.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "requests" }, - { name = "six" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ce/89/f53968635b1b2e53e4aad2dd641488929fef4ca9dfb0b97927fa7697ddf3/azure_core-1.35.0.tar.gz", hash = "sha256:c0be528489485e9ede59b6971eb63c1eaacf83ef53001bfe3904e475e972be5c", size = 339689, upload-time = "2025-07-03T00:55:23.496Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/78/bf94897361fdd650850f0f2e405b2293e2f12808239046232bdedf554301/azure_core-1.35.0-py3-none-any.whl", hash = "sha256:8db78c72868a58f3de8991eb4d22c4d368fae226dac1002998d6c50437e7dad1", size = 210708, upload-time = "2025-07-03T00:55:25.238Z" }, -] - -[[package]] -name = "azure-identity" -version = "1.23.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "azure-core" }, - { name = "cryptography" }, - { name = "msal" }, - { name = "msal-extensions" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/41/52/458c1be17a5d3796570ae2ed3c6b7b55b134b22d5ef8132b4f97046a9051/azure_identity-1.23.0.tar.gz", hash = "sha256:d9cdcad39adb49d4bb2953a217f62aec1f65bbb3c63c9076da2be2a47e53dde4", size = 265280, upload-time = "2025-05-14T00:18:30.408Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/16/a51d47780f41e4b87bb2d454df6aea90a44a346e918ac189d3700f3d728d/azure_identity-1.23.0-py3-none-any.whl", hash = "sha256:dbbeb64b8e5eaa81c44c565f264b519ff2de7ff0e02271c49f3cb492762a50b0", size = 186097, upload-time = "2025-05-14T00:18:32.734Z" }, -] - -[[package]] -name = "azure-search-documents" -version = "11.5.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "azure-common" }, - { name = "azure-core" }, - { name = "isodate" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fd/11/9ecde2bd9e6c00cc0e3f312ab096a33d333f8ba40c847f01f94d524895fe/azure_search_documents-11.5.3.tar.gz", hash = "sha256:6931149ec0db90485d78648407f18ea4271420473c7cb646bf87790374439989", size = 300353, upload-time = "2025-06-25T16:48:58.924Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4b/f5/0f6b52567cbb33f1efba13060514ed7088a86de84d74b77cda17d278bcd9/azure_search_documents-11.5.3-py3-none-any.whl", hash = "sha256:110617751c6c8bd50b1f0af2b00a478bd4fbaf4e2f0387e3454c26ec3eb433d6", size = 298772, upload-time = "2025-06-25T16:49:00.764Z" }, -] - -[[package]] -name = "azure-storage-blob" -version = "12.25.1" +version = "1.6.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "azure-core" }, { name = "cryptography" }, - { name = "isodate" }, - { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/f3/f764536c25cc3829d36857167f03933ce9aee2262293179075439f3cd3ad/azure_storage_blob-12.25.1.tar.gz", hash = "sha256:4f294ddc9bc47909ac66b8934bd26b50d2000278b10ad82cc109764fdc6e0e3b", size = 570541, 
upload-time = "2025-03-27T17:13:05.424Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/a1/d8d1c6f8bc922c0b87ae0d933a8ed57be1bef6970894ed79c2852a153cd3/authlib-1.6.1.tar.gz", hash = "sha256:4dffdbb1460ba6ec8c17981a4c67af7d8af131231b5a36a88a1e8c80c111cdfd", size = 159988, upload-time = "2025-07-20T07:38:42.834Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/57/33/085d9352d416e617993821b9d9488222fbb559bc15c3641d6cbd6d16d236/azure_storage_blob-12.25.1-py3-none-any.whl", hash = "sha256:1f337aab12e918ec3f1b638baada97550673911c4ceed892acc8e4e891b74167", size = 406990, upload-time = "2025-03-27T17:13:06.879Z" }, -] - -[[package]] -name = "beautifulsoup4" -version = "4.13.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "soupsieve" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d8/e4/0c4c39e18fd76d6a628d4dd8da40543d136ce2d1752bd6eeeab0791f4d6b/beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195", size = 621067, upload-time = "2025-04-15T17:05:13.836Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/50/cd/30110dc0ffcf3b131156077b90e9f60ed75711223f306da4db08eff8403b/beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b", size = 187285, upload-time = "2025-04-15T17:05:12.221Z" }, -] - -[[package]] -name = "bs4" -version = "0.0.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "beautifulsoup4" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c9/aa/4acaf814ff901145da37332e05bb510452ebed97bc9602695059dd46ef39/bs4-0.0.2.tar.gz", hash = "sha256:a48685c58f50fe127722417bae83fe6badf500d54b55f7e39ffe43b798653925", size = 698, upload-time = "2024-01-17T18:15:47.371Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/bb/bf7aab772a159614954d84aa832c129624ba6c32faa559dfb200a534e50b/bs4-0.0.2-py2.py3-none-any.whl", hash = "sha256:abf8742c0805ef7f662dce4b51cca104cffe52b835238afc169142ab9b3fbccc", size = 1189, upload-time = "2024-01-17T18:15:48.613Z" }, + { url = "https://files.pythonhosted.org/packages/f9/58/cc6a08053f822f98f334d38a27687b69c6655fb05cd74a7a5e70a2aeed95/authlib-1.6.1-py2.py3-none-any.whl", hash = "sha256:e9d2031c34c6309373ab845afc24168fe9e93dc52d252631f52642f21f5ed06e", size = 239299, upload-time = "2025-07-20T07:38:39.259Z" }, ] [[package]] @@ -414,11 +198,11 @@ wheels = [ [[package]] name = "certifi" -version = "2025.6.15" +version = "2025.7.14" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/73/f7/f14b46d4bcd21092d7d3ccef689615220d8a08fb25e564b65d20738e672e/certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b", size = 158753, upload-time = "2025-06-15T02:45:51.329Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/76/52c535bcebe74590f296d6c77c86dabf761c41980e1347a2422e4aa2ae41/certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995", size = 163981, upload-time = "2025-07-14T03:29:28.449Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/ae/320161bd181fc06471eed047ecce67b693fd7515b16d495d8932db763426/certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", size = 157650, upload-time = "2025-06-15T02:45:49.977Z" }, + { url 
= "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", size = 162722, upload-time = "2025-07-14T03:29:26.863Z" }, ] [[package]] @@ -519,30 +303,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] -[[package]] -name = "coloredlogs" -version = "15.0.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "humanfriendly" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520, upload-time = "2021-06-11T10:22:45.202Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018, upload-time = "2021-06-11T10:22:42.561Z" }, -] - -[[package]] -name = "comm" -version = "0.2.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e9/a8/fb783cb0abe2b5fded9f55e5703015cdf1c9c85b3669087c538dd15a6a86/comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e", size = 6210, upload-time = "2024-03-12T16:53:41.133Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/75/49e5bfe642f71f272236b5b2d2691cf915a7283cc0ceda56357b61daa538/comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3", size = 7180, upload-time = "2024-03-12T16:53:39.226Z" }, -] - [[package]] name = "cryptography" version = "45.0.5" @@ -578,41 +338,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/79/b3/28ac139109d9005ad3f6b6f8976ffede6706a6478e21c889ce36c840918e/cryptography-45.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:90cb0a7bb35959f37e23303b7eed0a32280510030daba3f7fdfbb65defde6a97", size = 3390016, upload-time = "2025-07-02T13:05:50.811Z" }, ] -[[package]] -name = "debugpy" -version = "1.8.14" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/75/087fe07d40f490a78782ff3b0a30e3968936854105487decdb33446d4b0e/debugpy-1.8.14.tar.gz", hash = "sha256:7cd287184318416850aa8b60ac90105837bb1e59531898c07569d197d2ed5322", size = 1641444, upload-time = "2025-04-10T19:46:10.981Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/2a/ac2df0eda4898f29c46eb6713a5148e6f8b2b389c8ec9e425a4a1d67bf07/debugpy-1.8.14-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:8899c17920d089cfa23e6005ad9f22582fd86f144b23acb9feeda59e84405b84", size = 2501268, upload-time = "2025-04-10T19:46:26.044Z" }, - { url = "https://files.pythonhosted.org/packages/10/53/0a0cb5d79dd9f7039169f8bf94a144ad3efa52cc519940b3b7dde23bcb89/debugpy-1.8.14-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6bb5c0dcf80ad5dbc7b7d6eac484e2af34bdacdf81df09b6a3e62792b722826", 
size = 4221077, upload-time = "2025-04-10T19:46:27.464Z" }, - { url = "https://files.pythonhosted.org/packages/f8/d5/84e01821f362327bf4828728aa31e907a2eca7c78cd7c6ec062780d249f8/debugpy-1.8.14-cp312-cp312-win32.whl", hash = "sha256:281d44d248a0e1791ad0eafdbbd2912ff0de9eec48022a5bfbc332957487ed3f", size = 5255127, upload-time = "2025-04-10T19:46:29.467Z" }, - { url = "https://files.pythonhosted.org/packages/33/16/1ed929d812c758295cac7f9cf3dab5c73439c83d9091f2d91871e648093e/debugpy-1.8.14-cp312-cp312-win_amd64.whl", hash = "sha256:5aa56ef8538893e4502a7d79047fe39b1dae08d9ae257074c6464a7b290b806f", size = 5297249, upload-time = "2025-04-10T19:46:31.538Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e4/395c792b243f2367d84202dc33689aa3d910fb9826a7491ba20fc9e261f5/debugpy-1.8.14-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:329a15d0660ee09fec6786acdb6e0443d595f64f5d096fc3e3ccf09a4259033f", size = 2485676, upload-time = "2025-04-10T19:46:32.96Z" }, - { url = "https://files.pythonhosted.org/packages/ba/f1/6f2ee3f991327ad9e4c2f8b82611a467052a0fb0e247390192580e89f7ff/debugpy-1.8.14-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f920c7f9af409d90f5fd26e313e119d908b0dd2952c2393cd3247a462331f15", size = 4217514, upload-time = "2025-04-10T19:46:34.336Z" }, - { url = "https://files.pythonhosted.org/packages/79/28/b9d146f8f2dc535c236ee09ad3e5ac899adb39d7a19b49f03ac95d216beb/debugpy-1.8.14-cp313-cp313-win32.whl", hash = "sha256:3784ec6e8600c66cbdd4ca2726c72d8ca781e94bce2f396cc606d458146f8f4e", size = 5254756, upload-time = "2025-04-10T19:46:36.199Z" }, - { url = "https://files.pythonhosted.org/packages/e0/62/a7b4a57013eac4ccaef6977966e6bec5c63906dd25a86e35f155952e29a1/debugpy-1.8.14-cp313-cp313-win_amd64.whl", hash = "sha256:684eaf43c95a3ec39a96f1f5195a7ff3d4144e4a18d69bb66beeb1a6de605d6e", size = 5297119, upload-time = "2025-04-10T19:46:38.141Z" }, - { url = "https://files.pythonhosted.org/packages/97/1a/481f33c37ee3ac8040d3d51fc4c4e4e7e61cb08b8bc8971d6032acc2279f/debugpy-1.8.14-py2.py3-none-any.whl", hash = "sha256:5cd9a579d553b6cb9759a7908a41988ee6280b961f24f63336835d9418216a20", size = 5256230, upload-time = "2025-04-10T19:46:54.077Z" }, -] - -[[package]] -name = "decorator" -version = "5.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, -] - -[[package]] -name = "defusedxml" -version = "0.7.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = 
"sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" }, -] - [[package]] name = "distro" version = "1.9.0" @@ -624,43 +349,25 @@ wheels = [ [[package]] name = "docstring-parser" -version = "0.16" +version = "0.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/08/12/9c22a58c0b1e29271051222d8906257616da84135af9ed167c9e28f85cb3/docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e", size = 26565, upload-time = "2024-03-15T10:39:44.419Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/7c/e9fcff7623954d86bdc17782036cbf715ecab1bec4847c008557affe1ca8/docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637", size = 36533, upload-time = "2024-03-15T10:39:41.527Z" }, -] - -[[package]] -name = "executing" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/91/50/a9d80c47ff289c611ff12e63f7c5d13942c65d68125160cefd768c73e6e4/executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755", size = 978693, upload-time = "2025-01-22T15:41:29.403Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/8f/c4d9bafc34ad7ad5d8dc16dd1347ee0e507a52c3adb6bfa8887e1c6a26ba/executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa", size = 26702, upload-time = "2025-01-22T15:41:25.929Z" }, + { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, ] [[package]] name = "fastapi" -version = "0.115.14" +version = "0.116.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ca/53/8c38a874844a8b0fa10dd8adf3836ac154082cf88d3f22b544e9ceea0a15/fastapi-0.115.14.tar.gz", hash = "sha256:b1de15cdc1c499a4da47914db35d0e4ef8f1ce62b624e94e0e5824421df99739", size = 296263, upload-time = "2025-06-26T15:29:08.21Z" } +sdist = { url = "https://files.pythonhosted.org/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485, upload-time = "2025-07-11T16:22:32.057Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/53/50/b1222562c6d270fea83e9c9075b8e8600b8479150a18e4516a6138b980d1/fastapi-0.115.14-py3-none-any.whl", hash = "sha256:6c0c8bf9420bd58f565e585036d971872472b4f7d3f6c73b698e10cffdefb3ca", size = 95514, upload-time = "2025-06-26T15:29:06.49Z" }, -] - -[[package]] -name = "fastjsonschema" -version = "2.21.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/8b/50/4b769ce1ac4071a1ef6d86b1a3fb56cdc3a37615e8c5519e1af96cdac366/fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4", size = 373939, upload-time = "2024-12-02T10:55:15.133Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/90/2b/0817a2b257fe88725c25589d89aec060581aabf668707a8d03b2e9e0cb2a/fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667", size = 23924, upload-time = "2024-12-02T10:55:07.599Z" }, + { url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631, upload-time = "2025-07-11T16:22:30.485Z" }, ] [[package]] @@ -672,15 +379,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, ] -[[package]] -name = "flatbuffers" -version = "25.2.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/30/eb5dce7994fc71a2f685d98ec33cc660c0a5887db5610137e60d8cbc4489/flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e", size = 22170, upload-time = "2025-02-11T04:26:46.257Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953, upload-time = "2025-02-11T04:26:44.484Z" }, -] - [[package]] name = "frozenlist" version = "1.7.0" @@ -743,16 +441,16 @@ wheels = [ [[package]] name = "fsspec" -version = "2025.5.1" +version = "2025.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/00/f7/27f15d41f0ed38e8fcc488584b57e902b331da7f7c6dcda53721b15838fc/fsspec-2025.5.1.tar.gz", hash = "sha256:2e55e47a540b91843b755e83ded97c6e897fa0942b11490113f09e9c443c2475", size = 303033, upload-time = "2025-05-24T12:03:23.792Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/02/0835e6ab9cfc03916fe3f78c0956cfcdb6ff2669ffa6651065d5ebf7fc98/fsspec-2025.7.0.tar.gz", hash = "sha256:786120687ffa54b8283d942929540d8bc5ccfa820deb555a2b5d0ed2b737bf58", size = 304432, upload-time = "2025-07-15T16:05:21.19Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/61/78c7b3851add1481b048b5fdc29067397a1784e2910592bc81bb3f608635/fsspec-2025.5.1-py3-none-any.whl", hash = "sha256:24d3a2e663d5fc735ab256263c4075f374a174c3410c0b25e5bd1970bceaa462", size = 199052, upload-time = "2025-05-24T12:03:21.66Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e0/014d5d9d7a4564cf1c40b5039bc882db69fd881111e03ab3657ac0b218e2/fsspec-2025.7.0-py3-none-any.whl", hash = "sha256:8b012e39f63c7d5f10474de957f3ab793b47b45ae7d39f2fb735f8bbe25c0e21", size = 199597, upload-time = "2025-07-15T16:05:19.529Z" }, ] [[package]] name = "google-adk" -version = "1.6.1" +version = "1.8.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -784,9 +482,9 @@ dependencies = [ { name = "watchdog" }, { name = 
"websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7f/6d/4fbdee136d122f5342b465ce7d660fc95ef09613c6f212d4fed4764ab2b0/google_adk-1.6.1.tar.gz", hash = "sha256:d205dd6c3fb266d67d5a61e54f7f8fa560abd92659c70eb337c457714c4fea2f", size = 1526895, upload-time = "2025-07-10T00:04:40.305Z" } +sdist = { url = "https://files.pythonhosted.org/packages/61/38/022e05381b21517b6e14a346bed57e6a24ccdf2a8c2d5a2d911d3769c2fd/google_adk-1.8.0.tar.gz", hash = "sha256:5b214361cf356e807c421f75dcaee00be0c9619e7c7e41444883cc8d21f6aee8", size = 1575436, upload-time = "2025-07-23T22:22:29.477Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/78/ff8ecc41c73ea85e13077b6e2880ae2ac7f1197e5017698da04cf8476496/google_adk-1.6.1-py3-none-any.whl", hash = "sha256:a7e0cb15f7d5da6e37a101214e11f17b6e3564665451bb3dad6103b166d499e1", size = 1712021, upload-time = "2025-07-10T00:04:38.516Z" }, + { url = "https://files.pythonhosted.org/packages/9d/90/2317a22d8eeace229a52203c37d1e6a9309e062f382163d58369e798cd7a/google_adk-1.8.0-py3-none-any.whl", hash = "sha256:88f495072a481e68dd599b71be7e0b96611d7dc722dc4818f235b316d3ec240e", size = 1769832, upload-time = "2025-07-23T22:22:27.315Z" }, ] [[package]] @@ -813,7 +511,7 @@ grpc = [ [[package]] name = "google-api-python-client" -version = "2.176.0" +version = "2.177.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core" }, @@ -822,9 +520,9 @@ dependencies = [ { name = "httplib2" }, { name = "uritemplate" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3e/38/daf70faf6d05556d382bac640bc6765f09fcfb9dfb51ac4a595d3453a2a9/google_api_python_client-2.176.0.tar.gz", hash = "sha256:2b451cdd7fd10faeb5dd20f7d992f185e1e8f4124c35f2cdcc77c843139a4cf1", size = 13154773, upload-time = "2025-07-08T18:07:10.354Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7a/75/a89cad519fa8910132e3b08571d0e682ae1163643da6f963f1930f3dc788/google_api_python_client-2.177.0.tar.gz", hash = "sha256:9ffd2b57d68f5afa7e6ac64e2c440534eaa056cbb394812a62ff94723c31b50e", size = 13184405, upload-time = "2025-07-23T16:22:46.321Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/2c/758f415a19a12c3c6d06902794b0dd4c521d912a59b98ab752bba48812df/google_api_python_client-2.176.0-py3-none-any.whl", hash = "sha256:e22239797f1d085341e12cd924591fc65c56d08e0af02549d7606092e6296510", size = 13678445, upload-time = "2025-07-08T18:07:07.799Z" }, + { url = "https://files.pythonhosted.org/packages/47/f5/121248e18ca605a11720c81ae1b52a5a8cb690af9f01887c56de23cd9a5a/google_api_python_client-2.177.0-py3-none-any.whl", hash = "sha256:f2f50f11105ab883eb9b6cf38ec54ea5fd4b429249f76444bec90deba5be79b3", size = 13709470, upload-time = "2025-07-23T16:22:44.081Z" }, ] [[package]] @@ -861,7 +559,7 @@ wheels = [ [[package]] name = "google-cloud-aiplatform" -version = "1.103.0" +version = "1.105.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "docstring-parser" }, @@ -878,9 +576,9 @@ dependencies = [ { name = "shapely" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2a/f1/deec517e3b10992d3b07e729f2682303c4c999499a1819fde90a57b44e48/google_cloud_aiplatform-1.103.0.tar.gz", hash = "sha256:0d0c0db3bde6182097f4db8ddcbfb2ab382009458cf594c09cfb4406896b9795", size = 9449712, upload-time = "2025-07-10T22:52:55.329Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/02/8f/77b36b40370af26f3cf5a2bfd5eae57d63bcdaba869e796de2dc56549bc0/google_cloud_aiplatform-1.105.0.tar.gz", hash = "sha256:749c1230826198fa55d7c38774391f1fa57b9cd021a0e6ad1c788f8bca279555", size = 9474048, upload-time = "2025-07-23T16:25:56.816Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/8e/0779ce9d733c9073a00e2815c82e444f60332c2aac36148f62e2f3418b78/google_cloud_aiplatform-1.103.0-py2.py3-none-any.whl", hash = "sha256:bc14d90caed44580192ad8b60cf74c5a7089562a0dfa6425cad163971d3ae759", size = 7854846, upload-time = "2025-07-10T22:52:52.899Z" }, + { url = "https://files.pythonhosted.org/packages/c8/7c/8ea7e03e82172bede182e2227c2c82f2c41f94edce0ce86c4abc5a05c55f/google_cloud_aiplatform-1.105.0-py2.py3-none-any.whl", hash = "sha256:e6fa21bdd2716051c0c1a48353e43b83080426810f7fbfe71aea629b4d0635cb", size = 7880320, upload-time = "2025-07-23T16:25:53.252Z" }, ] [package.optional-dependencies] @@ -925,7 +623,7 @@ wheels = [ [[package]] name = "google-cloud-bigquery" -version = "3.34.0" +version = "3.35.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", extra = ["grpc"] }, @@ -936,9 +634,9 @@ dependencies = [ { name = "python-dateutil" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/24/f9/e9da2d56d7028f05c0e2f5edf6ce43c773220c3172666c3dd925791d763d/google_cloud_bigquery-3.34.0.tar.gz", hash = "sha256:5ee1a78ba5c2ccb9f9a8b2bf3ed76b378ea68f49b6cac0544dc55cc97ff7c1ce", size = 489091, upload-time = "2025-05-29T17:18:06.03Z" } +sdist = { url = "https://files.pythonhosted.org/packages/44/e4/9cf03fa81fefd1b9811a7cd6e398804ae0de3b6a4edef810e2acd45cabbc/google_cloud_bigquery-3.35.1.tar.gz", hash = "sha256:599f26cacf190acfe88000f6cc5f4bc9e6baac7899e4f406ca054f1906f71960", size = 496433, upload-time = "2025-07-24T15:09:04.108Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/7e/7115c4f67ca0bc678f25bff1eab56cc37d06eb9a3978940b2ebd0705aa0a/google_cloud_bigquery-3.34.0-py3-none-any.whl", hash = "sha256:de20ded0680f8136d92ff5256270b5920dfe4fae479f5d0f73e90e5df30b1cf7", size = 253555, upload-time = "2025-05-29T17:18:02.904Z" }, + { url = "https://files.pythonhosted.org/packages/63/50/96fe9bc5b83d3a421e91ed8edc2535de45957e9af398273e3ecb5c3a1094/google_cloud_bigquery-3.35.1-py3-none-any.whl", hash = "sha256:6739a6ba63c6d80735ca2b34b1df2090ff473b80c1a62354caa2debe6dbbd961", size = 256877, upload-time = "2025-07-24T15:09:02.443Z" }, ] [[package]] @@ -1075,7 +773,7 @@ wheels = [ [[package]] name = "google-genai" -version = "1.24.0" +version = "1.27.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -1087,9 +785,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8d/cf/37ac8cd4752e28e547b8a52765fe48a2ada2d0d286ea03f46e4d8c69ff4f/google_genai-1.24.0.tar.gz", hash = "sha256:bc896e30ad26d05a2af3d17c2ba10ea214a94f1c0cdb93d5c004dc038774e75a", size = 226740, upload-time = "2025-07-01T22:14:24.365Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/37/6c0ececc3a7a629029b5beed2ceb9f28f73292236eb96272355636769b0d/google_genai-1.27.0.tar.gz", hash = "sha256:15a13ffe7b3938da50b9ab77204664d82122617256f55b5ce403d593848ef635", size = 220099, upload-time = "2025-07-23T22:00:46.145Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/30/28/a35f64fc02e599808101617a21d447d241dadeba2aac1f4dc2d1179b8218/google_genai-1.24.0-py3-none-any.whl", hash = "sha256:98be8c51632576289ecc33cd84bcdaf4356ef0bef04ac7578660c49175af22b9", size = 226065, upload-time = "2025-07-01T22:14:23.177Z" }, + { url = "https://files.pythonhosted.org/packages/5a/12/279afe7357af73f9737a3412b6f0bc1482075b896340eb46a2f9cb0fd791/google_genai-1.27.0-py3-none-any.whl", hash = "sha256:afd6b4efaf8ec1d20a6e6657d768b68d998d60007c6e220e9024e23c913c1833", size = 218489, upload-time = "2025-07-23T22:00:44.879Z" }, ] [[package]] @@ -1179,90 +877,44 @@ wheels = [ [[package]] name = "grpcio" -version = "1.73.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/79/e8/b43b851537da2e2f03fa8be1aef207e5cbfb1a2e014fbb6b40d24c177cd3/grpcio-1.73.1.tar.gz", hash = "sha256:7fce2cd1c0c1116cf3850564ebfc3264fba75d3c74a7414373f1238ea365ef87", size = 12730355, upload-time = "2025-06-26T01:53:24.622Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/41/456caf570c55d5ac26f4c1f2db1f2ac1467d5bf3bcd660cba3e0a25b195f/grpcio-1.73.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:921b25618b084e75d424a9f8e6403bfeb7abef074bb6c3174701e0f2542debcf", size = 5334621, upload-time = "2025-06-26T01:52:23.602Z" }, - { url = "https://files.pythonhosted.org/packages/2a/c2/9a15e179e49f235bb5e63b01590658c03747a43c9775e20c4e13ca04f4c4/grpcio-1.73.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:277b426a0ed341e8447fbf6c1d6b68c952adddf585ea4685aa563de0f03df887", size = 10601131, upload-time = "2025-06-26T01:52:25.691Z" }, - { url = "https://files.pythonhosted.org/packages/0c/1d/1d39e90ef6348a0964caa7c5c4d05f3bae2c51ab429eb7d2e21198ac9b6d/grpcio-1.73.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:96c112333309493c10e118d92f04594f9055774757f5d101b39f8150f8c25582", size = 5759268, upload-time = "2025-06-26T01:52:27.631Z" }, - { url = "https://files.pythonhosted.org/packages/8a/2b/2dfe9ae43de75616177bc576df4c36d6401e0959833b2e5b2d58d50c1f6b/grpcio-1.73.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f48e862aed925ae987eb7084409a80985de75243389dc9d9c271dd711e589918", size = 6409791, upload-time = "2025-06-26T01:52:29.711Z" }, - { url = "https://files.pythonhosted.org/packages/6e/66/e8fe779b23b5a26d1b6949e5c70bc0a5fd08f61a6ec5ac7760d589229511/grpcio-1.73.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a6c2cce218e28f5040429835fa34a29319071079e3169f9543c3fbeff166d2", size = 6003728, upload-time = "2025-06-26T01:52:31.352Z" }, - { url = "https://files.pythonhosted.org/packages/a9/39/57a18fcef567784108c4fc3f5441cb9938ae5a51378505aafe81e8e15ecc/grpcio-1.73.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:65b0458a10b100d815a8426b1442bd17001fdb77ea13665b2f7dc9e8587fdc6b", size = 6103364, upload-time = "2025-06-26T01:52:33.028Z" }, - { url = "https://files.pythonhosted.org/packages/c5/46/28919d2aa038712fc399d02fa83e998abd8c1f46c2680c5689deca06d1b2/grpcio-1.73.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0a9f3ea8dce9eae9d7cb36827200133a72b37a63896e0e61a9d5ec7d61a59ab1", size = 6749194, upload-time = "2025-06-26T01:52:34.734Z" }, - { url = "https://files.pythonhosted.org/packages/3d/56/3898526f1fad588c5d19a29ea0a3a4996fb4fa7d7c02dc1be0c9fd188b62/grpcio-1.73.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:de18769aea47f18e782bf6819a37c1c528914bfd5683b8782b9da356506190c8", size = 6283902, upload-time = 
"2025-06-26T01:52:36.503Z" }, - { url = "https://files.pythonhosted.org/packages/dc/64/18b77b89c5870d8ea91818feb0c3ffb5b31b48d1b0ee3e0f0d539730fea3/grpcio-1.73.1-cp312-cp312-win32.whl", hash = "sha256:24e06a5319e33041e322d32c62b1e728f18ab8c9dbc91729a3d9f9e3ed336642", size = 3668687, upload-time = "2025-06-26T01:52:38.678Z" }, - { url = "https://files.pythonhosted.org/packages/3c/52/302448ca6e52f2a77166b2e2ed75f5d08feca4f2145faf75cb768cccb25b/grpcio-1.73.1-cp312-cp312-win_amd64.whl", hash = "sha256:303c8135d8ab176f8038c14cc10d698ae1db9c480f2b2823f7a987aa2a4c5646", size = 4334887, upload-time = "2025-06-26T01:52:40.743Z" }, - { url = "https://files.pythonhosted.org/packages/37/bf/4ca20d1acbefabcaba633ab17f4244cbbe8eca877df01517207bd6655914/grpcio-1.73.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:b310824ab5092cf74750ebd8a8a8981c1810cb2b363210e70d06ef37ad80d4f9", size = 5335615, upload-time = "2025-06-26T01:52:42.896Z" }, - { url = "https://files.pythonhosted.org/packages/75/ed/45c345f284abec5d4f6d77cbca9c52c39b554397eb7de7d2fcf440bcd049/grpcio-1.73.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:8f5a6df3fba31a3485096ac85b2e34b9666ffb0590df0cd044f58694e6a1f6b5", size = 10595497, upload-time = "2025-06-26T01:52:44.695Z" }, - { url = "https://files.pythonhosted.org/packages/a4/75/bff2c2728018f546d812b755455014bc718f8cdcbf5c84f1f6e5494443a8/grpcio-1.73.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:052e28fe9c41357da42250a91926a3e2f74c046575c070b69659467ca5aa976b", size = 5765321, upload-time = "2025-06-26T01:52:46.871Z" }, - { url = "https://files.pythonhosted.org/packages/70/3b/14e43158d3b81a38251b1d231dfb45a9b492d872102a919fbf7ba4ac20cd/grpcio-1.73.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c0bf15f629b1497436596b1cbddddfa3234273490229ca29561209778ebe182", size = 6415436, upload-time = "2025-06-26T01:52:49.134Z" }, - { url = "https://files.pythonhosted.org/packages/e5/3f/81d9650ca40b54338336fd360f36773be8cb6c07c036e751d8996eb96598/grpcio-1.73.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ab860d5bfa788c5a021fba264802e2593688cd965d1374d31d2b1a34cacd854", size = 6007012, upload-time = "2025-06-26T01:52:51.076Z" }, - { url = "https://files.pythonhosted.org/packages/55/f4/59edf5af68d684d0f4f7ad9462a418ac517201c238551529098c9aa28cb0/grpcio-1.73.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:ad1d958c31cc91ab050bd8a91355480b8e0683e21176522bacea225ce51163f2", size = 6105209, upload-time = "2025-06-26T01:52:52.773Z" }, - { url = "https://files.pythonhosted.org/packages/e4/a8/700d034d5d0786a5ba14bfa9ce974ed4c976936c2748c2bd87aa50f69b36/grpcio-1.73.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f43ffb3bd415c57224c7427bfb9e6c46a0b6e998754bfa0d00f408e1873dcbb5", size = 6753655, upload-time = "2025-06-26T01:52:55.064Z" }, - { url = "https://files.pythonhosted.org/packages/1f/29/efbd4ac837c23bc48e34bbaf32bd429f0dc9ad7f80721cdb4622144c118c/grpcio-1.73.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:686231cdd03a8a8055f798b2b54b19428cdf18fa1549bee92249b43607c42668", size = 6287288, upload-time = "2025-06-26T01:52:57.33Z" }, - { url = "https://files.pythonhosted.org/packages/d8/61/c6045d2ce16624bbe18b5d169c1a5ce4d6c3a47bc9d0e5c4fa6a50ed1239/grpcio-1.73.1-cp313-cp313-win32.whl", hash = "sha256:89018866a096e2ce21e05eabed1567479713ebe57b1db7cbb0f1e3b896793ba4", size = 3668151, upload-time = "2025-06-26T01:52:59.405Z" }, - { url = 
"https://files.pythonhosted.org/packages/c2/d7/77ac689216daee10de318db5aa1b88d159432dc76a130948a56b3aa671a2/grpcio-1.73.1-cp313-cp313-win_amd64.whl", hash = "sha256:4a68f8c9966b94dff693670a5cf2b54888a48a5011c5d9ce2295a1a1465ee84f", size = 4335747, upload-time = "2025-06-26T01:53:01.233Z" }, -] - -[[package]] -name = "grpcio-reflection" -version = "1.71.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "grpcio" }, - { name = "protobuf" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/41/14/4e5f8e902fa9461abae292773b921a578f68333c7c3e731bcff7514f78cd/grpcio_reflection-1.71.2.tar.gz", hash = "sha256:bedfac3d2095d6c066b16b66bfce85b4be3e92dc9f3b7121e6f019d24a9c09c0", size = 18798, upload-time = "2025-06-28T04:24:06.019Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/89/c99ff79b90315cf47dbcdd86babb637764e5f14f523d622020bfee57dc4d/grpcio_reflection-1.71.2-py3-none-any.whl", hash = "sha256:c4f1a0959acb94ec9e1369bb7dab827cc9a6efcc448bdb10436246c8e52e2f57", size = 22684, upload-time = "2025-06-28T04:23:44.759Z" }, +version = "1.74.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/38/b4/35feb8f7cab7239c5b94bd2db71abb3d6adb5f335ad8f131abb6060840b6/grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1", size = 12756048, upload-time = "2025-07-24T18:54:23.039Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/5d/e504d5d5c4469823504f65687d6c8fb97b7f7bf0b34873b7598f1df24630/grpcio-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8", size = 5445551, upload-time = "2025-07-24T18:53:23.641Z" }, + { url = "https://files.pythonhosted.org/packages/43/01/730e37056f96f2f6ce9f17999af1556df62ee8dab7fa48bceeaab5fd3008/grpcio-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6", size = 10979810, upload-time = "2025-07-24T18:53:25.349Z" }, + { url = "https://files.pythonhosted.org/packages/79/3d/09fd100473ea5c47083889ca47ffd356576173ec134312f6aa0e13111dee/grpcio-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5", size = 5941946, upload-time = "2025-07-24T18:53:27.387Z" }, + { url = "https://files.pythonhosted.org/packages/8a/99/12d2cca0a63c874c6d3d195629dcd85cdf5d6f98a30d8db44271f8a97b93/grpcio-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49", size = 6621763, upload-time = "2025-07-24T18:53:29.193Z" }, + { url = "https://files.pythonhosted.org/packages/9d/2c/930b0e7a2f1029bbc193443c7bc4dc2a46fedb0203c8793dcd97081f1520/grpcio-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7", size = 6180664, upload-time = "2025-07-24T18:53:30.823Z" }, + { url = "https://files.pythonhosted.org/packages/db/d5/ff8a2442180ad0867717e670f5ec42bfd8d38b92158ad6bcd864e6d4b1ed/grpcio-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3", size = 6301083, upload-time = "2025-07-24T18:53:32.454Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ba/b361d390451a37ca118e4ec7dccec690422e05bc85fba2ec72b06cefec9f/grpcio-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707", size = 6994132, upload-time = "2025-07-24T18:53:34.506Z" }, + { url = "https://files.pythonhosted.org/packages/3b/0c/3a5fa47d2437a44ced74141795ac0251bbddeae74bf81df3447edd767d27/grpcio-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b", size = 6489616, upload-time = "2025-07-24T18:53:36.217Z" }, + { url = "https://files.pythonhosted.org/packages/ae/95/ab64703b436d99dc5217228babc76047d60e9ad14df129e307b5fec81fd0/grpcio-1.74.0-cp312-cp312-win32.whl", hash = "sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c", size = 3807083, upload-time = "2025-07-24T18:53:37.911Z" }, + { url = "https://files.pythonhosted.org/packages/84/59/900aa2445891fc47a33f7d2f76e00ca5d6ae6584b20d19af9c06fa09bf9a/grpcio-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc", size = 4490123, upload-time = "2025-07-24T18:53:39.528Z" }, + { url = "https://files.pythonhosted.org/packages/d4/d8/1004a5f468715221450e66b051c839c2ce9a985aa3ee427422061fcbb6aa/grpcio-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:2bc2d7d8d184e2362b53905cb1708c84cb16354771c04b490485fa07ce3a1d89", size = 5449488, upload-time = "2025-07-24T18:53:41.174Z" }, + { url = "https://files.pythonhosted.org/packages/94/0e/33731a03f63740d7743dced423846c831d8e6da808fcd02821a4416df7fa/grpcio-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:c14e803037e572c177ba54a3e090d6eb12efd795d49327c5ee2b3bddb836bf01", size = 10974059, upload-time = "2025-07-24T18:53:43.066Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c6/3d2c14d87771a421205bdca991467cfe473ee4c6a1231c1ede5248c62ab8/grpcio-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f6ec94f0e50eb8fa1744a731088b966427575e40c2944a980049798b127a687e", size = 5945647, upload-time = "2025-07-24T18:53:45.269Z" }, + { url = "https://files.pythonhosted.org/packages/c5/83/5a354c8aaff58594eef7fffebae41a0f8995a6258bbc6809b800c33d4c13/grpcio-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:566b9395b90cc3d0d0c6404bc8572c7c18786ede549cdb540ae27b58afe0fb91", size = 6626101, upload-time = "2025-07-24T18:53:47.015Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ca/4fdc7bf59bf6994aa45cbd4ef1055cd65e2884de6113dbd49f75498ddb08/grpcio-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1ea6176d7dfd5b941ea01c2ec34de9531ba494d541fe2057c904e601879f249", size = 6182562, upload-time = "2025-07-24T18:53:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/fd/48/2869e5b2c1922583686f7ae674937986807c2f676d08be70d0a541316270/grpcio-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:64229c1e9cea079420527fa8ac45d80fc1e8d3f94deaa35643c381fa8d98f362", size = 6303425, upload-time = "2025-07-24T18:53:50.847Z" }, + { url = "https://files.pythonhosted.org/packages/a6/0e/bac93147b9a164f759497bc6913e74af1cb632c733c7af62c0336782bd38/grpcio-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:0f87bddd6e27fc776aacf7ebfec367b6d49cad0455123951e4488ea99d9b9b8f", size = 6996533, upload-time = "2025-07-24T18:53:52.747Z" }, + { url = "https://files.pythonhosted.org/packages/84/35/9f6b2503c1fd86d068b46818bbd7329db26a87cdd8c01e0d1a9abea1104c/grpcio-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3b03d8f2a07f0fea8c8f74deb59f8352b770e3900d143b3d1475effcb08eec20", size = 6491489, 
upload-time = "2025-07-24T18:53:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/75/33/a04e99be2a82c4cbc4039eb3a76f6c3632932b9d5d295221389d10ac9ca7/grpcio-1.74.0-cp313-cp313-win32.whl", hash = "sha256:b6a73b2ba83e663b2480a90b82fdae6a7aa6427f62bf43b29912c0cfd1aa2bfa", size = 3805811, upload-time = "2025-07-24T18:53:56.798Z" }, + { url = "https://files.pythonhosted.org/packages/34/80/de3eb55eb581815342d097214bed4c59e806b05f1b3110df03b2280d6dfd/grpcio-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd3c71aeee838299c5887230b8a1822795325ddfea635edd82954c1eaa831e24", size = 4489214, upload-time = "2025-07-24T18:53:59.771Z" }, ] [[package]] name = "grpcio-status" -version = "1.71.2" +version = "1.74.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "googleapis-common-protos" }, { name = "grpcio" }, { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fd/d1/b6e9877fedae3add1afdeae1f89d1927d296da9cf977eca0eb08fb8a460e/grpcio_status-1.71.2.tar.gz", hash = "sha256:c7a97e176df71cdc2c179cd1847d7fc86cca5832ad12e9798d7fed6b7a1aab50", size = 13677, upload-time = "2025-06-28T04:24:05.426Z" } +sdist = { url = "https://files.pythonhosted.org/packages/93/22/238c5f01e6837df54494deb08d5c772bc3f5bf5fb80a15dce254892d1a81/grpcio_status-1.74.0.tar.gz", hash = "sha256:c58c1b24aa454e30f1fc6a7e0dbbc194c54a408143971a94b5f4e40bb5831432", size = 13662, upload-time = "2025-07-24T19:01:56.874Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/67/58/317b0134129b556a93a3b0afe00ee675b5657f0155509e22fcb853bafe2d/grpcio_status-1.71.2-py3-none-any.whl", hash = "sha256:803c98cb6a8b7dc6dbb785b1111aed739f241ab5e9da0bba96888aa74704cfd3", size = 14424, upload-time = "2025-06-28T04:23:42.136Z" }, -] - -[[package]] -name = "grpcio-tools" -version = "1.71.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "grpcio" }, - { name = "protobuf" }, - { name = "setuptools" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ad/9a/edfefb47f11ef6b0f39eea4d8f022c5bb05ac1d14fcc7058e84a51305b73/grpcio_tools-1.71.2.tar.gz", hash = "sha256:b5304d65c7569b21270b568e404a5a843cf027c66552a6a0978b23f137679c09", size = 5330655, upload-time = "2025-06-28T04:22:00.308Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/d3/3ed30a9c5b2424627b4b8411e2cd6a1a3f997d3812dbc6a8630a78bcfe26/grpcio_tools-1.71.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:bfc0b5d289e383bc7d317f0e64c9dfb59dc4bef078ecd23afa1a816358fb1473", size = 2385479, upload-time = "2025-06-28T04:21:10.413Z" }, - { url = "https://files.pythonhosted.org/packages/54/61/e0b7295456c7e21ef777eae60403c06835160c8d0e1e58ebfc7d024c51d3/grpcio_tools-1.71.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:b4669827716355fa913b1376b1b985855d5cfdb63443f8d18faf210180199006", size = 5431521, upload-time = "2025-06-28T04:21:12.261Z" }, - { url = "https://files.pythonhosted.org/packages/75/d7/7bcad6bcc5f5b7fab53e6bce5db87041f38ef3e740b1ec2d8c49534fa286/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:d4071f9b44564e3f75cdf0f05b10b3e8c7ea0ca5220acbf4dc50b148552eef2f", size = 2350289, upload-time = "2025-06-28T04:21:13.625Z" }, - { url = "https://files.pythonhosted.org/packages/b2/8a/e4c1c4cb8c9ff7f50b7b2bba94abe8d1e98ea05f52a5db476e7f1c1a3c70/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a28eda8137d587eb30081384c256f5e5de7feda34776f89848b846da64e4be35", size = 2743321, upload-time = 
"2025-06-28T04:21:15.007Z" }, - { url = "https://files.pythonhosted.org/packages/fd/aa/95bc77fda5c2d56fb4a318c1b22bdba8914d5d84602525c99047114de531/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b19c083198f5eb15cc69c0a2f2c415540cbc636bfe76cea268e5894f34023b40", size = 2474005, upload-time = "2025-06-28T04:21:16.443Z" }, - { url = "https://files.pythonhosted.org/packages/c9/ff/ca11f930fe1daa799ee0ce1ac9630d58a3a3deed3dd2f465edb9a32f299d/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:784c284acda0d925052be19053d35afbf78300f4d025836d424cf632404f676a", size = 2851559, upload-time = "2025-06-28T04:21:18.139Z" }, - { url = "https://files.pythonhosted.org/packages/64/10/c6fc97914c7e19c9bb061722e55052fa3f575165da9f6510e2038d6e8643/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:381e684d29a5d052194e095546eef067201f5af30fd99b07b5d94766f44bf1ae", size = 3300622, upload-time = "2025-06-28T04:21:20.291Z" }, - { url = "https://files.pythonhosted.org/packages/e5/d6/965f36cfc367c276799b730d5dd1311b90a54a33726e561393b808339b04/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3e4b4801fabd0427fc61d50d09588a01b1cfab0ec5e8a5f5d515fbdd0891fd11", size = 2913863, upload-time = "2025-06-28T04:21:22.196Z" }, - { url = "https://files.pythonhosted.org/packages/8d/f0/c05d5c3d0c1d79ac87df964e9d36f1e3a77b60d948af65bec35d3e5c75a3/grpcio_tools-1.71.2-cp312-cp312-win32.whl", hash = "sha256:84ad86332c44572305138eafa4cc30040c9a5e81826993eae8227863b700b490", size = 945744, upload-time = "2025-06-28T04:21:23.463Z" }, - { url = "https://files.pythonhosted.org/packages/e2/e9/c84c1078f0b7af7d8a40f5214a9bdd8d2a567ad6c09975e6e2613a08d29d/grpcio_tools-1.71.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e1108d37eecc73b1c4a27350a6ed921b5dda25091700c1da17cfe30761cd462", size = 1117695, upload-time = "2025-06-28T04:21:25.22Z" }, - { url = "https://files.pythonhosted.org/packages/60/9c/bdf9c5055a1ad0a09123402d73ecad3629f75b9cf97828d547173b328891/grpcio_tools-1.71.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:b0f0a8611614949c906e25c225e3360551b488d10a366c96d89856bcef09f729", size = 2384758, upload-time = "2025-06-28T04:21:26.712Z" }, - { url = "https://files.pythonhosted.org/packages/49/d0/6aaee4940a8fb8269c13719f56d69c8d39569bee272924086aef81616d4a/grpcio_tools-1.71.2-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:7931783ea7ac42ac57f94c5047d00a504f72fbd96118bf7df911bb0e0435fc0f", size = 5443127, upload-time = "2025-06-28T04:21:28.383Z" }, - { url = "https://files.pythonhosted.org/packages/d9/11/50a471dcf301b89c0ed5ab92c533baced5bd8f796abfd133bbfadf6b60e5/grpcio_tools-1.71.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:d188dc28e069aa96bb48cb11b1338e47ebdf2e2306afa58a8162cc210172d7a8", size = 2349627, upload-time = "2025-06-28T04:21:30.254Z" }, - { url = "https://files.pythonhosted.org/packages/bb/66/e3dc58362a9c4c2fbe98a7ceb7e252385777ebb2bbc7f42d5ab138d07ace/grpcio_tools-1.71.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f36c4b3cc42ad6ef67430639174aaf4a862d236c03c4552c4521501422bfaa26", size = 2742932, upload-time = "2025-06-28T04:21:32.325Z" }, - { url = "https://files.pythonhosted.org/packages/b7/1e/1e07a07ed8651a2aa9f56095411198385a04a628beba796f36d98a5a03ec/grpcio_tools-1.71.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bd9ed12ce93b310f0cef304176049d0bc3b9f825e9c8c6a23e35867fed6affd", size = 2473627, upload-time = 
"2025-06-28T04:21:33.752Z" }, - { url = "https://files.pythonhosted.org/packages/d3/f9/3b7b32e4acb419f3a0b4d381bc114fe6cd48e3b778e81273fc9e4748caad/grpcio_tools-1.71.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7ce27e76dd61011182d39abca38bae55d8a277e9b7fe30f6d5466255baccb579", size = 2850879, upload-time = "2025-06-28T04:21:35.241Z" }, - { url = "https://files.pythonhosted.org/packages/1e/99/cd9e1acd84315ce05ad1fcdfabf73b7df43807cf00c3b781db372d92b899/grpcio_tools-1.71.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:dcc17bf59b85c3676818f2219deacac0156492f32ca165e048427d2d3e6e1157", size = 3300216, upload-time = "2025-06-28T04:21:36.826Z" }, - { url = "https://files.pythonhosted.org/packages/9f/c0/66eab57b14550c5b22404dbf60635c9e33efa003bd747211981a9859b94b/grpcio_tools-1.71.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:706360c71bdd722682927a1fb517c276ccb816f1e30cb71f33553e5817dc4031", size = 2913521, upload-time = "2025-06-28T04:21:38.347Z" }, - { url = "https://files.pythonhosted.org/packages/05/9b/7c90af8f937d77005625d705ab1160bc42a7e7b021ee5c788192763bccd6/grpcio_tools-1.71.2-cp313-cp313-win32.whl", hash = "sha256:bcf751d5a81c918c26adb2d6abcef71035c77d6eb9dd16afaf176ee096e22c1d", size = 945322, upload-time = "2025-06-28T04:21:39.864Z" }, - { url = "https://files.pythonhosted.org/packages/5f/80/6db6247f767c94fe551761772f89ceea355ff295fd4574cb8efc8b2d1199/grpcio_tools-1.71.2-cp313-cp313-win_amd64.whl", hash = "sha256:b1581a1133552aba96a730178bc44f6f1a071f0eb81c5b6bc4c0f89f5314e2b8", size = 1117234, upload-time = "2025-06-28T04:21:41.893Z" }, + { url = "https://files.pythonhosted.org/packages/28/aa/1b1fe7d8ab699e1ec26d3a36b91d3df9f83a30abc07d4c881d0296b17b67/grpcio_status-1.74.0-py3-none-any.whl", hash = "sha256:52cdbd759a6760fc8f668098a03f208f493dd5c76bf8e02598bbbaf1f6fc2876", size = 14425, upload-time = "2025-07-24T19:01:19.963Z" }, ] [[package]] @@ -1289,15 +941,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f0/55/ef77a85ee443ae05a9e9cba1c9f0dd9241eb42da2aeba1dc50f51154c81a/hf_xet-1.1.5-cp37-abi3-win_amd64.whl", hash = "sha256:73e167d9807d166596b4b2f0b585c6d5bd84a26dea32843665a8b58f6edba245", size = 2738931, upload-time = "2025-06-20T21:48:39.482Z" }, ] -[[package]] -name = "html2text" -version = "2025.4.15" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f8/27/e158d86ba1e82967cc2f790b0cb02030d4a8bef58e0c79a8590e9678107f/html2text-2025.4.15.tar.gz", hash = "sha256:948a645f8f0bc3abe7fd587019a2197a12436cd73d0d4908af95bfc8da337588", size = 64316, upload-time = "2025-04-15T04:02:30.045Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/84/1a0f9555fd5f2b1c924ff932d99b40a0f8a6b12f6dd625e2a47f415b00ea/html2text-2025.4.15-py3-none-any.whl", hash = "sha256:00569167ffdab3d7767a4cdf589b7f57e777a5ed28d12907d8c58769ec734acc", size = 34656, upload-time = "2025-04-15T04:02:28.44Z" }, -] - [[package]] name = "httpcore" version = "1.0.9" @@ -1349,7 +992,7 @@ wheels = [ [[package]] name = "huggingface-hub" -version = "0.33.4" +version = "0.34.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -1361,21 +1004,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/9e/9366b7349fc125dd68b9d384a0fea84d67b7497753fe92c71b67e13f47c4/huggingface_hub-0.33.4.tar.gz", hash = "sha256:6af13478deae120e765bfd92adad0ae1aec1ad8c439b46f23058ad5956cbca0a", size = 426674, 
upload-time = "2025-07-11T12:32:48.694Z" } +sdist = { url = "https://files.pythonhosted.org/packages/11/53/572b9c03ca0cabb3d71e02b1750b595196332cfb8c4d74a90de383451171/huggingface_hub-0.34.2.tar.gz", hash = "sha256:a27c1ba3d2a70b378dce546c8be3a90349a64e6bd5d7a806679d4bf5e5d2d8fe", size = 456837, upload-time = "2025-07-28T10:12:09.32Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/46/7b/98daa50a2db034cab6cd23a3de04fa2358cb691593d28e9130203eb7a805/huggingface_hub-0.33.4-py3-none-any.whl", hash = "sha256:09f9f4e7ca62547c70f8b82767eefadd2667f4e116acba2e3e62a5a81815a7bb", size = 515339, upload-time = "2025-07-11T12:32:46.346Z" }, -] - -[[package]] -name = "humanfriendly" -version = "10.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyreadline3", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702, upload-time = "2021-09-17T21:40:43.31Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794, upload-time = "2021-09-17T21:40:39.897Z" }, + { url = "https://files.pythonhosted.org/packages/24/20/5ee412acef0af05bd3ccc78186ccb7ca672f9998a7cbc94c011df8f101f4/huggingface_hub-0.34.2-py3-none-any.whl", hash = "sha256:699843fc58d3d257dbd3cb014e0cd34066a56372246674322ba0909981ec239c", size = 558843, upload-time = "2025-07-28T10:12:07.064Z" }, ] [[package]] @@ -1408,84 +1039,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, ] -[[package]] -name = "ipykernel" -version = "6.29.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "appnope", marker = "sys_platform == 'darwin'" }, - { name = "comm" }, - { name = "debugpy" }, - { name = "ipython" }, - { name = "jupyter-client" }, - { name = "jupyter-core" }, - { name = "matplotlib-inline" }, - { name = "nest-asyncio" }, - { name = "packaging" }, - { name = "psutil" }, - { name = "pyzmq" }, - { name = "tornado" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e9/5c/67594cb0c7055dc50814b21731c22a601101ea3b1b50a9a1b090e11f5d0f/ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215", size = 163367, upload-time = "2024-07-01T14:07:22.543Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/94/5c/368ae6c01c7628438358e6d337c19b05425727fbb221d2a3c4303c372f42/ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5", size = 117173, upload-time = "2024-07-01T14:07:19.603Z" }, -] - -[[package]] -name = "ipython" -version = "9.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "decorator" }, - { name = "ipython-pygments-lexers" }, - { name = "jedi" }, - { name = "matplotlib-inline" }, - { name = "pexpect", marker = "sys_platform != 
'emscripten' and sys_platform != 'win32'" }, - { name = "prompt-toolkit" }, - { name = "pygments" }, - { name = "stack-data" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/54/80/406f9e3bde1c1fd9bf5a0be9d090f8ae623e401b7670d8f6fdf2ab679891/ipython-9.4.0.tar.gz", hash = "sha256:c033c6d4e7914c3d9768aabe76bbe87ba1dc66a92a05db6bfa1125d81f2ee270", size = 4385338, upload-time = "2025-07-01T11:11:30.606Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/63/f8/0031ee2b906a15a33d6bfc12dd09c3dfa966b3cb5b284ecfb7549e6ac3c4/ipython-9.4.0-py3-none-any.whl", hash = "sha256:25850f025a446d9b359e8d296ba175a36aedd32e83ca9b5060430fe16801f066", size = 611021, upload-time = "2025-07-01T11:11:27.85Z" }, -] - -[[package]] -name = "ipython-pygments-lexers" -version = "1.1.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pygments" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" }, -] - -[[package]] -name = "isodate" -version = "0.7.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705, upload-time = "2024-10-08T23:04:11.5Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" }, -] - -[[package]] -name = "jedi" -version = "0.19.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "parso" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" }, -] - [[package]] name = "jinja2" version = "3.1.6" @@ -1557,7 +1110,7 @@ wheels = [ [[package]] name = "jsonschema" -version = "4.24.0" +version = "4.25.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -1565,9 +1118,9 @@ dependencies = [ { name = "referencing" }, { name = "rpds-py" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/d3/1cf5326b923a53515d8f3a2cd442e6d7e94fcc444716e879ea70a0ce3177/jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196", size = 353480, 
upload-time = "2025-05-26T18:48:10.459Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d5/00/a297a868e9d0784450faa7365c2172a7d6110c763e30ba861867c32ae6a9/jsonschema-4.25.0.tar.gz", hash = "sha256:e63acf5c11762c0e6672ffb61482bdf57f0876684d8d249c0fe2d730d48bc55f", size = 356830, upload-time = "2025-07-18T15:39:45.11Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/3d/023389198f69c722d039351050738d6755376c8fd343e91dc493ea485905/jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d", size = 88709, upload-time = "2025-05-26T18:48:08.417Z" }, + { url = "https://files.pythonhosted.org/packages/fe/54/c86cd8e011fe98803d7e382fd67c0df5ceab8d2b7ad8c5a81524f791551c/jsonschema-4.25.0-py3-none-any.whl", hash = "sha256:24c2e8da302de79c8b9382fee3e76b355e44d2a4364bb207159ce10b517bd716", size = 89184, upload-time = "2025-07-18T15:39:42.956Z" }, ] [[package]] @@ -1583,107 +1136,64 @@ wheels = [ ] [[package]] -name = "jupyter-client" -version = "8.6.3" -source = { registry = "https://pypi.org/simple" } +name = "kagent" +version = "0.3.0" +source = { editable = "packages/kagent" } dependencies = [ - { name = "jupyter-core" }, - { name = "python-dateutil" }, - { name = "pyzmq" }, - { name = "tornado" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/71/22/bf9f12fdaeae18019a468b68952a60fe6dbab5d67cd2a103cac7659b41ca/jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419", size = 342019, upload-time = "2024-09-17T10:44:17.613Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f", size = 106105, upload-time = "2024-09-17T10:44:15.218Z" }, + { name = "aiofiles" }, + { name = "fastapi" }, + { name = "kagent-adk" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-grpc" }, + { name = "opentelemetry-instrumentation-httpx" }, + { name = "opentelemetry-instrumentation-openai" }, + { name = "opentelemetry-sdk" }, + { name = "protobuf" }, + { name = "typer" }, ] -[[package]] -name = "jupyter-core" -version = "5.8.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "platformdirs" }, - { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/99/1b/72906d554acfeb588332eaaa6f61577705e9ec752ddb486f302dafa292d9/jupyter_core-5.8.1.tar.gz", hash = "sha256:0a5f9706f70e64786b75acba995988915ebd4601c8a52e534a40b51c95f59941", size = 88923, upload-time = "2025-05-27T07:38:16.655Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/57/6bffd4b20b88da3800c5d691e0337761576ee688eb01299eae865689d2df/jupyter_core-5.8.1-py3-none-any.whl", hash = "sha256:c28d268fc90fb53f1338ded2eb410704c5449a358406e8a948b75706e24863d0", size = 28880, upload-time = "2025-05-27T07:38:15.137Z" }, +[package.metadata] +requires-dist = [ + { name = "aiofiles", specifier = ">=24.1.0" }, + { name = "fastapi", specifier = ">=0.115.1" }, + { name = "kagent-adk", editable = "packages/kagent-adk" }, + { name = "opentelemetry-api", specifier = ">=1.32.0" }, + { name = "opentelemetry-exporter-otlp-proto-grpc", specifier = ">=1.32.0" }, + { name = 
"opentelemetry-instrumentation-httpx", specifier = ">=0.52.0" }, + { name = "opentelemetry-instrumentation-openai", specifier = ">=0.39.0" }, + { name = "opentelemetry-sdk", specifier = ">=1.32.0" }, + { name = "protobuf", specifier = ">=6.31.1" }, + { name = "typer", specifier = ">=0.15.0" }, ] [[package]] -name = "kagent" +name = "kagent-adk" version = "0.3.0" -source = { editable = "." } +source = { editable = "packages/kagent-adk" } dependencies = [ { name = "a2a-sdk" }, - { name = "aiofiles" }, - { name = "alembic" }, { name = "anthropic", extra = ["vertex"] }, { name = "anyio" }, - { name = "autogen-agentchat" }, - { name = "autogen-core" }, - { name = "autogen-ext", extra = ["anthropic", "azure", "mcp", "ollama", "openai"] }, - { name = "azure-ai-inference" }, - { name = "azure-ai-projects" }, - { name = "azure-core" }, - { name = "azure-identity" }, - { name = "azure-search-documents" }, - { name = "azure-storage-blob" }, - { name = "bs4" }, - { name = "click" }, - { name = "distro" }, { name = "fastapi" }, { name = "google-adk" }, { name = "google-auth" }, { name = "google-genai" }, - { name = "h11" }, - { name = "html2text" }, { name = "httpx" }, - { name = "idna" }, - { name = "isodate" }, - { name = "jiter" }, { name = "jsonref" }, { name = "litellm" }, - { name = "loguru" }, - { name = "magika" }, - { name = "markitdown" }, { name = "mcp" }, - { name = "numpy" }, - { name = "ollama" }, - { name = "onnxruntime" }, { name = "openai" }, { name = "opentelemetry-api" }, { name = "opentelemetry-exporter-otlp-proto-grpc" }, { name = "opentelemetry-instrumentation-httpx" }, { name = "opentelemetry-instrumentation-openai" }, { name = "opentelemetry-sdk" }, - { name = "pandas" }, - { name = "pillow" }, - { name = "pinecone" }, { name = "protobuf" }, { name = "pydantic" }, - { name = "python-dotenv" }, - { name = "pyyaml" }, - { name = "sniffio" }, - { name = "sqlalchemy" }, - { name = "sqlite-vec" }, - { name = "sqlmodel" }, - { name = "tiktoken" }, - { name = "typer" }, { name = "typing-extensions" }, - { name = "websockets" }, ] [package.optional-dependencies] -jupyter-executor = [ - { name = "ipykernel" }, - { name = "nbclient" }, -] -lint = [ - { name = "ruff" }, -] test = [ { name = "pytest" }, { name = "pytest-asyncio" }, @@ -1691,176 +1201,63 @@ test = [ [package.metadata] requires-dist = [ - { name = "a2a-sdk", specifier = ">=0.2.12" }, - { name = "aiofiles", specifier = ">=24.1.0" }, - { name = "alembic", specifier = ">=1.11.0" }, + { name = "a2a-sdk", specifier = ">=0.2.16" }, { name = "anthropic", extras = ["vertex"], specifier = ">=0.49.0" }, - { name = "anyio", specifier = ">=4.0.0" }, - { name = "autogen-agentchat", git = "https://github.com/Microsoft/autogen?subdirectory=python%2Fpackages%2Fautogen-agentchat&rev=c5b893d3f814185c326c8ff95767d2375d95818d" }, - { name = "autogen-core", git = "https://github.com/Microsoft/autogen?subdirectory=python%2Fpackages%2Fautogen-core&rev=c5b893d3f814185c326c8ff95767d2375d95818d" }, - { name = "autogen-ext", extras = ["anthropic", "azure", "mcp", "ollama", "openai"], git = "https://github.com/Microsoft/autogen?subdirectory=python%2Fpackages%2Fautogen-ext&rev=c5b893d3f814185c326c8ff95767d2375d95818d" }, - { name = "azure-ai-inference", specifier = ">=1.0.0b9" }, - { name = "azure-ai-projects", specifier = ">=1.0.0b11" }, - { name = "azure-core", specifier = ">=1.30.0" }, - { name = "azure-identity", specifier = ">=1.15.0" }, - { name = "azure-search-documents", specifier = ">=11.4.0" }, - { name = "azure-storage-blob", specifier = 
">=12.19.0" }, - { name = "bs4", specifier = ">=0.0.2" }, - { name = "click", specifier = ">=8.0.6" }, - { name = "distro", specifier = ">=1.8.0" }, - { name = "fastapi", specifier = ">=0.103.1" }, - { name = "google-adk", specifier = ">=1.6.1" }, + { name = "anyio", specifier = ">=4.9.0" }, + { name = "fastapi", specifier = ">=0.115.1" }, + { name = "google-adk", specifier = ">=1.8.0" }, { name = "google-auth", specifier = ">=2.40.2" }, - { name = "google-genai", specifier = ">=1.18.0" }, - { name = "h11", specifier = ">=0.16.0" }, - { name = "html2text", specifier = ">=2025.4.15" }, + { name = "google-genai", specifier = ">=1.21.1" }, { name = "httpx", specifier = ">=0.25.0" }, - { name = "idna", specifier = ">=3.4" }, - { name = "ipykernel", marker = "extra == 'jupyter-executor'", specifier = ">=6.29.5" }, - { name = "isodate", specifier = ">=0.6.1" }, - { name = "jiter", specifier = ">=0.4.0" }, { name = "jsonref", specifier = ">=1.1.0" }, { name = "litellm", specifier = ">=1.74.3" }, - { name = "loguru", specifier = ">=0.7.3" }, - { name = "magika", specifier = ">=0.6.2" }, - { name = "markitdown", specifier = ">=0.1.1" }, - { name = "mcp", specifier = ">=1.8.1" }, - { name = "nbclient", marker = "extra == 'jupyter-executor'", specifier = ">=0.10.2" }, - { name = "numpy", specifier = ">=2.2.4" }, - { name = "ollama", specifier = ">=0.3.0" }, - { name = "onnxruntime", specifier = ">=1.21.1" }, + { name = "mcp", specifier = ">=1.12.0" }, { name = "openai", specifier = ">=1.72.0" }, { name = "opentelemetry-api", specifier = ">=1.32.0" }, { name = "opentelemetry-exporter-otlp-proto-grpc", specifier = ">=1.32.0" }, { name = "opentelemetry-instrumentation-httpx", specifier = ">=0.52.0" }, { name = "opentelemetry-instrumentation-openai", specifier = ">=0.39.0" }, { name = "opentelemetry-sdk", specifier = ">=1.32.0" }, - { name = "pandas", specifier = ">=2.2.3" }, - { name = "pillow", specifier = ">=10.0.0" }, - { name = "pinecone", specifier = ">=6.0.2" }, - { name = "protobuf", specifier = ">=5.29.5" }, + { name = "protobuf", specifier = ">=6.31.1" }, { name = "pydantic", specifier = ">=2.5.0" }, { name = "pytest", marker = "extra == 'test'", specifier = ">=8.3.5" }, - { name = "pytest-asyncio", marker = "extra == 'test'", specifier = ">=0.25.3" }, - { name = "python-dotenv", specifier = ">=1.1.0" }, - { name = "pyyaml", specifier = ">=6.0.2" }, - { name = "ruff", marker = "extra == 'lint'", specifier = ">=0.11.5" }, - { name = "sniffio", specifier = ">=1.3.0" }, - { name = "sqlalchemy", specifier = ">=2.0.40" }, - { name = "sqlite-vec", specifier = ">=0.1.7a2" }, - { name = "sqlmodel", specifier = ">=0.0.24" }, - { name = "tiktoken", specifier = "==0.8.0" }, - { name = "typer", specifier = ">=0.15.0" }, - { name = "typing-extensions", specifier = ">=4.8.0" }, - { name = "websockets", specifier = ">=11.0" }, -] -provides-extras = ["jupyter-executor", "test", "lint"] - -[[package]] -name = "litellm" -version = "1.74.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "aiohttp" }, - { name = "click" }, - { name = "httpx" }, - { name = "importlib-metadata" }, - { name = "jinja2" }, - { name = "jsonschema" }, - { name = "openai" }, - { name = "pydantic" }, - { name = "python-dotenv" }, - { name = "tiktoken" }, - { name = "tokenizers" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/cd/e3/3091066f6682016840e9a36111560656b609b95de04b2ec7b19ad2580eaa/litellm-1.74.3.tar.gz", hash = 
"sha256:a9e87ebe78947ceec67e75f830f1c956cc653b84563574241acea9c84e7e3ca1", size = 9256457, upload-time = "2025-07-12T20:06:06.128Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/14/6f/07735b5178f32e28daf8a30ed6ad3e2c8c06ac374dc06aecde007110470f/litellm-1.74.3-py3-none-any.whl", hash = "sha256:638ec73633c6f2cf78a7343723d8f3bc13c192558fcbaa29f3ba6bc7802e8663", size = 8618899, upload-time = "2025-07-12T20:06:03.609Z" }, -] - -[[package]] -name = "loguru" -version = "0.7.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "win32-setctime", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" }, -] - -[[package]] -name = "magika" -version = "0.6.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "numpy" }, - { name = "onnxruntime" }, - { name = "python-dotenv" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fe/b6/8fdd991142ad3e037179a494b153f463024e5a211ef3ad948b955c26b4de/magika-0.6.2.tar.gz", hash = "sha256:37eb6ae8020f6e68f231bc06052c0a0cbe8e6fa27492db345e8dc867dbceb067", size = 3036634, upload-time = "2025-05-02T14:54:18.88Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/07/4f7748f34279f2852068256992377474f9700b6fbad6735d6be58605178f/magika-0.6.2-py3-none-any.whl", hash = "sha256:5ef72fbc07723029b3684ef81454bc224ac5f60986aa0fc5a28f4456eebcb5b2", size = 2967609, upload-time = "2025-05-02T14:54:09.696Z" }, - { url = "https://files.pythonhosted.org/packages/64/6d/0783af677e601d8a42258f0fbc47663abf435f927e58a8d2928296743099/magika-0.6.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:9109309328a1553886c8ff36c2ee9a5e9cfd36893ad81b65bf61a57debdd9d0e", size = 12404787, upload-time = "2025-05-02T14:54:16.963Z" }, - { url = "https://files.pythonhosted.org/packages/8a/ad/42e39748ddc4bbe55c2dc1093ce29079c04d096ac0d844f8ae66178bc3ed/magika-0.6.2-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:57cd1d64897634d15de552bd6b3ae9c6ff6ead9c60d384dc46497c08288e4559", size = 15091089, upload-time = "2025-05-02T14:54:11.59Z" }, - { url = "https://files.pythonhosted.org/packages/b0/1f/28e412d0ccedc068fbccdae6a6233faaa97ec3e5e2ffd242e49655b10064/magika-0.6.2-py3-none-win_amd64.whl", hash = "sha256:711f427a633e0182737dcc2074748004842f870643585813503ff2553b973b9f", size = 12385740, upload-time = "2025-05-02T14:54:14.096Z" }, -] - -[[package]] -name = "mako" -version = "1.3.10" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markupsafe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mdurl" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, + { name = "pytest-asyncio", marker = "extra == 'test'", specifier = ">=0.25.3" }, + { name = "typing-extensions", specifier = ">=4.8.0" }, ] +provides-extras = ["test"] [[package]] -name = "markdownify" -version = "1.1.0" +name = "litellm" +version = "1.74.9" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "beautifulsoup4" }, - { name = "six" }, + { name = "aiohttp" }, + { name = "click" }, + { name = "httpx" }, + { name = "importlib-metadata" }, + { name = "jinja2" }, + { name = "jsonschema" }, + { name = "openai" }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "tiktoken" }, + { name = "tokenizers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2f/78/c48fed23c7aebc2c16049062e72de1da3220c274de59d28c942acdc9ffb2/markdownify-1.1.0.tar.gz", hash = "sha256:449c0bbbf1401c5112379619524f33b63490a8fa479456d41de9dc9e37560ebd", size = 17127, upload-time = "2025-03-05T11:54:40.574Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/5d/646bebdb4769d77e6a018b9152c9ccf17afe15d0f88974f338d3f2ee7c15/litellm-1.74.9.tar.gz", hash = "sha256:4a32eff70342e1aee4d1cbf2de2a6ed64a7c39d86345c58d4401036af018b7de", size = 9660510, upload-time = "2025-07-28T16:42:39.297Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/11/b751af7ad41b254a802cf52f7bc1fca7cabe2388132f2ce60a1a6b9b9622/markdownify-1.1.0-py3-none-any.whl", hash = "sha256:32a5a08e9af02c8a6528942224c91b933b4bd2c7d078f9012943776fc313eeef", size = 13901, upload-time = "2025-03-05T11:54:39.454Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e4/f1546746049c99c6b8b247e2f34485b9eae36faa9322b84e2a17262e6712/litellm-1.74.9-py3-none-any.whl", hash = "sha256:ab8f8a6e4d8689d3c7c4f9c3bbc7e46212cc3ebc74ddd0f3c0c921bb459c9874", size = 8740449, upload-time = "2025-07-28T16:42:36.8Z" }, ] [[package]] -name = "markitdown" -version = "0.1.2" +name = "markdown-it-py" +version = "3.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "beautifulsoup4" }, - { name = "charset-normalizer" }, - { name = "defusedxml" }, - { name = "magika" }, - { name = "markdownify" }, - { name = "requests" }, + { name = "mdurl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/bd/b7ae7863ee556411fbb6ca19a4a7593ef2b3531d6cd10b979ba386a2dd4d/markitdown-0.1.2.tar.gz", hash = "sha256:85fe108a92bd18f317e75a36cf567a6fa812072612a898abf8c156d5d74c13c4", size = 39361, upload-time = "2025-05-28T17:06:10.423Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/33/d52d06b44c28e0db5c458690a4356e6abbb866f4abc00c0cf4eebb90ca78/markitdown-0.1.2-py3-none-any.whl", hash = "sha256:4881f0768794ffccb52d09dd86498813a6896ba9639b4fc15512817f56ed9d74", size = 57751, upload-time = "2025-05-28T17:06:08.722Z" }, + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, ] [[package]] @@ -1901,21 +1298,9 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, ] -[[package]] -name = "matplotlib-inline" -version = "0.1.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159, upload-time = "2024-04-15T13:44:44.803Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899, upload-time = "2024-04-15T13:44:43.265Z" }, -] - [[package]] name = "mcp" -version = "1.10.1" +version = "1.12.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -1925,13 +1310,14 @@ dependencies = [ { name = "pydantic" }, { name = "pydantic-settings" }, { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, { name = "sse-starlette" }, { name = "starlette" }, { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/68/63045305f29ff680a9cd5be360c755270109e6b76f696ea6824547ddbc30/mcp-1.10.1.tar.gz", hash = "sha256:aaa0957d8307feeff180da2d9d359f2b801f35c0c67f1882136239055ef034c2", size = 392969, upload-time = "2025-06-27T12:03:08.982Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/85/f36d538b1286b7758f35c1b69d93f2719d2df90c01bd074eadd35f6afc35/mcp-1.12.2.tar.gz", hash = "sha256:a4b7c742c50ce6ed6d6a6c096cca0e3893f5aecc89a59ed06d47c4e6ba41edcc", size = 426202, upload-time = "2025-07-24T18:29:05.175Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/3f/435a5b3d10ae242a9d6c2b33175551173c3c61fe637dc893be05c4ed0aaf/mcp-1.10.1-py3-none-any.whl", hash = "sha256:4d08301aefe906dce0fa482289db55ce1db831e3e67212e65b5e23ad8454b3c5", size = 150878, upload-time = "2025-06-27T12:03:07.328Z" }, + { url = "https://files.pythonhosted.org/packages/2f/cf/3fd38cfe43962452e4bfadc6966b2ea0afaf8e0286cb3991c247c8c33ebd/mcp-1.12.2-py3-none-any.whl", hash = 
"sha256:b86d584bb60193a42bd78aef01882c5c42d614e416cbf0480149839377ab5a5f", size = 158473, upload-time = "2025-07-24T18:29:03.419Z" }, ] [[package]] @@ -1943,41 +1329,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, ] -[[package]] -name = "mpmath" -version = "1.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106, upload-time = "2023-03-07T16:47:11.061Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, -] - -[[package]] -name = "msal" -version = "1.32.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cryptography" }, - { name = "pyjwt", extra = ["crypto"] }, - { name = "requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/3f/90/81dcc50f0be11a8c4dcbae1a9f761a26e5f905231330a7cacc9f04ec4c61/msal-1.32.3.tar.gz", hash = "sha256:5eea038689c78a5a70ca8ecbe1245458b55a857bd096efb6989c69ba15985d35", size = 151449, upload-time = "2025-04-25T13:12:34.204Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/bf/81516b9aac7fd867709984d08eb4db1d2e3fe1df795c8e442cde9b568962/msal-1.32.3-py3-none-any.whl", hash = "sha256:b2798db57760b1961b142f027ffb7c8169536bf77316e99a0df5c4aaebb11569", size = 115358, upload-time = "2025-04-25T13:12:33.034Z" }, -] - -[[package]] -name = "msal-extensions" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "msal" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/01/99/5d239b6156eddf761a636bded1118414d161bd6b7b37a9335549ed159396/msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4", size = 23315, upload-time = "2025-03-14T23:51:03.902Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" }, -] - [[package]] name = "multidict" version = "6.6.3" @@ -2041,127 +1392,72 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", size = 12313, upload-time = "2025-06-30T15:53:45.437Z" }, ] -[[package]] -name = "nbclient" -version = "0.10.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jupyter-client" }, - { name = "jupyter-core" }, - { name = "nbformat" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/87/66/7ffd18d58eae90d5721f9f39212327695b749e23ad44b3881744eaf4d9e8/nbclient-0.10.2.tar.gz", hash = 
"sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193", size = 62424, upload-time = "2024-12-19T10:32:27.164Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/34/6d/e7fa07f03a4a7b221d94b4d586edb754a9b0dc3c9e2c93353e9fa4e0d117/nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d", size = 25434, upload-time = "2024-12-19T10:32:24.139Z" }, -] - -[[package]] -name = "nbformat" -version = "5.10.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "fastjsonschema" }, - { name = "jsonschema" }, - { name = "jupyter-core" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6d/fd/91545e604bc3dad7dca9ed03284086039b294c6b3d75c0d2fa45f9e9caf3/nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a", size = 142749, upload-time = "2024-04-04T11:20:37.371Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/82/0340caa499416c78e5d8f5f05947ae4bc3cba53c9f038ab6e9ed964e22f1/nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b", size = 78454, upload-time = "2024-04-04T11:20:34.895Z" }, -] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" }, -] - [[package]] name = "numpy" -version = "2.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2e/19/d7c972dfe90a353dbd3efbbe1d14a5951de80c99c9dc1b93cd998d51dc0f/numpy-2.3.1.tar.gz", hash = "sha256:1ec9ae20a4226da374362cca3c62cd753faf2f951440b0e3b98e93c235441d2b", size = 20390372, upload-time = "2025-06-21T12:28:33.469Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/56/71ad5022e2f63cfe0ca93559403d0edef14aea70a841d640bd13cdba578e/numpy-2.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2959d8f268f3d8ee402b04a9ec4bb7604555aeacf78b360dc4ec27f1d508177d", size = 20896664, upload-time = "2025-06-21T12:15:30.845Z" }, - { url = "https://files.pythonhosted.org/packages/25/65/2db52ba049813670f7f987cc5db6dac9be7cd95e923cc6832b3d32d87cef/numpy-2.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:762e0c0c6b56bdedfef9a8e1d4538556438288c4276901ea008ae44091954e29", size = 14131078, upload-time = "2025-06-21T12:15:52.23Z" }, - { url = "https://files.pythonhosted.org/packages/57/dd/28fa3c17b0e751047ac928c1e1b6990238faad76e9b147e585b573d9d1bd/numpy-2.3.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:867ef172a0976aaa1f1d1b63cf2090de8b636a7674607d514505fb7276ab08fc", size = 5112554, upload-time = "2025-06-21T12:16:01.434Z" }, - { url = "https://files.pythonhosted.org/packages/c9/fc/84ea0cba8e760c4644b708b6819d91784c290288c27aca916115e3311d17/numpy-2.3.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = 
"sha256:4e602e1b8682c2b833af89ba641ad4176053aaa50f5cacda1a27004352dde943", size = 6646560, upload-time = "2025-06-21T12:16:11.895Z" }, - { url = "https://files.pythonhosted.org/packages/61/b2/512b0c2ddec985ad1e496b0bd853eeb572315c0f07cd6997473ced8f15e2/numpy-2.3.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:8e333040d069eba1652fb08962ec5b76af7f2c7bce1df7e1418c8055cf776f25", size = 14260638, upload-time = "2025-06-21T12:16:32.611Z" }, - { url = "https://files.pythonhosted.org/packages/6e/45/c51cb248e679a6c6ab14b7a8e3ead3f4a3fe7425fc7a6f98b3f147bec532/numpy-2.3.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e7cbf5a5eafd8d230a3ce356d892512185230e4781a361229bd902ff403bc660", size = 16632729, upload-time = "2025-06-21T12:16:57.439Z" }, - { url = "https://files.pythonhosted.org/packages/e4/ff/feb4be2e5c09a3da161b412019caf47183099cbea1132fd98061808c2df2/numpy-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f1b8f26d1086835f442286c1d9b64bb3974b0b1e41bb105358fd07d20872952", size = 15565330, upload-time = "2025-06-21T12:17:20.638Z" }, - { url = "https://files.pythonhosted.org/packages/bc/6d/ceafe87587101e9ab0d370e4f6e5f3f3a85b9a697f2318738e5e7e176ce3/numpy-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ee8340cb48c9b7a5899d1149eece41ca535513a9698098edbade2a8e7a84da77", size = 18361734, upload-time = "2025-06-21T12:17:47.938Z" }, - { url = "https://files.pythonhosted.org/packages/2b/19/0fb49a3ea088be691f040c9bf1817e4669a339d6e98579f91859b902c636/numpy-2.3.1-cp312-cp312-win32.whl", hash = "sha256:e772dda20a6002ef7061713dc1e2585bc1b534e7909b2030b5a46dae8ff077ab", size = 6320411, upload-time = "2025-06-21T12:17:58.475Z" }, - { url = "https://files.pythonhosted.org/packages/b1/3e/e28f4c1dd9e042eb57a3eb652f200225e311b608632bc727ae378623d4f8/numpy-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:cfecc7822543abdea6de08758091da655ea2210b8ffa1faf116b940693d3df76", size = 12734973, upload-time = "2025-06-21T12:18:17.601Z" }, - { url = "https://files.pythonhosted.org/packages/04/a8/8a5e9079dc722acf53522b8f8842e79541ea81835e9b5483388701421073/numpy-2.3.1-cp312-cp312-win_arm64.whl", hash = "sha256:7be91b2239af2658653c5bb6f1b8bccafaf08226a258caf78ce44710a0160d30", size = 10191491, upload-time = "2025-06-21T12:18:33.585Z" }, - { url = "https://files.pythonhosted.org/packages/d4/bd/35ad97006d8abff8631293f8ea6adf07b0108ce6fec68da3c3fcca1197f2/numpy-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:25a1992b0a3fdcdaec9f552ef10d8103186f5397ab45e2d25f8ac51b1a6b97e8", size = 20889381, upload-time = "2025-06-21T12:19:04.103Z" }, - { url = "https://files.pythonhosted.org/packages/f1/4f/df5923874d8095b6062495b39729178eef4a922119cee32a12ee1bd4664c/numpy-2.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7dea630156d39b02a63c18f508f85010230409db5b2927ba59c8ba4ab3e8272e", size = 14152726, upload-time = "2025-06-21T12:19:25.599Z" }, - { url = "https://files.pythonhosted.org/packages/8c/0f/a1f269b125806212a876f7efb049b06c6f8772cf0121139f97774cd95626/numpy-2.3.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:bada6058dd886061f10ea15f230ccf7dfff40572e99fef440a4a857c8728c9c0", size = 5105145, upload-time = "2025-06-21T12:19:34.782Z" }, - { url = "https://files.pythonhosted.org/packages/6d/63/a7f7fd5f375b0361682f6ffbf686787e82b7bbd561268e4f30afad2bb3c0/numpy-2.3.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:a894f3816eb17b29e4783e5873f92faf55b710c2519e5c351767c51f79d8526d", size = 6639409, upload-time = "2025-06-21T12:19:45.228Z" }, - { url = 
"https://files.pythonhosted.org/packages/bf/0d/1854a4121af895aab383f4aa233748f1df4671ef331d898e32426756a8a6/numpy-2.3.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:18703df6c4a4fee55fd3d6e5a253d01c5d33a295409b03fda0c86b3ca2ff41a1", size = 14257630, upload-time = "2025-06-21T12:20:06.544Z" }, - { url = "https://files.pythonhosted.org/packages/50/30/af1b277b443f2fb08acf1c55ce9d68ee540043f158630d62cef012750f9f/numpy-2.3.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:5902660491bd7a48b2ec16c23ccb9124b8abfd9583c5fdfa123fe6b421e03de1", size = 16627546, upload-time = "2025-06-21T12:20:31.002Z" }, - { url = "https://files.pythonhosted.org/packages/6e/ec/3b68220c277e463095342d254c61be8144c31208db18d3fd8ef02712bcd6/numpy-2.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:36890eb9e9d2081137bd78d29050ba63b8dab95dff7912eadf1185e80074b2a0", size = 15562538, upload-time = "2025-06-21T12:20:54.322Z" }, - { url = "https://files.pythonhosted.org/packages/77/2b/4014f2bcc4404484021c74d4c5ee8eb3de7e3f7ac75f06672f8dcf85140a/numpy-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a780033466159c2270531e2b8ac063704592a0bc62ec4a1b991c7c40705eb0e8", size = 18360327, upload-time = "2025-06-21T12:21:21.053Z" }, - { url = "https://files.pythonhosted.org/packages/40/8d/2ddd6c9b30fcf920837b8672f6c65590c7d92e43084c25fc65edc22e93ca/numpy-2.3.1-cp313-cp313-win32.whl", hash = "sha256:39bff12c076812595c3a306f22bfe49919c5513aa1e0e70fac756a0be7c2a2b8", size = 6312330, upload-time = "2025-06-21T12:25:07.447Z" }, - { url = "https://files.pythonhosted.org/packages/dd/c8/beaba449925988d415efccb45bf977ff8327a02f655090627318f6398c7b/numpy-2.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:8d5ee6eec45f08ce507a6570e06f2f879b374a552087a4179ea7838edbcbfa42", size = 12731565, upload-time = "2025-06-21T12:25:26.444Z" }, - { url = "https://files.pythonhosted.org/packages/0b/c3/5c0c575d7ec78c1126998071f58facfc124006635da75b090805e642c62e/numpy-2.3.1-cp313-cp313-win_arm64.whl", hash = "sha256:0c4d9e0a8368db90f93bd192bfa771ace63137c3488d198ee21dfb8e7771916e", size = 10190262, upload-time = "2025-06-21T12:25:42.196Z" }, - { url = "https://files.pythonhosted.org/packages/ea/19/a029cd335cf72f79d2644dcfc22d90f09caa86265cbbde3b5702ccef6890/numpy-2.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:b0b5397374f32ec0649dd98c652a1798192042e715df918c20672c62fb52d4b8", size = 20987593, upload-time = "2025-06-21T12:21:51.664Z" }, - { url = "https://files.pythonhosted.org/packages/25/91/8ea8894406209107d9ce19b66314194675d31761fe2cb3c84fe2eeae2f37/numpy-2.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c5bdf2015ccfcee8253fb8be695516ac4457c743473a43290fd36eba6a1777eb", size = 14300523, upload-time = "2025-06-21T12:22:13.583Z" }, - { url = "https://files.pythonhosted.org/packages/a6/7f/06187b0066eefc9e7ce77d5f2ddb4e314a55220ad62dd0bfc9f2c44bac14/numpy-2.3.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d70f20df7f08b90a2062c1f07737dd340adccf2068d0f1b9b3d56e2038979fee", size = 5227993, upload-time = "2025-06-21T12:22:22.53Z" }, - { url = "https://files.pythonhosted.org/packages/e8/ec/a926c293c605fa75e9cfb09f1e4840098ed46d2edaa6e2152ee35dc01ed3/numpy-2.3.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:2fb86b7e58f9ac50e1e9dd1290154107e47d1eef23a0ae9145ded06ea606f992", size = 6736652, upload-time = "2025-06-21T12:22:33.629Z" }, - { url = "https://files.pythonhosted.org/packages/e3/62/d68e52fb6fde5586650d4c0ce0b05ff3a48ad4df4ffd1b8866479d1d671d/numpy-2.3.1-cp313-cp313t-manylinux_2_28_aarch64.whl", 
hash = "sha256:23ab05b2d241f76cb883ce8b9a93a680752fbfcbd51c50eff0b88b979e471d8c", size = 14331561, upload-time = "2025-06-21T12:22:55.056Z" }, - { url = "https://files.pythonhosted.org/packages/fc/ec/b74d3f2430960044bdad6900d9f5edc2dc0fb8bf5a0be0f65287bf2cbe27/numpy-2.3.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ce2ce9e5de4703a673e705183f64fd5da5bf36e7beddcb63a25ee2286e71ca48", size = 16693349, upload-time = "2025-06-21T12:23:20.53Z" }, - { url = "https://files.pythonhosted.org/packages/0d/15/def96774b9d7eb198ddadfcbd20281b20ebb510580419197e225f5c55c3e/numpy-2.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c4913079974eeb5c16ccfd2b1f09354b8fed7e0d6f2cab933104a09a6419b1ee", size = 15642053, upload-time = "2025-06-21T12:23:43.697Z" }, - { url = "https://files.pythonhosted.org/packages/2b/57/c3203974762a759540c6ae71d0ea2341c1fa41d84e4971a8e76d7141678a/numpy-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:010ce9b4f00d5c036053ca684c77441f2f2c934fd23bee058b4d6f196efd8280", size = 18434184, upload-time = "2025-06-21T12:24:10.708Z" }, - { url = "https://files.pythonhosted.org/packages/22/8a/ccdf201457ed8ac6245187850aff4ca56a79edbea4829f4e9f14d46fa9a5/numpy-2.3.1-cp313-cp313t-win32.whl", hash = "sha256:6269b9edfe32912584ec496d91b00b6d34282ca1d07eb10e82dfc780907d6c2e", size = 6440678, upload-time = "2025-06-21T12:24:21.596Z" }, - { url = "https://files.pythonhosted.org/packages/f1/7e/7f431d8bd8eb7e03d79294aed238b1b0b174b3148570d03a8a8a8f6a0da9/numpy-2.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:2a809637460e88a113e186e87f228d74ae2852a2e0c44de275263376f17b5bdc", size = 12870697, upload-time = "2025-06-21T12:24:40.644Z" }, - { url = "https://files.pythonhosted.org/packages/d4/ca/af82bf0fad4c3e573c6930ed743b5308492ff19917c7caaf2f9b6f9e2e98/numpy-2.3.1-cp313-cp313t-win_arm64.whl", hash = "sha256:eccb9a159db9aed60800187bc47a6d3451553f0e1b08b068d8b277ddfbb9b244", size = 10260376, upload-time = "2025-06-21T12:24:56.884Z" }, -] - -[[package]] -name = "ollama" -version = "0.5.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "httpx" }, - { name = "pydantic" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8d/96/c7fe0d2d1b3053be614822a7b722c7465161b3672ce90df71515137580a0/ollama-0.5.1.tar.gz", hash = "sha256:5a799e4dc4e7af638b11e3ae588ab17623ee019e496caaf4323efbaa8feeff93", size = 41112, upload-time = "2025-05-30T21:32:48.679Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/76/3f96c8cdbf3955d7a73ee94ce3e0db0755d6de1e0098a70275940d1aff2f/ollama-0.5.1-py3-none-any.whl", hash = "sha256:4c8839f35bc173c7057b1eb2cbe7f498c1a7e134eafc9192824c8aecb3617506", size = 13369, upload-time = "2025-05-30T21:32:47.429Z" }, -] - -[[package]] -name = "onnxruntime" -version = "1.22.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "coloredlogs" }, - { name = "flatbuffers" }, - { name = "numpy" }, - { name = "packaging" }, - { name = "protobuf" }, - { name = "sympy" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/de/9162872c6e502e9ac8c99a98a8738b2fab408123d11de55022ac4f92562a/onnxruntime-1.22.0-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:f3c0380f53c1e72a41b3f4d6af2ccc01df2c17844072233442c3a7e74851ab97", size = 34298046, upload-time = "2025-05-09T20:26:02.399Z" }, - { url = 
"https://files.pythonhosted.org/packages/03/79/36f910cd9fc96b444b0e728bba14607016079786adf032dae61f7c63b4aa/onnxruntime-1.22.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8601128eaef79b636152aea76ae6981b7c9fc81a618f584c15d78d42b310f1c", size = 14443220, upload-time = "2025-05-09T20:25:47.078Z" }, - { url = "https://files.pythonhosted.org/packages/8c/60/16d219b8868cc8e8e51a68519873bdb9f5f24af080b62e917a13fff9989b/onnxruntime-1.22.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6964a975731afc19dc3418fad8d4e08c48920144ff590149429a5ebe0d15fb3c", size = 16406377, upload-time = "2025-05-09T20:26:14.478Z" }, - { url = "https://files.pythonhosted.org/packages/36/b4/3f1c71ce1d3d21078a6a74c5483bfa2b07e41a8d2b8fb1e9993e6a26d8d3/onnxruntime-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0d534a43d1264d1273c2d4f00a5a588fa98d21117a3345b7104fa0bbcaadb9a", size = 12692233, upload-time = "2025-05-12T21:26:16.963Z" }, - { url = "https://files.pythonhosted.org/packages/a9/65/5cb5018d5b0b7cba820d2c4a1d1b02d40df538d49138ba36a509457e4df6/onnxruntime-1.22.0-cp313-cp313-macosx_13_0_universal2.whl", hash = "sha256:fe7c051236aae16d8e2e9ffbfc1e115a0cc2450e873a9c4cb75c0cc96c1dae07", size = 34298715, upload-time = "2025-05-09T20:26:05.634Z" }, - { url = "https://files.pythonhosted.org/packages/e1/89/1dfe1b368831d1256b90b95cb8d11da8ab769febd5c8833ec85ec1f79d21/onnxruntime-1.22.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6a6bbed10bc5e770c04d422893d3045b81acbbadc9fb759a2cd1ca00993da919", size = 14443266, upload-time = "2025-05-09T20:25:49.479Z" }, - { url = "https://files.pythonhosted.org/packages/1e/70/342514ade3a33ad9dd505dcee96ff1f0e7be6d0e6e9c911fe0f1505abf42/onnxruntime-1.22.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9fe45ee3e756300fccfd8d61b91129a121d3d80e9d38e01f03ff1295badc32b8", size = 16406707, upload-time = "2025-05-09T20:26:17.454Z" }, - { url = "https://files.pythonhosted.org/packages/3e/89/2f64e250945fa87140fb917ba377d6d0e9122e029c8512f389a9b7f953f4/onnxruntime-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:5a31d84ef82b4b05d794a4ce8ba37b0d9deb768fd580e36e17b39e0b4840253b", size = 12691777, upload-time = "2025-05-12T21:26:20.19Z" }, - { url = "https://files.pythonhosted.org/packages/9f/48/d61d5f1ed098161edd88c56cbac49207d7b7b149e613d2cd7e33176c63b3/onnxruntime-1.22.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2ac5bd9205d831541db4e508e586e764a74f14efdd3f89af7fd20e1bf4a1ed", size = 14454003, upload-time = "2025-05-09T20:25:52.287Z" }, - { url = "https://files.pythonhosted.org/packages/c3/16/873b955beda7bada5b0d798d3a601b2ff210e44ad5169f6d405b93892103/onnxruntime-1.22.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64845709f9e8a2809e8e009bc4c8f73b788cee9c6619b7d9930344eae4c9cd36", size = 16427482, upload-time = "2025-05-09T20:26:20.376Z" }, +version = "2.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/37/7d/3fec4199c5ffb892bed55cff901e4f39a58c81df9c44c280499e92cad264/numpy-2.3.2.tar.gz", hash = "sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48", size = 20489306, upload-time = "2025-07-24T21:32:07.553Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/6d/745dd1c1c5c284d17725e5c802ca4d45cfc6803519d777f087b71c9f4069/numpy-2.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:bc3186bea41fae9d8e90c2b4fb5f0a1f5a690682da79b92574d63f56b529080b", size = 20956420, upload-time = "2025-07-24T20:28:18.002Z" }, + { url = "https://files.pythonhosted.org/packages/bc/96/e7b533ea5740641dd62b07a790af5d9d8fec36000b8e2d0472bd7574105f/numpy-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f4f0215edb189048a3c03bd5b19345bdfa7b45a7a6f72ae5945d2a28272727f", size = 14184660, upload-time = "2025-07-24T20:28:39.522Z" }, + { url = "https://files.pythonhosted.org/packages/2b/53/102c6122db45a62aa20d1b18c9986f67e6b97e0d6fbc1ae13e3e4c84430c/numpy-2.3.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b1224a734cd509f70816455c3cffe13a4f599b1bf7130f913ba0e2c0b2006c0", size = 5113382, upload-time = "2025-07-24T20:28:48.544Z" }, + { url = "https://files.pythonhosted.org/packages/2b/21/376257efcbf63e624250717e82b4fae93d60178f09eb03ed766dbb48ec9c/numpy-2.3.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3dcf02866b977a38ba3ec10215220609ab9667378a9e2150615673f3ffd6c73b", size = 6647258, upload-time = "2025-07-24T20:28:59.104Z" }, + { url = "https://files.pythonhosted.org/packages/91/ba/f4ebf257f08affa464fe6036e13f2bf9d4642a40228781dc1235da81be9f/numpy-2.3.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:572d5512df5470f50ada8d1972c5f1082d9a0b7aa5944db8084077570cf98370", size = 14281409, upload-time = "2025-07-24T20:40:30.298Z" }, + { url = "https://files.pythonhosted.org/packages/59/ef/f96536f1df42c668cbacb727a8c6da7afc9c05ece6d558927fb1722693e1/numpy-2.3.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8145dd6d10df13c559d1e4314df29695613575183fa2e2d11fac4c208c8a1f73", size = 16641317, upload-time = "2025-07-24T20:40:56.625Z" }, + { url = "https://files.pythonhosted.org/packages/f6/a7/af813a7b4f9a42f498dde8a4c6fcbff8100eed00182cc91dbaf095645f38/numpy-2.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:103ea7063fa624af04a791c39f97070bf93b96d7af7eb23530cd087dc8dbe9dc", size = 16056262, upload-time = "2025-07-24T20:41:20.797Z" }, + { url = "https://files.pythonhosted.org/packages/8b/5d/41c4ef8404caaa7f05ed1cfb06afe16a25895260eacbd29b4d84dff2920b/numpy-2.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc927d7f289d14f5e037be917539620603294454130b6de200091e23d27dc9be", size = 18579342, upload-time = "2025-07-24T20:41:50.753Z" }, + { url = "https://files.pythonhosted.org/packages/a1/4f/9950e44c5a11636f4a3af6e825ec23003475cc9a466edb7a759ed3ea63bd/numpy-2.3.2-cp312-cp312-win32.whl", hash = "sha256:d95f59afe7f808c103be692175008bab926b59309ade3e6d25009e9a171f7036", size = 6320610, upload-time = "2025-07-24T20:42:01.551Z" }, + { url = "https://files.pythonhosted.org/packages/7c/2f/244643a5ce54a94f0a9a2ab578189c061e4a87c002e037b0829dd77293b6/numpy-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:9e196ade2400c0c737d93465327d1ae7c06c7cb8a1756121ebf54b06ca183c7f", size = 12786292, upload-time = "2025-07-24T20:42:20.738Z" }, + { url = "https://files.pythonhosted.org/packages/54/cd/7b5f49d5d78db7badab22d8323c1b6ae458fbf86c4fdfa194ab3cd4eb39b/numpy-2.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:ee807923782faaf60d0d7331f5e86da7d5e3079e28b291973c545476c2b00d07", size = 10194071, upload-time = "2025-07-24T20:42:36.657Z" }, + { url = "https://files.pythonhosted.org/packages/1c/c0/c6bb172c916b00700ed3bf71cb56175fd1f7dbecebf8353545d0b5519f6c/numpy-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c8d9727f5316a256425892b043736d63e89ed15bbfe6556c5ff4d9d4448ff3b3", size = 20949074, upload-time = 
"2025-07-24T20:43:07.813Z" }, + { url = "https://files.pythonhosted.org/packages/20/4e/c116466d22acaf4573e58421c956c6076dc526e24a6be0903219775d862e/numpy-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:efc81393f25f14d11c9d161e46e6ee348637c0a1e8a54bf9dedc472a3fae993b", size = 14177311, upload-time = "2025-07-24T20:43:29.335Z" }, + { url = "https://files.pythonhosted.org/packages/78/45/d4698c182895af189c463fc91d70805d455a227261d950e4e0f1310c2550/numpy-2.3.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dd937f088a2df683cbb79dda9a772b62a3e5a8a7e76690612c2737f38c6ef1b6", size = 5106022, upload-time = "2025-07-24T20:43:37.999Z" }, + { url = "https://files.pythonhosted.org/packages/9f/76/3e6880fef4420179309dba72a8c11f6166c431cf6dee54c577af8906f914/numpy-2.3.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:11e58218c0c46c80509186e460d79fbdc9ca1eb8d8aee39d8f2dc768eb781089", size = 6640135, upload-time = "2025-07-24T20:43:49.28Z" }, + { url = "https://files.pythonhosted.org/packages/34/fa/87ff7f25b3c4ce9085a62554460b7db686fef1e0207e8977795c7b7d7ba1/numpy-2.3.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ad4ebcb683a1f99f4f392cc522ee20a18b2bb12a2c1c42c3d48d5a1adc9d3d2", size = 14278147, upload-time = "2025-07-24T20:44:10.328Z" }, + { url = "https://files.pythonhosted.org/packages/1d/0f/571b2c7a3833ae419fe69ff7b479a78d313581785203cc70a8db90121b9a/numpy-2.3.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:938065908d1d869c7d75d8ec45f735a034771c6ea07088867f713d1cd3bbbe4f", size = 16635989, upload-time = "2025-07-24T20:44:34.88Z" }, + { url = "https://files.pythonhosted.org/packages/24/5a/84ae8dca9c9a4c592fe11340b36a86ffa9fd3e40513198daf8a97839345c/numpy-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:66459dccc65d8ec98cc7df61307b64bf9e08101f9598755d42d8ae65d9a7a6ee", size = 16053052, upload-time = "2025-07-24T20:44:58.872Z" }, + { url = "https://files.pythonhosted.org/packages/57/7c/e5725d99a9133b9813fcf148d3f858df98511686e853169dbaf63aec6097/numpy-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a7af9ed2aa9ec5950daf05bb11abc4076a108bd3c7db9aa7251d5f107079b6a6", size = 18577955, upload-time = "2025-07-24T20:45:26.714Z" }, + { url = "https://files.pythonhosted.org/packages/ae/11/7c546fcf42145f29b71e4d6f429e96d8d68e5a7ba1830b2e68d7418f0bbd/numpy-2.3.2-cp313-cp313-win32.whl", hash = "sha256:906a30249315f9c8e17b085cc5f87d3f369b35fedd0051d4a84686967bdbbd0b", size = 6311843, upload-time = "2025-07-24T20:49:24.444Z" }, + { url = "https://files.pythonhosted.org/packages/aa/6f/a428fd1cb7ed39b4280d057720fed5121b0d7754fd2a9768640160f5517b/numpy-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:c63d95dc9d67b676e9108fe0d2182987ccb0f11933c1e8959f42fa0da8d4fa56", size = 12782876, upload-time = "2025-07-24T20:49:43.227Z" }, + { url = "https://files.pythonhosted.org/packages/65/85/4ea455c9040a12595fb6c43f2c217257c7b52dd0ba332c6a6c1d28b289fe/numpy-2.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:b05a89f2fb84d21235f93de47129dd4f11c16f64c87c33f5e284e6a3a54e43f2", size = 10192786, upload-time = "2025-07-24T20:49:59.443Z" }, + { url = "https://files.pythonhosted.org/packages/80/23/8278f40282d10c3f258ec3ff1b103d4994bcad78b0cba9208317f6bb73da/numpy-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e6ecfeddfa83b02318f4d84acf15fbdbf9ded18e46989a15a8b6995dfbf85ab", size = 21047395, upload-time = "2025-07-24T20:45:58.821Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/2d/624f2ce4a5df52628b4ccd16a4f9437b37c35f4f8a50d00e962aae6efd7a/numpy-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:508b0eada3eded10a3b55725b40806a4b855961040180028f52580c4729916a2", size = 14300374, upload-time = "2025-07-24T20:46:20.207Z" }, + { url = "https://files.pythonhosted.org/packages/f6/62/ff1e512cdbb829b80a6bd08318a58698867bca0ca2499d101b4af063ee97/numpy-2.3.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:754d6755d9a7588bdc6ac47dc4ee97867271b17cee39cb87aef079574366db0a", size = 5228864, upload-time = "2025-07-24T20:46:30.58Z" }, + { url = "https://files.pythonhosted.org/packages/7d/8e/74bc18078fff03192d4032cfa99d5a5ca937807136d6f5790ce07ca53515/numpy-2.3.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f66e7d2b2d7712410d3bc5684149040ef5f19856f20277cd17ea83e5006286", size = 6737533, upload-time = "2025-07-24T20:46:46.111Z" }, + { url = "https://files.pythonhosted.org/packages/19/ea/0731efe2c9073ccca5698ef6a8c3667c4cf4eea53fcdcd0b50140aba03bc/numpy-2.3.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de6ea4e5a65d5a90c7d286ddff2b87f3f4ad61faa3db8dabe936b34c2275b6f8", size = 14352007, upload-time = "2025-07-24T20:47:07.1Z" }, + { url = "https://files.pythonhosted.org/packages/cf/90/36be0865f16dfed20f4bc7f75235b963d5939707d4b591f086777412ff7b/numpy-2.3.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3ef07ec8cbc8fc9e369c8dcd52019510c12da4de81367d8b20bc692aa07573a", size = 16701914, upload-time = "2025-07-24T20:47:32.459Z" }, + { url = "https://files.pythonhosted.org/packages/94/30/06cd055e24cb6c38e5989a9e747042b4e723535758e6153f11afea88c01b/numpy-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:27c9f90e7481275c7800dc9c24b7cc40ace3fdb970ae4d21eaff983a32f70c91", size = 16132708, upload-time = "2025-07-24T20:47:58.129Z" }, + { url = "https://files.pythonhosted.org/packages/9a/14/ecede608ea73e58267fd7cb78f42341b3b37ba576e778a1a06baffbe585c/numpy-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:07b62978075b67eee4065b166d000d457c82a1efe726cce608b9db9dd66a73a5", size = 18651678, upload-time = "2025-07-24T20:48:25.402Z" }, + { url = "https://files.pythonhosted.org/packages/40/f3/2fe6066b8d07c3685509bc24d56386534c008b462a488b7f503ba82b8923/numpy-2.3.2-cp313-cp313t-win32.whl", hash = "sha256:c771cfac34a4f2c0de8e8c97312d07d64fd8f8ed45bc9f5726a7e947270152b5", size = 6441832, upload-time = "2025-07-24T20:48:37.181Z" }, + { url = "https://files.pythonhosted.org/packages/0b/ba/0937d66d05204d8f28630c9c60bc3eda68824abde4cf756c4d6aad03b0c6/numpy-2.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:72dbebb2dcc8305c431b2836bcc66af967df91be793d63a24e3d9b741374c450", size = 12927049, upload-time = "2025-07-24T20:48:56.24Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ed/13542dd59c104d5e654dfa2ac282c199ba64846a74c2c4bcdbc3a0f75df1/numpy-2.3.2-cp313-cp313t-win_arm64.whl", hash = "sha256:72c6df2267e926a6d5286b0a6d556ebe49eae261062059317837fda12ddf0c1a", size = 10262935, upload-time = "2025-07-24T20:49:13.136Z" }, + { url = "https://files.pythonhosted.org/packages/c9/7c/7659048aaf498f7611b783e000c7268fcc4dcf0ce21cd10aad7b2e8f9591/numpy-2.3.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:448a66d052d0cf14ce9865d159bfc403282c9bc7bb2a31b03cc18b651eca8b1a", size = 20950906, upload-time = "2025-07-24T20:50:30.346Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/db/984bea9d4ddf7112a04cfdfb22b1050af5757864cfffe8e09e44b7f11a10/numpy-2.3.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:546aaf78e81b4081b2eba1d105c3b34064783027a06b3ab20b6eba21fb64132b", size = 14185607, upload-time = "2025-07-24T20:50:51.923Z" }, + { url = "https://files.pythonhosted.org/packages/e4/76/b3d6f414f4eca568f469ac112a3b510938d892bc5a6c190cb883af080b77/numpy-2.3.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:87c930d52f45df092f7578889711a0768094debf73cfcde105e2d66954358125", size = 5114110, upload-time = "2025-07-24T20:51:01.041Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d2/6f5e6826abd6bca52392ed88fe44a4b52aacb60567ac3bc86c67834c3a56/numpy-2.3.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:8dc082ea901a62edb8f59713c6a7e28a85daddcb67454c839de57656478f5b19", size = 6642050, upload-time = "2025-07-24T20:51:11.64Z" }, + { url = "https://files.pythonhosted.org/packages/c4/43/f12b2ade99199e39c73ad182f103f9d9791f48d885c600c8e05927865baf/numpy-2.3.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af58de8745f7fa9ca1c0c7c943616c6fe28e75d0c81f5c295810e3c83b5be92f", size = 14296292, upload-time = "2025-07-24T20:51:33.488Z" }, + { url = "https://files.pythonhosted.org/packages/5d/f9/77c07d94bf110a916b17210fac38680ed8734c236bfed9982fd8524a7b47/numpy-2.3.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed5527c4cf10f16c6d0b6bee1f89958bccb0ad2522c8cadc2efd318bcd545f5", size = 16638913, upload-time = "2025-07-24T20:51:58.517Z" }, + { url = "https://files.pythonhosted.org/packages/9b/d1/9d9f2c8ea399cc05cfff8a7437453bd4e7d894373a93cdc46361bbb49a7d/numpy-2.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:095737ed986e00393ec18ec0b21b47c22889ae4b0cd2d5e88342e08b01141f58", size = 16071180, upload-time = "2025-07-24T20:52:22.827Z" }, + { url = "https://files.pythonhosted.org/packages/4c/41/82e2c68aff2a0c9bf315e47d61951099fed65d8cb2c8d9dc388cb87e947e/numpy-2.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5e40e80299607f597e1a8a247ff8d71d79c5b52baa11cc1cce30aa92d2da6e0", size = 18576809, upload-time = "2025-07-24T20:52:51.015Z" }, + { url = "https://files.pythonhosted.org/packages/14/14/4b4fd3efb0837ed252d0f583c5c35a75121038a8c4e065f2c259be06d2d8/numpy-2.3.2-cp314-cp314-win32.whl", hash = "sha256:7d6e390423cc1f76e1b8108c9b6889d20a7a1f59d9a60cac4a050fa734d6c1e2", size = 6366410, upload-time = "2025-07-24T20:56:44.949Z" }, + { url = "https://files.pythonhosted.org/packages/11/9e/b4c24a6b8467b61aced5c8dc7dcfce23621baa2e17f661edb2444a418040/numpy-2.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:b9d0878b21e3918d76d2209c924ebb272340da1fb51abc00f986c258cd5e957b", size = 12918821, upload-time = "2025-07-24T20:57:06.479Z" }, + { url = "https://files.pythonhosted.org/packages/0e/0f/0dc44007c70b1007c1cef86b06986a3812dd7106d8f946c09cfa75782556/numpy-2.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:2738534837c6a1d0c39340a190177d7d66fdf432894f469728da901f8f6dc910", size = 10477303, upload-time = "2025-07-24T20:57:22.879Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3e/075752b79140b78ddfc9c0a1634d234cfdbc6f9bbbfa6b7504e445ad7d19/numpy-2.3.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:4d002ecf7c9b53240be3bb69d80f86ddbd34078bae04d87be81c1f58466f264e", size = 21047524, upload-time = "2025-07-24T20:53:22.086Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/6d/60e8247564a72426570d0e0ea1151b95ce5bd2f1597bb878a18d32aec855/numpy-2.3.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:293b2192c6bcce487dbc6326de5853787f870aeb6c43f8f9c6496db5b1781e45", size = 14300519, upload-time = "2025-07-24T20:53:44.053Z" }, + { url = "https://files.pythonhosted.org/packages/4d/73/d8326c442cd428d47a067070c3ac6cc3b651a6e53613a1668342a12d4479/numpy-2.3.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0a4f2021a6da53a0d580d6ef5db29947025ae8b35b3250141805ea9a32bbe86b", size = 5228972, upload-time = "2025-07-24T20:53:53.81Z" }, + { url = "https://files.pythonhosted.org/packages/34/2e/e71b2d6dad075271e7079db776196829019b90ce3ece5c69639e4f6fdc44/numpy-2.3.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9c144440db4bf3bb6372d2c3e49834cc0ff7bb4c24975ab33e01199e645416f2", size = 6737439, upload-time = "2025-07-24T20:54:04.742Z" }, + { url = "https://files.pythonhosted.org/packages/15/b0/d004bcd56c2c5e0500ffc65385eb6d569ffd3363cb5e593ae742749b2daa/numpy-2.3.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f92d6c2a8535dc4fe4419562294ff957f83a16ebdec66df0805e473ffaad8bd0", size = 14352479, upload-time = "2025-07-24T20:54:25.819Z" }, + { url = "https://files.pythonhosted.org/packages/11/e3/285142fcff8721e0c99b51686426165059874c150ea9ab898e12a492e291/numpy-2.3.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cefc2219baa48e468e3db7e706305fcd0c095534a192a08f31e98d83a7d45fb0", size = 16702805, upload-time = "2025-07-24T20:54:50.814Z" }, + { url = "https://files.pythonhosted.org/packages/33/c3/33b56b0e47e604af2c7cd065edca892d180f5899599b76830652875249a3/numpy-2.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:76c3e9501ceb50b2ff3824c3589d5d1ab4ac857b0ee3f8f49629d0de55ecf7c2", size = 16133830, upload-time = "2025-07-24T20:55:17.306Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ae/7b1476a1f4d6a48bc669b8deb09939c56dd2a439db1ab03017844374fb67/numpy-2.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:122bf5ed9a0221b3419672493878ba4967121514b1d7d4656a7580cd11dddcbf", size = 18652665, upload-time = "2025-07-24T20:55:46.665Z" }, + { url = "https://files.pythonhosted.org/packages/14/ba/5b5c9978c4bb161034148ade2de9db44ec316fab89ce8c400db0e0c81f86/numpy-2.3.2-cp314-cp314t-win32.whl", hash = "sha256:6f1ae3dcb840edccc45af496f312528c15b1f79ac318169d094e85e4bb35fdf1", size = 6514777, upload-time = "2025-07-24T20:55:57.66Z" }, + { url = "https://files.pythonhosted.org/packages/eb/46/3dbaf0ae7c17cdc46b9f662c56da2054887b8d9e737c1476f335c83d33db/numpy-2.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:087ffc25890d89a43536f75c5fe8770922008758e8eeeef61733957041ed2f9b", size = 13111856, upload-time = "2025-07-24T20:56:17.318Z" }, + { url = "https://files.pythonhosted.org/packages/c1/9e/1652778bce745a67b5fe05adde60ed362d38eb17d919a540e813d30f6874/numpy-2.3.2-cp314-cp314t-win_arm64.whl", hash = "sha256:092aeb3449833ea9c0bf0089d70c29ae480685dd2377ec9cdbbb620257f84631", size = 10544226, upload-time = "2025-07-24T20:56:34.509Z" }, ] [[package]] name = "openai" -version = "1.93.0" +version = "1.97.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -2173,22 +1469,22 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e4/d7/e91c6a9cf71726420cddf539852ee4c29176ebb716a702d9118d0409fd8e/openai-1.93.0.tar.gz", hash = 
"sha256:988f31ade95e1ff0585af11cc5a64510225e4f5cd392698c675d0a9265b8e337", size = 486573, upload-time = "2025-06-27T21:21:39.421Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/57/1c471f6b3efb879d26686d31582997615e969f3bb4458111c9705e56332e/openai-1.97.1.tar.gz", hash = "sha256:a744b27ae624e3d4135225da9b1c89c107a2a7e5bc4c93e5b7b5214772ce7a4e", size = 494267, upload-time = "2025-07-22T13:10:12.607Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/46/a10d9df4673df56f71201d129ba1cb19eaff3366d08c8664d61a7df52e65/openai-1.93.0-py3-none-any.whl", hash = "sha256:3d746fe5498f0dd72e0d9ab706f26c91c0f646bf7459e5629af8ba7c9dbdf090", size = 755038, upload-time = "2025-06-27T21:21:37.532Z" }, + { url = "https://files.pythonhosted.org/packages/ee/35/412a0e9c3f0d37c94ed764b8ac7adae2d834dbd20e69f6aca582118e0f55/openai-1.97.1-py3-none-any.whl", hash = "sha256:4e96bbdf672ec3d44968c9ea39d2c375891db1acc1794668d8149d5fa6000606", size = 764380, upload-time = "2025-07-22T13:10:10.689Z" }, ] [[package]] name = "opentelemetry-api" -version = "1.34.1" +version = "1.35.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "importlib-metadata" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4d/5e/94a8cb759e4e409022229418294e098ca7feca00eb3c467bb20cbd329bda/opentelemetry_api-1.34.1.tar.gz", hash = "sha256:64f0bd06d42824843731d05beea88d4d4b6ae59f9fe347ff7dfa2cc14233bbb3", size = 64987, upload-time = "2025-06-10T08:55:19.818Z" } +sdist = { url = "https://files.pythonhosted.org/packages/99/c9/4509bfca6bb43220ce7f863c9f791e0d5001c2ec2b5867d48586008b3d96/opentelemetry_api-1.35.0.tar.gz", hash = "sha256:a111b959bcfa5b4d7dffc2fbd6a241aa72dd78dd8e79b5b1662bda896c5d2ffe", size = 64778, upload-time = "2025-07-11T12:23:28.804Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/3a/2ba85557e8dc024c0842ad22c570418dc02c36cbd1ab4b832a93edf071b8/opentelemetry_api-1.34.1-py3-none-any.whl", hash = "sha256:b7df4cb0830d5a6c29ad0c0691dbae874d8daefa934b8b1d642de48323d32a8c", size = 65767, upload-time = "2025-06-10T08:54:56.717Z" }, + { url = "https://files.pythonhosted.org/packages/1d/5a/3f8d078dbf55d18442f6a2ecedf6786d81d7245844b2b20ce2b8ad6f0307/opentelemetry_api-1.35.0-py3-none-any.whl", hash = "sha256:c4ea7e258a244858daf18474625e9cc0149b8ee354f37843415771a40c25ee06", size = 65566, upload-time = "2025-07-11T12:23:07.944Z" }, ] [[package]] @@ -2208,19 +1504,19 @@ wheels = [ [[package]] name = "opentelemetry-exporter-otlp-proto-common" -version = "1.34.1" +version = "1.35.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-proto" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/86/f0/ff235936ee40db93360233b62da932d4fd9e8d103cd090c6bcb9afaf5f01/opentelemetry_exporter_otlp_proto_common-1.34.1.tar.gz", hash = "sha256:b59a20a927facd5eac06edaf87a07e49f9e4a13db487b7d8a52b37cb87710f8b", size = 20817, upload-time = "2025-06-10T08:55:22.55Z" } +sdist = { url = "https://files.pythonhosted.org/packages/56/d1/887f860529cba7fc3aba2f6a3597fefec010a17bd1b126810724707d9b51/opentelemetry_exporter_otlp_proto_common-1.35.0.tar.gz", hash = "sha256:6f6d8c39f629b9fa5c79ce19a2829dbd93034f8ac51243cdf40ed2196f00d7eb", size = 20299, upload-time = "2025-07-11T12:23:31.046Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/72/e8/8b292a11cc8d8d87ec0c4089ae21b6a58af49ca2e51fa916435bc922fdc7/opentelemetry_exporter_otlp_proto_common-1.34.1-py3-none-any.whl", hash = 
"sha256:8e2019284bf24d3deebbb6c59c71e6eef3307cd88eff8c633e061abba33f7e87", size = 18834, upload-time = "2025-06-10T08:55:00.806Z" }, + { url = "https://files.pythonhosted.org/packages/5a/2c/e31dd3c719bff87fa77391eb7f38b1430d22868c52312cba8aad60f280e5/opentelemetry_exporter_otlp_proto_common-1.35.0-py3-none-any.whl", hash = "sha256:863465de697ae81279ede660f3918680b4480ef5f69dcdac04f30722ed7b74cc", size = 18349, upload-time = "2025-07-11T12:23:11.713Z" }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.34.1" +version = "1.35.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "googleapis-common-protos" }, @@ -2231,14 +1527,14 @@ dependencies = [ { name = "opentelemetry-sdk" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/41/f7/bb63837a3edb9ca857aaf5760796874e7cecddc88a2571b0992865a48fb6/opentelemetry_exporter_otlp_proto_grpc-1.34.1.tar.gz", hash = "sha256:7c841b90caa3aafcfc4fee58487a6c71743c34c6dc1787089d8b0578bbd794dd", size = 22566, upload-time = "2025-06-10T08:55:23.214Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/de/222e4f2f8cd39250991f84d76b661534aef457cafc6a3eb3fcd513627698/opentelemetry_exporter_otlp_proto_grpc-1.35.0.tar.gz", hash = "sha256:ac4c2c3aa5674642db0df0091ab43ec08bbd91a9be469c8d9b18923eb742b9cc", size = 23794, upload-time = "2025-07-11T12:23:31.662Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/42/0a4dd47e7ef54edf670c81fc06a83d68ea42727b82126a1df9dd0477695d/opentelemetry_exporter_otlp_proto_grpc-1.34.1-py3-none-any.whl", hash = "sha256:04bb8b732b02295be79f8a86a4ad28fae3d4ddb07307a98c7aa6f331de18cca6", size = 18615, upload-time = "2025-06-10T08:55:02.214Z" }, + { url = "https://files.pythonhosted.org/packages/f4/a6/3f60a77279e6a3dc21fc076dcb51be159a633b0bba5cba9fb804062a9332/opentelemetry_exporter_otlp_proto_grpc-1.35.0-py3-none-any.whl", hash = "sha256:ee31203eb3e50c7967b8fa71db366cc355099aca4e3726e489b248cdb2fd5a62", size = 18846, upload-time = "2025-07-11T12:23:12.957Z" }, ] [[package]] name = "opentelemetry-instrumentation" -version = "0.55b1" +version = "0.56b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2246,14 +1542,14 @@ dependencies = [ { name = "packaging" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cb/69/d8995f229ddf4d98b9c85dd126aeca03dd1742f6dc5d3bc0d2f6dae1535c/opentelemetry_instrumentation-0.55b1.tar.gz", hash = "sha256:2dc50aa207b9bfa16f70a1a0571e011e737a9917408934675b89ef4d5718c87b", size = 28552, upload-time = "2025-06-10T08:58:15.312Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/14/964e90f524655aed5c699190dad8dd9a05ed0f5fa334b4b33532237c2b51/opentelemetry_instrumentation-0.56b0.tar.gz", hash = "sha256:d2dbb3021188ca0ec8c5606349ee9a2919239627e8341d4d37f1d21ec3291d11", size = 28551, upload-time = "2025-07-11T12:26:19.305Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/60/7d/8ddfda1506c2fcca137924d5688ccabffa1aed9ec0955b7d0772de02cec3/opentelemetry_instrumentation-0.55b1-py3-none-any.whl", hash = "sha256:cbb1496b42bc394e01bc63701b10e69094e8564e281de063e4328d122cc7a97e", size = 31108, upload-time = "2025-06-10T08:57:14.355Z" }, + { url = "https://files.pythonhosted.org/packages/26/aa/2328f27200b8e51640d4d7ff5343ba6a81ab7d2650a9f574db016aae4adf/opentelemetry_instrumentation-0.56b0-py3-none-any.whl", hash = "sha256:948967f7c8f5bdc6e43512ba74c9ae14acb48eb72a35b61afe8db9909f743be3", size = 
31105, upload-time = "2025-07-11T12:25:22.788Z" }, ] [[package]] name = "opentelemetry-instrumentation-httpx" -version = "0.55b1" +version = "0.56b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2262,14 +1558,14 @@ dependencies = [ { name = "opentelemetry-util-http" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/42/7d/3fe981e7ee6655406aef2ff391a8192c1d3934f5990603af4ce1a8689a8e/opentelemetry_instrumentation_httpx-0.55b1.tar.gz", hash = "sha256:3121a9196a25a72b65cb16188a1b09f61e365694c75534b306d09088e5f90041", size = 19268, upload-time = "2025-06-10T08:58:31.31Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/93/784623262776d90aa07e5cc08f58e1d476ed4b208127c1718db4b57b78c8/opentelemetry_instrumentation_httpx-0.56b0.tar.gz", hash = "sha256:d93c7a0e0ce5cc2170f7cc0d26f19167bc8314b7d2a5204afdf84000d5ca8647", size = 19510, upload-time = "2025-07-11T12:26:37.492Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/70/a4bd4b7b5c601573975e33e276b0c2eb7635ae4d752359d49b68a8547414/opentelemetry_instrumentation_httpx-0.55b1-py3-none-any.whl", hash = "sha256:5fe22fcc3ad78a1da85cbd5d35d6acfb208521c164ad1dd75594230a266c6811", size = 15124, upload-time = "2025-06-10T08:57:43.062Z" }, + { url = "https://files.pythonhosted.org/packages/b4/0e/fe2f38e0170a22a44b26e9a8f555ac23b2fcf09f89264d718800ec1d5954/opentelemetry_instrumentation_httpx-0.56b0-py3-none-any.whl", hash = "sha256:34d9558413d695ea69fe002efa07c00aa859a8aa4930815726e5813d5f002e1d", size = 15113, upload-time = "2025-07-11T12:25:45.671Z" }, ] [[package]] name = "opentelemetry-instrumentation-openai" -version = "0.40.14" +version = "0.43.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2278,21 +1574,21 @@ dependencies = [ { name = "opentelemetry-semantic-conventions-ai" }, { name = "tiktoken" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/17/91/f721da20b8f5eb29138900bf0d9cd094e52f94a26558f42a124a66ea38d9/opentelemetry_instrumentation_openai-0.40.14.tar.gz", hash = "sha256:3ba8e36a3853833f5c0c6b3b8ffa0f1289e6423e0d428bd1c7b3f27abdc545f0", size = 15117, upload-time = "2025-06-24T10:26:25.965Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/cf/90675fdf938c67fa362a75f5d26ad83a9982d520ea4f9a8c84e149e93665/opentelemetry_instrumentation_openai-0.43.1.tar.gz", hash = "sha256:73fb071bd1d03481adf33473f784a90101e7a813f742049fb9e5b18c11cef699", size = 23438, upload-time = "2025-07-23T14:39:46.742Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/f6/865eb4a9416567559dd344dbefb2a2c3035f6066985e0b130f62458c2342/opentelemetry_instrumentation_openai-0.40.14-py3-none-any.whl", hash = "sha256:72319113370a018390e9c987dc3dac569380591fbaa5639c5e5cacd3f3165f4a", size = 23129, upload-time = "2025-06-24T10:26:01.138Z" }, + { url = "https://files.pythonhosted.org/packages/e8/d6/e253457cd2739e386a9a6f137091522d085b24239cad8ef2af80840d7959/opentelemetry_instrumentation_openai-0.43.1-py3-none-any.whl", hash = "sha256:7b4d738d7c33b8601bce7db345f16852e7c6e65253c7c23e521d31541362bc74", size = 33543, upload-time = "2025-07-23T14:39:17.995Z" }, ] [[package]] name = "opentelemetry-proto" -version = "1.34.1" +version = "1.35.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/66/b3/c3158dd012463bb7c0eb7304a85a6f63baeeb5b4c93a53845cf89f848c7e/opentelemetry_proto-1.34.1.tar.gz", hash = "sha256:16286214e405c211fc774187f3e4bbb1351290b8dfb88e8948af209ce85b719e", size = 34344, upload-time = "2025-06-10T08:55:32.25Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/a2/7366e32d9a2bccbb8614942dbea2cf93c209610385ea966cb050334f8df7/opentelemetry_proto-1.35.0.tar.gz", hash = "sha256:532497341bd3e1c074def7c5b00172601b28bb83b48afc41a4b779f26eb4ee05", size = 46151, upload-time = "2025-07-11T12:23:38.797Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/28/ab/4591bfa54e946350ce8b3f28e5c658fe9785e7cd11e9c11b1671a867822b/opentelemetry_proto-1.34.1-py3-none-any.whl", hash = "sha256:eb4bb5ac27f2562df2d6857fc557b3a481b5e298bc04f94cc68041f00cebcbd2", size = 55692, upload-time = "2025-06-10T08:55:14.904Z" }, + { url = "https://files.pythonhosted.org/packages/00/a7/3f05de580da7e8a8b8dff041d3d07a20bf3bb62d3bcc027f8fd669a73ff4/opentelemetry_proto-1.35.0-py3-none-any.whl", hash = "sha256:98fffa803164499f562718384e703be8d7dfbe680192279a0429cb150a2f8809", size = 72536, upload-time = "2025-07-11T12:23:23.247Z" }, ] [[package]] @@ -2312,225 +1608,56 @@ wheels = [ [[package]] name = "opentelemetry-sdk" -version = "1.34.1" +version = "1.35.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-semantic-conventions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6f/41/fe20f9036433da8e0fcef568984da4c1d1c771fa072ecd1a4d98779dccdd/opentelemetry_sdk-1.34.1.tar.gz", hash = "sha256:8091db0d763fcd6098d4781bbc80ff0971f94e260739aa6afe6fd379cdf3aa4d", size = 159441, upload-time = "2025-06-10T08:55:33.028Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/cf/1eb2ed2ce55e0a9aa95b3007f26f55c7943aeef0a783bb006bdd92b3299e/opentelemetry_sdk-1.35.0.tar.gz", hash = "sha256:2a400b415ab68aaa6f04e8a6a9f6552908fb3090ae2ff78d6ae0c597ac581954", size = 160871, upload-time = "2025-07-11T12:23:39.566Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/1b/def4fe6aa73f483cabf4c748f4c25070d5f7604dcc8b52e962983491b29e/opentelemetry_sdk-1.34.1-py3-none-any.whl", hash = "sha256:308effad4059562f1d92163c61c8141df649da24ce361827812c40abb2a1e96e", size = 118477, upload-time = "2025-06-10T08:55:16.02Z" }, + { url = "https://files.pythonhosted.org/packages/01/4f/8e32b757ef3b660511b638ab52d1ed9259b666bdeeceba51a082ce3aea95/opentelemetry_sdk-1.35.0-py3-none-any.whl", hash = "sha256:223d9e5f5678518f4842311bb73966e0b6db5d1e0b74e35074c052cd2487f800", size = 119379, upload-time = "2025-07-11T12:23:24.521Z" }, ] [[package]] name = "opentelemetry-semantic-conventions" -version = "0.55b1" +version = "0.56b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5d/f0/f33458486da911f47c4aa6db9bda308bb80f3236c111bf848bd870c16b16/opentelemetry_semantic_conventions-0.55b1.tar.gz", hash = "sha256:ef95b1f009159c28d7a7849f5cbc71c4c34c845bb514d66adfdf1b3fff3598b3", size = 119829, upload-time = "2025-06-10T08:55:33.881Z" } +sdist = { url = "https://files.pythonhosted.org/packages/32/8e/214fa817f63b9f068519463d8ab46afd5d03b98930c39394a37ae3e741d0/opentelemetry_semantic_conventions-0.56b0.tar.gz", hash = "sha256:c114c2eacc8ff6d3908cb328c811eaf64e6d68623840be9224dc829c4fd6c2ea", size = 124221, 
upload-time = "2025-07-11T12:23:40.71Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/89/267b0af1b1d0ba828f0e60642b6a5116ac1fd917cde7fc02821627029bd1/opentelemetry_semantic_conventions-0.55b1-py3-none-any.whl", hash = "sha256:5da81dfdf7d52e3d37f8fe88d5e771e191de924cfff5f550ab0b8f7b2409baed", size = 196223, upload-time = "2025-06-10T08:55:17.638Z" }, + { url = "https://files.pythonhosted.org/packages/c7/3f/e80c1b017066a9d999efffe88d1cce66116dcf5cb7f80c41040a83b6e03b/opentelemetry_semantic_conventions-0.56b0-py3-none-any.whl", hash = "sha256:df44492868fd6b482511cc43a942e7194be64e94945f572db24df2e279a001a2", size = 201625, upload-time = "2025-07-11T12:23:25.63Z" }, ] [[package]] name = "opentelemetry-semantic-conventions-ai" -version = "0.4.9" +version = "0.4.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8c/ba/2405abde825cf654d09ba16bfcfb8c863156bccdc47d1f2a86df6331e7bb/opentelemetry_semantic_conventions_ai-0.4.9.tar.gz", hash = "sha256:54a0b901959e2de5124384925846bac2ea0a6dab3de7e501ba6aecf5e293fe04", size = 4920, upload-time = "2025-05-16T10:20:54.611Z" } +sdist = { url = "https://files.pythonhosted.org/packages/39/8a/9228919e167a03f4c4f4c424a185dbfe62bd8597b9e2b20570b9db85bc84/opentelemetry_semantic_conventions_ai-0.4.11.tar.gz", hash = "sha256:bc84b71c66a01a5836a28104e691c5524f4f677fc90b40a4e6fbc2ec3e250610", size = 4825, upload-time = "2025-07-14T13:32:44.855Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/34/98/f5196ba0f4105a4790cec8c6671cf676c96dfa29bfedfe3c4f112bf4e6ad/opentelemetry_semantic_conventions_ai-0.4.9-py3-none-any.whl", hash = "sha256:71149e46a72554ae17de46bca6c11ba540c19c89904bd4cc3111aac6edf10315", size = 5617, upload-time = "2025-05-16T10:20:53.062Z" }, + { url = "https://files.pythonhosted.org/packages/47/a3/eab56cbd9a7d6f7c797172c0600be60811777535fea9c820ede9e985f1c4/opentelemetry_semantic_conventions_ai-0.4.11-py3-none-any.whl", hash = "sha256:9b07da1e66bed1746b61bb5d49d8fba9ae693625ec4ea94ddab390760505bf4b", size = 5682, upload-time = "2025-07-14T13:32:43.877Z" }, ] [[package]] name = "opentelemetry-util-http" -version = "0.55b1" +version = "0.56b0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/12/f7/3cc23b95921177cdda6d61d3475659b86bac335ed02dd19f994a850ceee3/opentelemetry_util_http-0.55b1.tar.gz", hash = "sha256:29e119c1f6796cccf5fc2aedb55274435cde5976d0ac3fec3ca20a80118f821e", size = 8038, upload-time = "2025-06-10T08:58:53.414Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/c5/80c603e44071d172d4e9c909b13e3d9924b90b08a581eff78a8daf77686e/opentelemetry_util_http-0.56b0.tar.gz", hash = "sha256:9a0c8573a68e3242a2d3e5840476088e63714e6d3e25f67127945ab0c7143074", size = 9404, upload-time = "2025-07-11T12:26:55.365Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/0a/49c5464efc0e6f6aa94a9ec054879efe2a59d7c1f6aacc500665b3d8afdc/opentelemetry_util_http-0.55b1-py3-none-any.whl", hash = "sha256:e134218df8ff010e111466650e5f019496b29c3b4f1b7de0e8ff8ebeafeebdf4", size = 7299, upload-time = "2025-06-10T08:58:11.785Z" }, + { url = "https://files.pythonhosted.org/packages/05/ca/20763fba2af06e73f0e666e46a32b5cdb9d2d75dcb5fd221f50c818cae43/opentelemetry_util_http-0.56b0-py3-none-any.whl", hash = "sha256:e26dd8c7f71da6806f1e65ac7cde189d389b8f152506146968f59b7a607dc8cf", size = 7645, upload-time = "2025-07-11T12:26:16.106Z" }, ] [[package]] name = "packaging" -version = "24.2" -source 
= { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950, upload-time = "2024-11-08T09:47:47.202Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451, upload-time = "2024-11-08T09:47:44.722Z" }, -] - -[[package]] -name = "pandas" -version = "2.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, - { name = "python-dateutil" }, - { name = "pytz" }, - { name = "tzdata" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/72/51/48f713c4c728d7c55ef7444ba5ea027c26998d96d1a40953b346438602fc/pandas-2.3.0.tar.gz", hash = "sha256:34600ab34ebf1131a7613a260a61dbe8b62c188ec0ea4c296da7c9a06b004133", size = 4484490, upload-time = "2025-06-05T03:27:54.133Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/94/46/24192607058dd607dbfacdd060a2370f6afb19c2ccb617406469b9aeb8e7/pandas-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2eb4728a18dcd2908c7fccf74a982e241b467d178724545a48d0caf534b38ebf", size = 11573865, upload-time = "2025-06-05T03:26:46.774Z" }, - { url = "https://files.pythonhosted.org/packages/9f/cc/ae8ea3b800757a70c9fdccc68b67dc0280a6e814efcf74e4211fd5dea1ca/pandas-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9d8c3187be7479ea5c3d30c32a5d73d62a621166675063b2edd21bc47614027", size = 10702154, upload-time = "2025-06-05T16:50:14.439Z" }, - { url = "https://files.pythonhosted.org/packages/d8/ba/a7883d7aab3d24c6540a2768f679e7414582cc389876d469b40ec749d78b/pandas-2.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ff730713d4c4f2f1c860e36c005c7cefc1c7c80c21c0688fd605aa43c9fcf09", size = 11262180, upload-time = "2025-06-05T16:50:17.453Z" }, - { url = "https://files.pythonhosted.org/packages/01/a5/931fc3ad333d9d87b10107d948d757d67ebcfc33b1988d5faccc39c6845c/pandas-2.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba24af48643b12ffe49b27065d3babd52702d95ab70f50e1b34f71ca703e2c0d", size = 11991493, upload-time = "2025-06-05T03:26:51.813Z" }, - { url = "https://files.pythonhosted.org/packages/d7/bf/0213986830a92d44d55153c1d69b509431a972eb73f204242988c4e66e86/pandas-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:404d681c698e3c8a40a61d0cd9412cc7364ab9a9cc6e144ae2992e11a2e77a20", size = 12470733, upload-time = "2025-06-06T00:00:18.651Z" }, - { url = "https://files.pythonhosted.org/packages/a4/0e/21eb48a3a34a7d4bac982afc2c4eb5ab09f2d988bdf29d92ba9ae8e90a79/pandas-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6021910b086b3ca756755e86ddc64e0ddafd5e58e076c72cb1585162e5ad259b", size = 13212406, upload-time = "2025-06-05T03:26:55.992Z" }, - { url = "https://files.pythonhosted.org/packages/1f/d9/74017c4eec7a28892d8d6e31ae9de3baef71f5a5286e74e6b7aad7f8c837/pandas-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:094e271a15b579650ebf4c5155c05dcd2a14fd4fdd72cf4854b2f7ad31ea30be", size = 10976199, upload-time = "2025-06-05T03:26:59.594Z" }, - { url = "https://files.pythonhosted.org/packages/d3/57/5cb75a56a4842bbd0511c3d1c79186d8315b82dac802118322b2de1194fe/pandas-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", 
hash = "sha256:2c7e2fc25f89a49a11599ec1e76821322439d90820108309bf42130d2f36c983", size = 11518913, upload-time = "2025-06-05T03:27:02.757Z" }, - { url = "https://files.pythonhosted.org/packages/05/01/0c8785610e465e4948a01a059562176e4c8088aa257e2e074db868f86d4e/pandas-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c6da97aeb6a6d233fb6b17986234cc723b396b50a3c6804776351994f2a658fd", size = 10655249, upload-time = "2025-06-05T16:50:20.17Z" }, - { url = "https://files.pythonhosted.org/packages/e8/6a/47fd7517cd8abe72a58706aab2b99e9438360d36dcdb052cf917b7bf3bdc/pandas-2.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb32dc743b52467d488e7a7c8039b821da2826a9ba4f85b89ea95274f863280f", size = 11328359, upload-time = "2025-06-05T03:27:06.431Z" }, - { url = "https://files.pythonhosted.org/packages/2a/b3/463bfe819ed60fb7e7ddffb4ae2ee04b887b3444feee6c19437b8f834837/pandas-2.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:213cd63c43263dbb522c1f8a7c9d072e25900f6975596f883f4bebd77295d4f3", size = 12024789, upload-time = "2025-06-05T03:27:09.875Z" }, - { url = "https://files.pythonhosted.org/packages/04/0c/e0704ccdb0ac40aeb3434d1c641c43d05f75c92e67525df39575ace35468/pandas-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1d2b33e68d0ce64e26a4acc2e72d747292084f4e8db4c847c6f5f6cbe56ed6d8", size = 12480734, upload-time = "2025-06-06T00:00:22.246Z" }, - { url = "https://files.pythonhosted.org/packages/e9/df/815d6583967001153bb27f5cf075653d69d51ad887ebbf4cfe1173a1ac58/pandas-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:430a63bae10b5086995db1b02694996336e5a8ac9a96b4200572b413dfdfccb9", size = 13223381, upload-time = "2025-06-05T03:27:15.641Z" }, - { url = "https://files.pythonhosted.org/packages/79/88/ca5973ed07b7f484c493e941dbff990861ca55291ff7ac67c815ce347395/pandas-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4930255e28ff5545e2ca404637bcc56f031893142773b3468dc021c6c32a1390", size = 10970135, upload-time = "2025-06-05T03:27:24.131Z" }, - { url = "https://files.pythonhosted.org/packages/24/fb/0994c14d1f7909ce83f0b1fb27958135513c4f3f2528bde216180aa73bfc/pandas-2.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f925f1ef673b4bd0271b1809b72b3270384f2b7d9d14a189b12b7fc02574d575", size = 12141356, upload-time = "2025-06-05T03:27:34.547Z" }, - { url = "https://files.pythonhosted.org/packages/9d/a2/9b903e5962134497ac4f8a96f862ee3081cb2506f69f8e4778ce3d9c9d82/pandas-2.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78ad363ddb873a631e92a3c063ade1ecfb34cae71e9a2be6ad100f875ac1042", size = 11474674, upload-time = "2025-06-05T03:27:39.448Z" }, - { url = "https://files.pythonhosted.org/packages/81/3a/3806d041bce032f8de44380f866059437fb79e36d6b22c82c187e65f765b/pandas-2.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951805d146922aed8357e4cc5671b8b0b9be1027f0619cea132a9f3f65f2f09c", size = 11439876, upload-time = "2025-06-05T03:27:43.652Z" }, - { url = "https://files.pythonhosted.org/packages/15/aa/3fc3181d12b95da71f5c2537c3e3b3af6ab3a8c392ab41ebb766e0929bc6/pandas-2.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a881bc1309f3fce34696d07b00f13335c41f5f5a8770a33b09ebe23261cfc67", size = 11966182, upload-time = "2025-06-05T03:27:47.652Z" }, - { url = "https://files.pythonhosted.org/packages/37/e7/e12f2d9b0a2c4a2cc86e2aabff7ccfd24f03e597d770abfa2acd313ee46b/pandas-2.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:e1991bbb96f4050b09b5f811253c4f3cf05ee89a589379aa36cd623f21a31d6f", size = 12547686, upload-time = "2025-06-06T00:00:26.142Z" }, - { url = "https://files.pythonhosted.org/packages/39/c2/646d2e93e0af70f4e5359d870a63584dacbc324b54d73e6b3267920ff117/pandas-2.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bb3be958022198531eb7ec2008cfc78c5b1eed51af8600c6c5d9160d89d8d249", size = 13231847, upload-time = "2025-06-05T03:27:51.465Z" }, -] - -[[package]] -name = "parso" -version = "0.8.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/66/94/68e2e17afaa9169cf6412ab0f28623903be73d1b32e208d9e8e541bb086d/parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d", size = 400609, upload-time = "2024-04-05T09:43:55.897Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", size = 103650, upload-time = "2024-04-05T09:43:53.299Z" }, -] - -[[package]] -name = "pexpect" -version = "4.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "ptyprocess" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" }, -] - -[[package]] -name = "pillow" -version = "11.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523", size = 47113069, upload-time = "2025-07-01T09:16:30.666Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/40/fe/1bc9b3ee13f68487a99ac9529968035cca2f0a51ec36892060edcc51d06a/pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4", size = 5278800, upload-time = "2025-07-01T09:14:17.648Z" }, - { url = "https://files.pythonhosted.org/packages/2c/32/7e2ac19b5713657384cec55f89065fb306b06af008cfd87e572035b27119/pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69", size = 4686296, upload-time = "2025-07-01T09:14:19.828Z" }, - { url = "https://files.pythonhosted.org/packages/8e/1e/b9e12bbe6e4c2220effebc09ea0923a07a6da1e1f1bfbc8d7d29a01ce32b/pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d", size = 5871726, upload-time = "2025-07-03T13:10:04.448Z" }, - { url = "https://files.pythonhosted.org/packages/8d/33/e9200d2bd7ba00dc3ddb78df1198a6e80d7669cce6c2bdbeb2530a74ec58/pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6", size = 7644652, 
upload-time = "2025-07-03T13:10:10.391Z" }, - { url = "https://files.pythonhosted.org/packages/41/f1/6f2427a26fc683e00d985bc391bdd76d8dd4e92fac33d841127eb8fb2313/pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7", size = 5977787, upload-time = "2025-07-01T09:14:21.63Z" }, - { url = "https://files.pythonhosted.org/packages/e4/c9/06dd4a38974e24f932ff5f98ea3c546ce3f8c995d3f0985f8e5ba48bba19/pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024", size = 6645236, upload-time = "2025-07-01T09:14:23.321Z" }, - { url = "https://files.pythonhosted.org/packages/40/e7/848f69fb79843b3d91241bad658e9c14f39a32f71a301bcd1d139416d1be/pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809", size = 6086950, upload-time = "2025-07-01T09:14:25.237Z" }, - { url = "https://files.pythonhosted.org/packages/0b/1a/7cff92e695a2a29ac1958c2a0fe4c0b2393b60aac13b04a4fe2735cad52d/pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d", size = 6723358, upload-time = "2025-07-01T09:14:27.053Z" }, - { url = "https://files.pythonhosted.org/packages/26/7d/73699ad77895f69edff76b0f332acc3d497f22f5d75e5360f78cbcaff248/pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149", size = 6275079, upload-time = "2025-07-01T09:14:30.104Z" }, - { url = "https://files.pythonhosted.org/packages/8c/ce/e7dfc873bdd9828f3b6e5c2bbb74e47a98ec23cc5c74fc4e54462f0d9204/pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d", size = 6986324, upload-time = "2025-07-01T09:14:31.899Z" }, - { url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542", size = 2423067, upload-time = "2025-07-01T09:14:33.709Z" }, - { url = "https://files.pythonhosted.org/packages/1e/93/0952f2ed8db3a5a4c7a11f91965d6184ebc8cd7cbb7941a260d5f018cd2d/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd", size = 2128328, upload-time = "2025-07-01T09:14:35.276Z" }, - { url = "https://files.pythonhosted.org/packages/4b/e8/100c3d114b1a0bf4042f27e0f87d2f25e857e838034e98ca98fe7b8c0a9c/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8", size = 2170652, upload-time = "2025-07-01T09:14:37.203Z" }, - { url = "https://files.pythonhosted.org/packages/aa/86/3f758a28a6e381758545f7cdb4942e1cb79abd271bea932998fc0db93cb6/pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f", size = 2227443, upload-time = "2025-07-01T09:14:39.344Z" }, - { url = "https://files.pythonhosted.org/packages/01/f4/91d5b3ffa718df2f53b0dc109877993e511f4fd055d7e9508682e8aba092/pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c", size = 5278474, upload-time = "2025-07-01T09:14:41.843Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/0e/37d7d3eca6c879fbd9dba21268427dffda1ab00d4eb05b32923d4fbe3b12/pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd", size = 4686038, upload-time = "2025-07-01T09:14:44.008Z" }, - { url = "https://files.pythonhosted.org/packages/ff/b0/3426e5c7f6565e752d81221af9d3676fdbb4f352317ceafd42899aaf5d8a/pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e", size = 5864407, upload-time = "2025-07-03T13:10:15.628Z" }, - { url = "https://files.pythonhosted.org/packages/fc/c1/c6c423134229f2a221ee53f838d4be9d82bab86f7e2f8e75e47b6bf6cd77/pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1", size = 7639094, upload-time = "2025-07-03T13:10:21.857Z" }, - { url = "https://files.pythonhosted.org/packages/ba/c9/09e6746630fe6372c67c648ff9deae52a2bc20897d51fa293571977ceb5d/pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805", size = 5973503, upload-time = "2025-07-01T09:14:45.698Z" }, - { url = "https://files.pythonhosted.org/packages/d5/1c/a2a29649c0b1983d3ef57ee87a66487fdeb45132df66ab30dd37f7dbe162/pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8", size = 6642574, upload-time = "2025-07-01T09:14:47.415Z" }, - { url = "https://files.pythonhosted.org/packages/36/de/d5cc31cc4b055b6c6fd990e3e7f0f8aaf36229a2698501bcb0cdf67c7146/pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2", size = 6084060, upload-time = "2025-07-01T09:14:49.636Z" }, - { url = "https://files.pythonhosted.org/packages/d5/ea/502d938cbaeec836ac28a9b730193716f0114c41325db428e6b280513f09/pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b", size = 6721407, upload-time = "2025-07-01T09:14:51.962Z" }, - { url = "https://files.pythonhosted.org/packages/45/9c/9c5e2a73f125f6cbc59cc7087c8f2d649a7ae453f83bd0362ff7c9e2aee2/pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3", size = 6273841, upload-time = "2025-07-01T09:14:54.142Z" }, - { url = "https://files.pythonhosted.org/packages/23/85/397c73524e0cd212067e0c969aa245b01d50183439550d24d9f55781b776/pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51", size = 6978450, upload-time = "2025-07-01T09:14:56.436Z" }, - { url = "https://files.pythonhosted.org/packages/17/d2/622f4547f69cd173955194b78e4d19ca4935a1b0f03a302d655c9f6aae65/pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580", size = 2423055, upload-time = "2025-07-01T09:14:58.072Z" }, - { url = "https://files.pythonhosted.org/packages/dd/80/a8a2ac21dda2e82480852978416cfacd439a4b490a501a288ecf4fe2532d/pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e", size = 5281110, upload-time = "2025-07-01T09:14:59.79Z" }, - { url = 
"https://files.pythonhosted.org/packages/44/d6/b79754ca790f315918732e18f82a8146d33bcd7f4494380457ea89eb883d/pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d", size = 4689547, upload-time = "2025-07-01T09:15:01.648Z" }, - { url = "https://files.pythonhosted.org/packages/49/20/716b8717d331150cb00f7fdd78169c01e8e0c219732a78b0e59b6bdb2fd6/pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced", size = 5901554, upload-time = "2025-07-03T13:10:27.018Z" }, - { url = "https://files.pythonhosted.org/packages/74/cf/a9f3a2514a65bb071075063a96f0a5cf949c2f2fce683c15ccc83b1c1cab/pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c", size = 7669132, upload-time = "2025-07-03T13:10:33.01Z" }, - { url = "https://files.pythonhosted.org/packages/98/3c/da78805cbdbee9cb43efe8261dd7cc0b4b93f2ac79b676c03159e9db2187/pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8", size = 6005001, upload-time = "2025-07-01T09:15:03.365Z" }, - { url = "https://files.pythonhosted.org/packages/6c/fa/ce044b91faecf30e635321351bba32bab5a7e034c60187fe9698191aef4f/pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59", size = 6668814, upload-time = "2025-07-01T09:15:05.655Z" }, - { url = "https://files.pythonhosted.org/packages/7b/51/90f9291406d09bf93686434f9183aba27b831c10c87746ff49f127ee80cb/pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe", size = 6113124, upload-time = "2025-07-01T09:15:07.358Z" }, - { url = "https://files.pythonhosted.org/packages/cd/5a/6fec59b1dfb619234f7636d4157d11fb4e196caeee220232a8d2ec48488d/pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c", size = 6747186, upload-time = "2025-07-01T09:15:09.317Z" }, - { url = "https://files.pythonhosted.org/packages/49/6b/00187a044f98255225f172de653941e61da37104a9ea60e4f6887717e2b5/pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788", size = 6277546, upload-time = "2025-07-01T09:15:11.311Z" }, - { url = "https://files.pythonhosted.org/packages/e8/5c/6caaba7e261c0d75bab23be79f1d06b5ad2a2ae49f028ccec801b0e853d6/pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31", size = 6985102, upload-time = "2025-07-01T09:15:13.164Z" }, - { url = "https://files.pythonhosted.org/packages/f3/7e/b623008460c09a0cb38263c93b828c666493caee2eb34ff67f778b87e58c/pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e", size = 2424803, upload-time = "2025-07-01T09:15:15.695Z" }, - { url = "https://files.pythonhosted.org/packages/73/f4/04905af42837292ed86cb1b1dabe03dce1edc008ef14c473c5c7e1443c5d/pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12", size = 5278520, upload-time = "2025-07-01T09:15:17.429Z" }, - { url = 
"https://files.pythonhosted.org/packages/41/b0/33d79e377a336247df6348a54e6d2a2b85d644ca202555e3faa0cf811ecc/pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a", size = 4686116, upload-time = "2025-07-01T09:15:19.423Z" }, - { url = "https://files.pythonhosted.org/packages/49/2d/ed8bc0ab219ae8768f529597d9509d184fe8a6c4741a6864fea334d25f3f/pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632", size = 5864597, upload-time = "2025-07-03T13:10:38.404Z" }, - { url = "https://files.pythonhosted.org/packages/b5/3d/b932bb4225c80b58dfadaca9d42d08d0b7064d2d1791b6a237f87f661834/pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673", size = 7638246, upload-time = "2025-07-03T13:10:44.987Z" }, - { url = "https://files.pythonhosted.org/packages/09/b5/0487044b7c096f1b48f0d7ad416472c02e0e4bf6919541b111efd3cae690/pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027", size = 5973336, upload-time = "2025-07-01T09:15:21.237Z" }, - { url = "https://files.pythonhosted.org/packages/a8/2d/524f9318f6cbfcc79fbc004801ea6b607ec3f843977652fdee4857a7568b/pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77", size = 6642699, upload-time = "2025-07-01T09:15:23.186Z" }, - { url = "https://files.pythonhosted.org/packages/6f/d2/a9a4f280c6aefedce1e8f615baaa5474e0701d86dd6f1dede66726462bbd/pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874", size = 6083789, upload-time = "2025-07-01T09:15:25.1Z" }, - { url = "https://files.pythonhosted.org/packages/fe/54/86b0cd9dbb683a9d5e960b66c7379e821a19be4ac5810e2e5a715c09a0c0/pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a", size = 6720386, upload-time = "2025-07-01T09:15:27.378Z" }, - { url = "https://files.pythonhosted.org/packages/e7/95/88efcaf384c3588e24259c4203b909cbe3e3c2d887af9e938c2022c9dd48/pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214", size = 6370911, upload-time = "2025-07-01T09:15:29.294Z" }, - { url = "https://files.pythonhosted.org/packages/2e/cc/934e5820850ec5eb107e7b1a72dd278140731c669f396110ebc326f2a503/pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635", size = 7117383, upload-time = "2025-07-01T09:15:31.128Z" }, - { url = "https://files.pythonhosted.org/packages/d6/e9/9c0a616a71da2a5d163aa37405e8aced9a906d574b4a214bede134e731bc/pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6", size = 2511385, upload-time = "2025-07-01T09:15:33.328Z" }, - { url = "https://files.pythonhosted.org/packages/1a/33/c88376898aff369658b225262cd4f2659b13e8178e7534df9e6e1fa289f6/pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae", size = 5281129, upload-time = "2025-07-01T09:15:35.194Z" }, - { url = 
"https://files.pythonhosted.org/packages/1f/70/d376247fb36f1844b42910911c83a02d5544ebd2a8bad9efcc0f707ea774/pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653", size = 4689580, upload-time = "2025-07-01T09:15:37.114Z" }, - { url = "https://files.pythonhosted.org/packages/eb/1c/537e930496149fbac69efd2fc4329035bbe2e5475b4165439e3be9cb183b/pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6", size = 5902860, upload-time = "2025-07-03T13:10:50.248Z" }, - { url = "https://files.pythonhosted.org/packages/bd/57/80f53264954dcefeebcf9dae6e3eb1daea1b488f0be8b8fef12f79a3eb10/pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36", size = 7670694, upload-time = "2025-07-03T13:10:56.432Z" }, - { url = "https://files.pythonhosted.org/packages/70/ff/4727d3b71a8578b4587d9c276e90efad2d6fe0335fd76742a6da08132e8c/pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b", size = 6005888, upload-time = "2025-07-01T09:15:39.436Z" }, - { url = "https://files.pythonhosted.org/packages/05/ae/716592277934f85d3be51d7256f3636672d7b1abfafdc42cf3f8cbd4b4c8/pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477", size = 6670330, upload-time = "2025-07-01T09:15:41.269Z" }, - { url = "https://files.pythonhosted.org/packages/e7/bb/7fe6cddcc8827b01b1a9766f5fdeb7418680744f9082035bdbabecf1d57f/pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50", size = 6114089, upload-time = "2025-07-01T09:15:43.13Z" }, - { url = "https://files.pythonhosted.org/packages/8b/f5/06bfaa444c8e80f1a8e4bff98da9c83b37b5be3b1deaa43d27a0db37ef84/pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b", size = 6748206, upload-time = "2025-07-01T09:15:44.937Z" }, - { url = "https://files.pythonhosted.org/packages/f0/77/bc6f92a3e8e6e46c0ca78abfffec0037845800ea38c73483760362804c41/pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12", size = 6377370, upload-time = "2025-07-01T09:15:46.673Z" }, - { url = "https://files.pythonhosted.org/packages/4a/82/3a721f7d69dca802befb8af08b7c79ebcab461007ce1c18bd91a5d5896f9/pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db", size = 7121500, upload-time = "2025-07-01T09:15:48.512Z" }, - { url = "https://files.pythonhosted.org/packages/89/c7/5572fa4a3f45740eaab6ae86fcdf7195b55beac1371ac8c619d880cfe948/pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa", size = 2512835, upload-time = "2025-07-01T09:15:50.399Z" }, -] - -[[package]] -name = "pinecone" -version = "7.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "pinecone-plugin-assistant" }, - { name = "pinecone-plugin-interface" }, - { name = "python-dateutil" }, - { name = "typing-extensions" }, - { name = "urllib3", marker = "python_full_version < '4.0'" }, -] 
-sdist = { url = "https://files.pythonhosted.org/packages/fa/38/12731d4af470851b4963eba616605868a8599ef4df51c7b6c928e5f3166d/pinecone-7.3.0.tar.gz", hash = "sha256:307edc155621d487c20dc71b76c3ad5d6f799569ba42064190d03917954f9a7b", size = 235256, upload-time = "2025-06-27T20:03:51.498Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/a6/c5d54a5fb1de3983a8739c1a1660e7a7074db2cbadfa875b823fcf29b629/pinecone-7.3.0-py3-none-any.whl", hash = "sha256:315b8fef20320bef723ecbb695dec0aafa75d8434d86e01e5a0e85933e1009a8", size = 587563, upload-time = "2025-06-27T20:03:50.249Z" }, -] - -[[package]] -name = "pinecone-plugin-assistant" -version = "1.7.0" +version = "25.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "packaging" }, - { name = "requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fa/8c/2db25e4d88ec31cc096b71473938e9269459eb567b50ea49dbea9a88f3ab/pinecone_plugin_assistant-1.7.0.tar.gz", hash = "sha256:e26e3ba10a8b71c3da0d777cff407668022e82963c4913d0ffeb6c552721e482", size = 145608, upload-time = "2025-06-11T09:06:39.067Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/91/29/9aab5e3e22086da8ba40fa9cd34bfefffd9cdf3f43f237fd7c9969568f20/pinecone_plugin_assistant-1.7.0-py3-none-any.whl", hash = "sha256:864cb8e7930588e6c2da97c6d44f0240969195f43fa303c5db76cbc12bf903a5", size = 239972, upload-time = "2025-06-11T09:06:37.284Z" }, -] - -[[package]] -name = "pinecone-plugin-interface" -version = "0.0.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f4/fb/e8a4063264953ead9e2b24d9b390152c60f042c951c47f4592e9996e57ff/pinecone_plugin_interface-0.0.7.tar.gz", hash = "sha256:b8e6675e41847333aa13923cc44daa3f85676d7157324682dc1640588a982846", size = 3370, upload-time = "2024-06-05T01:57:52.093Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/1d/a21fdfcd6d022cb64cef5c2a29ee6691c6c103c4566b41646b080b7536a5/pinecone_plugin_interface-0.0.7-py3-none-any.whl", hash = "sha256:875857ad9c9fc8bbc074dbe780d187a2afd21f5bfe0f3b08601924a61ef1bba8", size = 6249, upload-time = "2024-06-05T01:57:50.583Z" }, -] - -[[package]] -name = "platformdirs" -version = "4.3.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, ] [[package]] @@ -2542,18 +1669,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] -[[package]] -name = "prompt-toolkit" -version = "3.0.51" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "wcwidth" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/bb/6e/9d084c929dfe9e3bfe0c6a47e31f78a25c54627d64a66e884a8bf5474f1c/prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed", size = 428940, upload-time = "2025-04-15T09:18:47.731Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810, upload-time = "2025-04-15T09:18:44.753Z" }, -] - [[package]] name = "propcache" version = "0.3.2" @@ -2625,49 +1740,16 @@ wheels = [ [[package]] name = "protobuf" -version = "5.29.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/29/d09e70352e4e88c9c7a198d5645d7277811448d76c23b00345670f7c8a38/protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84", size = 425226, upload-time = "2025-05-28T23:51:59.82Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/11/6e40e9fc5bba02988a214c07cf324595789ca7820160bfd1f8be96e48539/protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079", size = 422963, upload-time = "2025-05-28T23:51:41.204Z" }, - { url = "https://files.pythonhosted.org/packages/81/7f/73cefb093e1a2a7c3ffd839e6f9fcafb7a427d300c7f8aef9c64405d8ac6/protobuf-5.29.5-cp310-abi3-win_amd64.whl", hash = "sha256:3f76e3a3675b4a4d867b52e4a5f5b78a2ef9565549d4037e06cf7b0942b1d3fc", size = 434818, upload-time = "2025-05-28T23:51:44.297Z" }, - { url = "https://files.pythonhosted.org/packages/dd/73/10e1661c21f139f2c6ad9b23040ff36fee624310dc28fba20d33fdae124c/protobuf-5.29.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e38c5add5a311f2a6eb0340716ef9b039c1dfa428b28f25a7838ac329204a671", size = 418091, upload-time = "2025-05-28T23:51:45.907Z" }, - { url = "https://files.pythonhosted.org/packages/6c/04/98f6f8cf5b07ab1294c13f34b4e69b3722bb609c5b701d6c169828f9f8aa/protobuf-5.29.5-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:fa18533a299d7ab6c55a238bf8629311439995f2e7eca5caaff08663606e9015", size = 319824, upload-time = "2025-05-28T23:51:47.545Z" }, - { url = "https://files.pythonhosted.org/packages/85/e4/07c80521879c2d15f321465ac24c70efe2381378c00bf5e56a0f4fbac8cd/protobuf-5.29.5-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:63848923da3325e1bf7e9003d680ce6e14b07e55d0473253a690c3a8b8fd6e61", size = 319942, upload-time = "2025-05-28T23:51:49.11Z" }, - { url = "https://files.pythonhosted.org/packages/7e/cc/7e77861000a0691aeea8f4566e5d3aa716f2b1dece4a24439437e41d3d25/protobuf-5.29.5-py3-none-any.whl", hash = "sha256:6cf42630262c59b2d8de33954443d94b746c952b01434fc58a417fdbd2e84bd5", size = 172823, upload-time = "2025-05-28T23:51:58.157Z" }, -] - -[[package]] -name = "psutil" -version = "7.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003, upload-time = "2025-02-13T21:54:07.946Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051, upload-time = "2025-02-13T21:54:12.36Z" }, - { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535, upload-time = "2025-02-13T21:54:16.07Z" }, - { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004, upload-time = "2025-02-13T21:54:18.662Z" }, - { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986, upload-time = "2025-02-13T21:54:21.811Z" }, - { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544, upload-time = "2025-02-13T21:54:24.68Z" }, - { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053, upload-time = "2025-02-13T21:54:34.31Z" }, - { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885, upload-time = "2025-02-13T21:54:37.486Z" }, -] - -[[package]] -name = "ptyprocess" -version = "0.7.0" +version = "6.31.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/f3/b9655a711b32c19720253f6f06326faf90580834e2e83f840472d752bc8b/protobuf-6.31.1.tar.gz", hash = "sha256:d8cac4c982f0b957a4dc73a80e2ea24fab08e679c0de9deb835f4a12d69aca9a", size = 441797, upload-time = "2025-05-28T19:25:54.947Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = 
"2020-12-28T15:15:28.35Z" }, -] - -[[package]] -name = "pure-eval" -version = "0.2.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" }, + { url = "https://files.pythonhosted.org/packages/f3/6f/6ab8e4bf962fd5570d3deaa2d5c38f0a363f57b4501047b5ebeb83ab1125/protobuf-6.31.1-cp310-abi3-win32.whl", hash = "sha256:7fa17d5a29c2e04b7d90e5e32388b8bfd0e7107cd8e616feef7ed3fa6bdab5c9", size = 423603, upload-time = "2025-05-28T19:25:41.198Z" }, + { url = "https://files.pythonhosted.org/packages/44/3a/b15c4347dd4bf3a1b0ee882f384623e2063bb5cf9fa9d57990a4f7df2fb6/protobuf-6.31.1-cp310-abi3-win_amd64.whl", hash = "sha256:426f59d2964864a1a366254fa703b8632dcec0790d8862d30034d8245e1cd447", size = 435283, upload-time = "2025-05-28T19:25:44.275Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c9/b9689a2a250264a84e66c46d8862ba788ee7a641cdca39bccf64f59284b7/protobuf-6.31.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:6f1227473dc43d44ed644425268eb7c2e488ae245d51c6866d19fe158e207402", size = 425604, upload-time = "2025-05-28T19:25:45.702Z" }, + { url = "https://files.pythonhosted.org/packages/76/a1/7a5a94032c83375e4fe7e7f56e3976ea6ac90c5e85fac8576409e25c39c3/protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39", size = 322115, upload-time = "2025-05-28T19:25:47.128Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/b59d405d64d31999244643d88c45c8241c58f17cc887e73bcb90602327f8/protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6", size = 321070, upload-time = "2025-05-28T19:25:50.036Z" }, + { url = "https://files.pythonhosted.org/packages/f7/af/ab3c51ab7507a7325e98ffe691d9495ee3d3aa5f589afad65ec920d39821/protobuf-6.31.1-py3-none-any.whl", hash = "sha256:720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e", size = 168724, upload-time = "2025-05-28T19:25:53.926Z" }, ] [[package]] @@ -2780,20 +1862,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] -[[package]] -name = "pyjwt" -version = "2.10.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, 
upload-time = "2024-11-28T03:43:27.893Z" }, -] - -[package.optional-dependencies] -crypto = [ - { name = "cryptography" }, -] - [[package]] name = "pyparsing" version = "3.2.3" @@ -2803,15 +1871,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120, upload-time = "2025-03-25T05:01:24.908Z" }, ] -[[package]] -name = "pyreadline3" -version = "3.5.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839, upload-time = "2024-09-19T02:40:10.062Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178, upload-time = "2024-09-19T02:40:08.598Z" }, -] - [[package]] name = "pytest" version = "8.4.1" @@ -2830,14 +1889,14 @@ wheels = [ [[package]] name = "pytest-asyncio" -version = "1.0.0" +version = "1.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d0/d4/14f53324cb1a6381bef29d698987625d80052bb33932d8e7cbf9b337b17c/pytest_asyncio-1.0.0.tar.gz", hash = "sha256:d15463d13f4456e1ead2594520216b225a16f781e144f8fdf6c5bb4667c48b3f", size = 46960, upload-time = "2025-05-26T04:54:40.484Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/51/f8794af39eeb870e87a8c8068642fc07bce0c854d6865d7dd0f2a9d338c2/pytest_asyncio-1.1.0.tar.gz", hash = "sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea", size = 46652, upload-time = "2025-07-16T04:29:26.393Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/05/ce271016e351fddc8399e546f6e23761967ee09c8c568bbfbecb0c150171/pytest_asyncio-1.0.0-py3-none-any.whl", hash = "sha256:4f024da9f1ef945e680dc68610b52550e36590a67fd31bb3b4943979a1f90ef3", size = 15976, upload-time = "2025-05-26T04:54:39.035Z" }, + { url = "https://files.pythonhosted.org/packages/c7/9d/bf86eddabf8c6c9cb1ea9a869d6873b46f105a5d292d3a6f7071f5b07935/pytest_asyncio-1.1.0-py3-none-any.whl", hash = "sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf", size = 15157, upload-time = "2025-07-16T04:29:24.929Z" }, ] [[package]] @@ -2870,26 +1929,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, ] -[[package]] -name = "pytz" -version = "2025.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", 
hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, -] - [[package]] name = "pywin32" -version = "310" +version = "311" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/ec/4fdbe47932f671d6e348474ea35ed94227fb5df56a7c30cbbb42cd396ed0/pywin32-310-cp312-cp312-win32.whl", hash = "sha256:8a75a5cc3893e83a108c05d82198880704c44bbaee4d06e442e471d3c9ea4f3d", size = 8796239, upload-time = "2025-03-17T00:55:58.807Z" }, - { url = "https://files.pythonhosted.org/packages/e3/e5/b0627f8bb84e06991bea89ad8153a9e50ace40b2e1195d68e9dff6b03d0f/pywin32-310-cp312-cp312-win_amd64.whl", hash = "sha256:bf5c397c9a9a19a6f62f3fb821fbf36cac08f03770056711f765ec1503972060", size = 9503839, upload-time = "2025-03-17T00:56:00.8Z" }, - { url = "https://files.pythonhosted.org/packages/1f/32/9ccf53748df72301a89713936645a664ec001abd35ecc8578beda593d37d/pywin32-310-cp312-cp312-win_arm64.whl", hash = "sha256:2349cc906eae872d0663d4d6290d13b90621eaf78964bb1578632ff20e152966", size = 8459470, upload-time = "2025-03-17T00:56:02.601Z" }, - { url = "https://files.pythonhosted.org/packages/1c/09/9c1b978ffc4ae53999e89c19c77ba882d9fce476729f23ef55211ea1c034/pywin32-310-cp313-cp313-win32.whl", hash = "sha256:5d241a659c496ada3253cd01cfaa779b048e90ce4b2b38cd44168ad555ce74ab", size = 8794384, upload-time = "2025-03-17T00:56:04.383Z" }, - { url = "https://files.pythonhosted.org/packages/45/3c/b4640f740ffebadd5d34df35fecba0e1cfef8fde9f3e594df91c28ad9b50/pywin32-310-cp313-cp313-win_amd64.whl", hash = "sha256:667827eb3a90208ddbdcc9e860c81bde63a135710e21e4cb3348968e4bd5249e", size = 9503039, upload-time = "2025-03-17T00:56:06.207Z" }, - { url = "https://files.pythonhosted.org/packages/b4/f4/f785020090fb050e7fb6d34b780f2231f302609dc964672f72bfaeb59a28/pywin32-310-cp313-cp313-win_arm64.whl", hash = "sha256:e308f831de771482b7cf692a1f308f8fca701b2d8f9dde6cc440c7da17e47b33", size = 8458152, upload-time = "2025-03-17T00:56:07.819Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + 
{ url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, ] [[package]] @@ -2918,36 +1971,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, ] -[[package]] -name = "pyzmq" -version = "27.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cffi", marker = "implementation_name == 'pypy'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f1/06/50a4e9648b3e8b992bef8eb632e457307553a89d294103213cfd47b3da69/pyzmq-27.0.0.tar.gz", hash = "sha256:b1f08eeb9ce1510e6939b6e5dcd46a17765e2333daae78ecf4606808442e52cf", size = 280478, upload-time = "2025-06-13T14:09:07.087Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/93/a7/9ad68f55b8834ede477842214feba6a4c786d936c022a67625497aacf61d/pyzmq-27.0.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:cbabc59dcfaac66655c040dfcb8118f133fb5dde185e5fc152628354c1598e52", size = 1305438, upload-time = "2025-06-13T14:07:31.676Z" }, - { url = "https://files.pythonhosted.org/packages/ba/ee/26aa0f98665a22bc90ebe12dced1de5f3eaca05363b717f6fb229b3421b3/pyzmq-27.0.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:cb0ac5179cba4b2f94f1aa208fbb77b62c4c9bf24dd446278b8b602cf85fcda3", size = 895095, upload-time = "2025-06-13T14:07:33.104Z" }, - { url = "https://files.pythonhosted.org/packages/cf/85/c57e7ab216ecd8aa4cc7e3b83b06cc4e9cf45c87b0afc095f10cd5ce87c1/pyzmq-27.0.0-cp312-abi3-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53a48f0228eab6cbf69fde3aa3c03cbe04e50e623ef92ae395fce47ef8a76152", size = 651826, upload-time = "2025-06-13T14:07:34.831Z" }, - { url = "https://files.pythonhosted.org/packages/69/9a/9ea7e230feda9400fb0ae0d61d7d6ddda635e718d941c44eeab22a179d34/pyzmq-27.0.0-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:111db5f395e09f7e775f759d598f43cb815fc58e0147623c4816486e1a39dc22", size = 839750, upload-time = "2025-06-13T14:07:36.553Z" }, - { url = "https://files.pythonhosted.org/packages/08/66/4cebfbe71f3dfbd417011daca267539f62ed0fbc68105357b68bbb1a25b7/pyzmq-27.0.0-cp312-abi3-musllinux_1_2_aarch64.whl", hash = 
"sha256:c8878011653dcdc27cc2c57e04ff96f0471e797f5c19ac3d7813a245bcb24371", size = 1641357, upload-time = "2025-06-13T14:07:38.21Z" }, - { url = "https://files.pythonhosted.org/packages/ac/f6/b0f62578c08d2471c791287149cb8c2aaea414ae98c6e995c7dbe008adfb/pyzmq-27.0.0-cp312-abi3-musllinux_1_2_i686.whl", hash = "sha256:c0ed2c1f335ba55b5fdc964622254917d6b782311c50e138863eda409fbb3b6d", size = 2020281, upload-time = "2025-06-13T14:07:39.599Z" }, - { url = "https://files.pythonhosted.org/packages/37/b9/4f670b15c7498495da9159edc374ec09c88a86d9cd5a47d892f69df23450/pyzmq-27.0.0-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e918d70862d4cfd4b1c187310015646a14e1f5917922ab45b29f28f345eeb6be", size = 1877110, upload-time = "2025-06-13T14:07:41.027Z" }, - { url = "https://files.pythonhosted.org/packages/66/31/9dee25c226295b740609f0d46db2fe972b23b6f5cf786360980524a3ba92/pyzmq-27.0.0-cp312-abi3-win32.whl", hash = "sha256:88b4e43cab04c3c0f0d55df3b1eef62df2b629a1a369b5289a58f6fa8b07c4f4", size = 559297, upload-time = "2025-06-13T14:07:42.533Z" }, - { url = "https://files.pythonhosted.org/packages/9b/12/52da5509800f7ff2d287b2f2b4e636e7ea0f001181cba6964ff6c1537778/pyzmq-27.0.0-cp312-abi3-win_amd64.whl", hash = "sha256:dce4199bf5f648a902ce37e7b3afa286f305cd2ef7a8b6ec907470ccb6c8b371", size = 619203, upload-time = "2025-06-13T14:07:43.843Z" }, - { url = "https://files.pythonhosted.org/packages/93/6d/7f2e53b19d1edb1eb4f09ec7c3a1f945ca0aac272099eab757d15699202b/pyzmq-27.0.0-cp312-abi3-win_arm64.whl", hash = "sha256:56e46bbb85d52c1072b3f809cc1ce77251d560bc036d3a312b96db1afe76db2e", size = 551927, upload-time = "2025-06-13T14:07:45.51Z" }, - { url = "https://files.pythonhosted.org/packages/19/62/876b27c4ff777db4ceba1c69ea90d3c825bb4f8d5e7cd987ce5802e33c55/pyzmq-27.0.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c36ad534c0c29b4afa088dc53543c525b23c0797e01b69fef59b1a9c0e38b688", size = 1340826, upload-time = "2025-06-13T14:07:46.881Z" }, - { url = "https://files.pythonhosted.org/packages/43/69/58ef8f4f59d3bcd505260c73bee87b008850f45edca40ddaba54273c35f4/pyzmq-27.0.0-cp313-cp313t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:67855c14173aec36395d7777aaba3cc527b393821f30143fd20b98e1ff31fd38", size = 897283, upload-time = "2025-06-13T14:07:49.562Z" }, - { url = "https://files.pythonhosted.org/packages/43/15/93a0d0396700a60475ad3c5d42c5f1c308d3570bc94626b86c71ef9953e0/pyzmq-27.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8617c7d43cd8ccdb62aebe984bfed77ca8f036e6c3e46dd3dddda64b10f0ab7a", size = 660567, upload-time = "2025-06-13T14:07:51.364Z" }, - { url = "https://files.pythonhosted.org/packages/0e/b3/fe055513e498ca32f64509abae19b9c9eb4d7c829e02bd8997dd51b029eb/pyzmq-27.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:67bfbcbd0a04c575e8103a6061d03e393d9f80ffdb9beb3189261e9e9bc5d5e9", size = 847681, upload-time = "2025-06-13T14:07:52.77Z" }, - { url = "https://files.pythonhosted.org/packages/b6/4f/ff15300b00b5b602191f3df06bbc8dd4164e805fdd65bb77ffbb9c5facdc/pyzmq-27.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5cd11d46d7b7e5958121b3eaf4cd8638eff3a720ec527692132f05a57f14341d", size = 1650148, upload-time = "2025-06-13T14:07:54.178Z" }, - { url = "https://files.pythonhosted.org/packages/c4/6f/84bdfff2a224a6f26a24249a342e5906993c50b0761e311e81b39aef52a7/pyzmq-27.0.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:b801c2e40c5aa6072c2f4876de8dccd100af6d9918d4d0d7aa54a1d982fd4f44", size = 2023768, 
upload-time = "2025-06-13T14:07:55.714Z" }, - { url = "https://files.pythonhosted.org/packages/64/39/dc2db178c26a42228c5ac94a9cc595030458aa64c8d796a7727947afbf55/pyzmq-27.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:20d5cb29e8c5f76a127c75b6e7a77e846bc4b655c373baa098c26a61b7ecd0ef", size = 1885199, upload-time = "2025-06-13T14:07:57.166Z" }, - { url = "https://files.pythonhosted.org/packages/c7/21/dae7b06a1f8cdee5d8e7a63d99c5d129c401acc40410bef2cbf42025e26f/pyzmq-27.0.0-cp313-cp313t-win32.whl", hash = "sha256:a20528da85c7ac7a19b7384e8c3f8fa707841fd85afc4ed56eda59d93e3d98ad", size = 575439, upload-time = "2025-06-13T14:07:58.959Z" }, - { url = "https://files.pythonhosted.org/packages/eb/bc/1709dc55f0970cf4cb8259e435e6773f9946f41a045c2cb90e870b7072da/pyzmq-27.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d8229f2efece6a660ee211d74d91dbc2a76b95544d46c74c615e491900dc107f", size = 639933, upload-time = "2025-06-13T14:08:00.777Z" }, -] - [[package]] name = "referencing" version = "0.36.2" @@ -3017,15 +2040,15 @@ wheels = [ [[package]] name = "rich" -version = "14.0.0" +version = "14.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078, upload-time = "2025-03-30T14:15:14.23Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, + { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, ] [[package]] @@ -3118,36 +2141,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.12.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/3d/d9a195676f25d00dbfcf3cf95fdd4c685c497fcfa7e862a44ac5e4e96480/ruff-0.12.2.tar.gz", hash = "sha256:d7b4f55cd6f325cb7621244f19c873c565a08aff5a4ba9c69aa7355f3f7afd3e", size = 4432239, upload-time = "2025-07-03T16:40:19.566Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/74/b6/2098d0126d2d3318fd5bec3ad40d06c25d377d95749f7a0c5af17129b3b1/ruff-0.12.2-py3-none-linux_armv6l.whl", hash = "sha256:093ea2b221df1d2b8e7ad92fc6ffdca40a2cb10d8564477a987b44fd4008a7be", size = 10369761, upload-time = "2025-07-03T16:39:38.847Z" }, - { url = "https://files.pythonhosted.org/packages/b1/4b/5da0142033dbe155dc598cfb99262d8ee2449d76920ea92c4eeb9547c208/ruff-0.12.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:09e4cf27cc10f96b1708100fa851e0daf21767e9709e1649175355280e0d950e", size = 11155659, upload-time = "2025-07-03T16:39:42.294Z" }, - { url = 
"https://files.pythonhosted.org/packages/3e/21/967b82550a503d7c5c5c127d11c935344b35e8c521f52915fc858fb3e473/ruff-0.12.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8ae64755b22f4ff85e9c52d1f82644abd0b6b6b6deedceb74bd71f35c24044cc", size = 10537769, upload-time = "2025-07-03T16:39:44.75Z" }, - { url = "https://files.pythonhosted.org/packages/33/91/00cff7102e2ec71a4890fb7ba1803f2cdb122d82787c7d7cf8041fe8cbc1/ruff-0.12.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eb3a6b2db4d6e2c77e682f0b988d4d61aff06860158fdb413118ca133d57922", size = 10717602, upload-time = "2025-07-03T16:39:47.652Z" }, - { url = "https://files.pythonhosted.org/packages/9b/eb/928814daec4e1ba9115858adcda44a637fb9010618721937491e4e2283b8/ruff-0.12.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73448de992d05517170fc37169cbca857dfeaeaa8c2b9be494d7bcb0d36c8f4b", size = 10198772, upload-time = "2025-07-03T16:39:49.641Z" }, - { url = "https://files.pythonhosted.org/packages/50/fa/f15089bc20c40f4f72334f9145dde55ab2b680e51afb3b55422effbf2fb6/ruff-0.12.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b94317cbc2ae4a2771af641739f933934b03555e51515e6e021c64441532d", size = 11845173, upload-time = "2025-07-03T16:39:52.069Z" }, - { url = "https://files.pythonhosted.org/packages/43/9f/1f6f98f39f2b9302acc161a4a2187b1e3a97634fe918a8e731e591841cf4/ruff-0.12.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:45fc42c3bf1d30d2008023a0a9a0cfb06bf9835b147f11fe0679f21ae86d34b1", size = 12553002, upload-time = "2025-07-03T16:39:54.551Z" }, - { url = "https://files.pythonhosted.org/packages/d8/70/08991ac46e38ddd231c8f4fd05ef189b1b94be8883e8c0c146a025c20a19/ruff-0.12.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce48f675c394c37e958bf229fb5c1e843e20945a6d962cf3ea20b7a107dcd9f4", size = 12171330, upload-time = "2025-07-03T16:39:57.55Z" }, - { url = "https://files.pythonhosted.org/packages/88/a9/5a55266fec474acfd0a1c73285f19dd22461d95a538f29bba02edd07a5d9/ruff-0.12.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793d8859445ea47591272021a81391350205a4af65a9392401f418a95dfb75c9", size = 11774717, upload-time = "2025-07-03T16:39:59.78Z" }, - { url = "https://files.pythonhosted.org/packages/87/e5/0c270e458fc73c46c0d0f7cf970bb14786e5fdb88c87b5e423a4bd65232b/ruff-0.12.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6932323db80484dda89153da3d8e58164d01d6da86857c79f1961934354992da", size = 11646659, upload-time = "2025-07-03T16:40:01.934Z" }, - { url = "https://files.pythonhosted.org/packages/b7/b6/45ab96070c9752af37f0be364d849ed70e9ccede07675b0ec4e3ef76b63b/ruff-0.12.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6aa7e623a3a11538108f61e859ebf016c4f14a7e6e4eba1980190cacb57714ce", size = 10604012, upload-time = "2025-07-03T16:40:04.363Z" }, - { url = "https://files.pythonhosted.org/packages/86/91/26a6e6a424eb147cc7627eebae095cfa0b4b337a7c1c413c447c9ebb72fd/ruff-0.12.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2a4a20aeed74671b2def096bdf2eac610c7d8ffcbf4fb0e627c06947a1d7078d", size = 10176799, upload-time = "2025-07-03T16:40:06.514Z" }, - { url = "https://files.pythonhosted.org/packages/f5/0c/9f344583465a61c8918a7cda604226e77b2c548daf8ef7c2bfccf2b37200/ruff-0.12.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:71a4c550195612f486c9d1f2b045a600aeba851b298c667807ae933478fcef04", size = 11241507, upload-time = "2025-07-03T16:40:08.708Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/b7/99c34ded8fb5f86c0280278fa89a0066c3760edc326e935ce0b1550d315d/ruff-0.12.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4987b8f4ceadf597c927beee65a5eaf994c6e2b631df963f86d8ad1bdea99342", size = 11717609, upload-time = "2025-07-03T16:40:10.836Z" }, - { url = "https://files.pythonhosted.org/packages/51/de/8589fa724590faa057e5a6d171e7f2f6cffe3287406ef40e49c682c07d89/ruff-0.12.2-py3-none-win32.whl", hash = "sha256:369ffb69b70cd55b6c3fc453b9492d98aed98062db9fec828cdfd069555f5f1a", size = 10523823, upload-time = "2025-07-03T16:40:13.203Z" }, - { url = "https://files.pythonhosted.org/packages/94/47/8abf129102ae4c90cba0c2199a1a9b0fa896f6f806238d6f8c14448cc748/ruff-0.12.2-py3-none-win_amd64.whl", hash = "sha256:dca8a3b6d6dc9810ed8f328d406516bf4d660c00caeaef36eb831cf4871b0639", size = 11629831, upload-time = "2025-07-03T16:40:15.478Z" }, - { url = "https://files.pythonhosted.org/packages/e2/1f/72d2946e3cc7456bb837e88000eb3437e55f80db339c840c04015a11115d/ruff-0.12.2-py3-none-win_arm64.whl", hash = "sha256:48d6c6bfb4761df68bc05ae630e24f506755e702d4fb08f08460be778c7ccb12", size = 10735334, upload-time = "2025-07-03T16:40:17.677Z" }, -] - -[[package]] -name = "setuptools" -version = "80.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +version = "0.12.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9b/ce/8d7dbedede481245b489b769d27e2934730791a9a82765cb94566c6e6abd/ruff-0.12.4.tar.gz", hash = "sha256:13efa16df6c6eeb7d0f091abae50f58e9522f3843edb40d56ad52a5a4a4b6873", size = 5131435, upload-time = "2025-07-17T17:27:19.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/9f/517bc5f61bad205b7f36684ffa5415c013862dee02f55f38a217bdbe7aa4/ruff-0.12.4-py3-none-linux_armv6l.whl", hash = "sha256:cb0d261dac457ab939aeb247e804125a5d521b21adf27e721895b0d3f83a0d0a", size = 10188824, upload-time = "2025-07-17T17:26:31.412Z" }, + { url = "https://files.pythonhosted.org/packages/28/83/691baae5a11fbbde91df01c565c650fd17b0eabed259e8b7563de17c6529/ruff-0.12.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:55c0f4ca9769408d9b9bac530c30d3e66490bd2beb2d3dae3e4128a1f05c7442", size = 10884521, upload-time = "2025-07-17T17:26:35.084Z" }, + { url = "https://files.pythonhosted.org/packages/d6/8d/756d780ff4076e6dd035d058fa220345f8c458391f7edfb1c10731eedc75/ruff-0.12.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a8224cc3722c9ad9044da7f89c4c1ec452aef2cfe3904365025dd2f51daeae0e", size = 10277653, upload-time = "2025-07-17T17:26:37.897Z" }, + { url = "https://files.pythonhosted.org/packages/8d/97/8eeee0f48ece153206dce730fc9e0e0ca54fd7f261bb3d99c0a4343a1892/ruff-0.12.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9949d01d64fa3672449a51ddb5d7548b33e130240ad418884ee6efa7a229586", size = 10485993, upload-time = "2025-07-17T17:26:40.68Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/b8/22a43d23a1f68df9b88f952616c8508ea6ce4ed4f15353b8168c48b2d7e7/ruff-0.12.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:be0593c69df9ad1465e8a2d10e3defd111fdb62dcd5be23ae2c06da77e8fcffb", size = 10022824, upload-time = "2025-07-17T17:26:43.564Z" }, + { url = "https://files.pythonhosted.org/packages/cd/70/37c234c220366993e8cffcbd6cadbf332bfc848cbd6f45b02bade17e0149/ruff-0.12.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7dea966bcb55d4ecc4cc3270bccb6f87a337326c9dcd3c07d5b97000dbff41c", size = 11524414, upload-time = "2025-07-17T17:26:46.219Z" }, + { url = "https://files.pythonhosted.org/packages/14/77/c30f9964f481b5e0e29dd6a1fae1f769ac3fd468eb76fdd5661936edd262/ruff-0.12.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afcfa3ab5ab5dd0e1c39bf286d829e042a15e966b3726eea79528e2e24d8371a", size = 12419216, upload-time = "2025-07-17T17:26:48.883Z" }, + { url = "https://files.pythonhosted.org/packages/6e/79/af7fe0a4202dce4ef62c5e33fecbed07f0178f5b4dd9c0d2fcff5ab4a47c/ruff-0.12.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c057ce464b1413c926cdb203a0f858cd52f3e73dcb3270a3318d1630f6395bb3", size = 11976756, upload-time = "2025-07-17T17:26:51.754Z" }, + { url = "https://files.pythonhosted.org/packages/09/d1/33fb1fc00e20a939c305dbe2f80df7c28ba9193f7a85470b982815a2dc6a/ruff-0.12.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e64b90d1122dc2713330350626b10d60818930819623abbb56535c6466cce045", size = 11020019, upload-time = "2025-07-17T17:26:54.265Z" }, + { url = "https://files.pythonhosted.org/packages/64/f4/e3cd7f7bda646526f09693e2e02bd83d85fff8a8222c52cf9681c0d30843/ruff-0.12.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2abc48f3d9667fdc74022380b5c745873499ff827393a636f7a59da1515e7c57", size = 11277890, upload-time = "2025-07-17T17:26:56.914Z" }, + { url = "https://files.pythonhosted.org/packages/5e/d0/69a85fb8b94501ff1a4f95b7591505e8983f38823da6941eb5b6badb1e3a/ruff-0.12.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2b2449dc0c138d877d629bea151bee8c0ae3b8e9c43f5fcaafcd0c0d0726b184", size = 10348539, upload-time = "2025-07-17T17:26:59.381Z" }, + { url = "https://files.pythonhosted.org/packages/16/a0/91372d1cb1678f7d42d4893b88c252b01ff1dffcad09ae0c51aa2542275f/ruff-0.12.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:56e45bb11f625db55f9b70477062e6a1a04d53628eda7784dce6e0f55fd549eb", size = 10009579, upload-time = "2025-07-17T17:27:02.462Z" }, + { url = "https://files.pythonhosted.org/packages/23/1b/c4a833e3114d2cc0f677e58f1df6c3b20f62328dbfa710b87a1636a5e8eb/ruff-0.12.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:478fccdb82ca148a98a9ff43658944f7ab5ec41c3c49d77cd99d44da019371a1", size = 10942982, upload-time = "2025-07-17T17:27:05.343Z" }, + { url = "https://files.pythonhosted.org/packages/ff/ce/ce85e445cf0a5dd8842f2f0c6f0018eedb164a92bdf3eda51984ffd4d989/ruff-0.12.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0fc426bec2e4e5f4c4f182b9d2ce6a75c85ba9bcdbe5c6f2a74fcb8df437df4b", size = 11343331, upload-time = "2025-07-17T17:27:08.652Z" }, + { url = "https://files.pythonhosted.org/packages/35/cf/441b7fc58368455233cfb5b77206c849b6dfb48b23de532adcc2e50ccc06/ruff-0.12.4-py3-none-win32.whl", hash = "sha256:4de27977827893cdfb1211d42d84bc180fceb7b72471104671c59be37041cf93", size = 10267904, upload-time = "2025-07-17T17:27:11.814Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/7e/20af4a0df5e1299e7368d5ea4350412226afb03d95507faae94c80f00afd/ruff-0.12.4-py3-none-win_amd64.whl", hash = "sha256:fe0b9e9eb23736b453143d72d2ceca5db323963330d5b7859d60d101147d461a", size = 11209038, upload-time = "2025-07-17T17:27:14.417Z" }, + { url = "https://files.pythonhosted.org/packages/11/02/8857d0dfb8f44ef299a5dfd898f673edefb71e3b533b3b9d2db4c832dd13/ruff-0.12.4-py3-none-win_arm64.whl", hash = "sha256:0618ec4442a83ab545e5b71202a5c0ed7791e8471435b94e655b570a5031a98e", size = 10469336, upload-time = "2025-07-17T17:27:16.913Z" }, ] [[package]] @@ -3212,15 +2226,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, ] -[[package]] -name = "soupsieve" -version = "2.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3f/f4/4a80cd6ef364b2e8b65b15816a843c0980f7a5a2b4dc701fc574952aa19f/soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a", size = 103418, upload-time = "2025-04-20T18:50:08.518Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677, upload-time = "2025-04-20T18:50:07.196Z" }, -] - [[package]] name = "sqlalchemy" version = "2.0.41" @@ -3250,79 +2255,29 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1c/fc/9ba22f01b5cdacc8f5ed0d22304718d2c758fce3fd49a5372b886a86f37c/sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", size = 1911224, upload-time = "2025-05-14T17:39:42.154Z" }, ] -[[package]] -name = "sqlite-vec" -version = "0.1.7a2" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/3e/ec6ef1e18b4a9370740a2e5c33474468f1e75275a0bb7fec3fd6f9e95b34/sqlite_vec-0.1.7a2-py3-none-macosx_10_6_x86_64.whl", hash = "sha256:a08dd9396d494ac8970ba519a3931410f08c0c5eeadd0e1a2e02053789f6c877", size = 163783, upload-time = "2025-01-10T23:19:30.223Z" }, - { url = "https://files.pythonhosted.org/packages/f6/dd/e23fa64e1080720dc8972620fa410916b7054822d574c70211336bac1c09/sqlite_vec-0.1.7a2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b98d7af645d28c0b5c844bf1d99fe2103fe1320fe2bbf36d0713f0b36764fdcb", size = 165270, upload-time = "2025-01-10T23:19:32.518Z" }, - { url = "https://files.pythonhosted.org/packages/45/d2/2a04ea12023ba76bc7668ce984243a372cb26257c05b0df8284b93b8fa98/sqlite_vec-0.1.7a2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ff6088435f49cbb97422171bd17d7bcc9b67c5e6890ece680e53a679dd0ff7c", size = 151632, upload-time = "2025-01-10T23:19:33.58Z" }, - { url = "https://files.pythonhosted.org/packages/30/80/1956290bf428da40d3ebbdb6673eebcfbb0d5085d7a70d9d9d9f6995a98c/sqlite_vec-0.1.7a2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux1_x86_64.whl", hash = "sha256:0fb454ac72eda4f5fe0d49ded740bf90c397e8beced6099112d6937f98740202", size = 151584, upload-time = "2025-01-10T23:19:35.921Z" }, - { url = 
"https://files.pythonhosted.org/packages/67/44/823066a928b65dff2d8d7c82c9929b96c0b3846c9f5f9667f1377c53690f/sqlite_vec-0.1.7a2-py3-none-win_amd64.whl", hash = "sha256:b6c3365e0fb62ee6eceaba269c57792a100c52ebd564866a64f15596e50c3f42", size = 282080, upload-time = "2025-01-10T23:19:37.018Z" }, -] - -[[package]] -name = "sqlmodel" -version = "0.0.24" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pydantic" }, - { name = "sqlalchemy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/86/4b/c2ad0496f5bdc6073d9b4cef52be9c04f2b37a5773441cc6600b1857648b/sqlmodel-0.0.24.tar.gz", hash = "sha256:cc5c7613c1a5533c9c7867e1aab2fd489a76c9e8a061984da11b4e613c182423", size = 116780, upload-time = "2025-03-07T05:43:32.887Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/16/91/484cd2d05569892b7fef7f5ceab3bc89fb0f8a8c0cde1030d383dbc5449c/sqlmodel-0.0.24-py3-none-any.whl", hash = "sha256:6778852f09370908985b667d6a3ab92910d0d5ec88adcaf23dbc242715ff7193", size = 28622, upload-time = "2025-03-07T05:43:30.37Z" }, -] - [[package]] name = "sse-starlette" -version = "2.3.6" +version = "3.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8c/f4/989bc70cb8091eda43a9034ef969b25145291f3601703b82766e5172dfed/sse_starlette-2.3.6.tar.gz", hash = "sha256:0382336f7d4ec30160cf9ca0518962905e1b69b72d6c1c995131e0a703b436e3", size = 18284, upload-time = "2025-05-30T13:34:12.914Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/81/05/78850ac6e79af5b9508f8841b0f26aa9fd329a1ba00bf65453c2d312bcc8/sse_starlette-2.3.6-py3-none-any.whl", hash = "sha256:d49a8285b182f6e2228e2609c350398b2ca2c36216c2675d875f81e93548f760", size = 10606, upload-time = "2025-05-30T13:34:11.703Z" }, -] - -[[package]] -name = "stack-data" -version = "0.6.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "asttokens" }, - { name = "executing" }, - { name = "pure-eval" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" } +sdist = { url = "https://files.pythonhosted.org/packages/42/6f/22ed6e33f8a9e76ca0a412405f31abb844b779d52c5f96660766edcd737c/sse_starlette-3.0.2.tar.gz", hash = "sha256:ccd60b5765ebb3584d0de2d7a6e4f745672581de4f5005ab31c3a25d10b52b3a", size = 20985, upload-time = "2025-07-27T09:07:44.565Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" }, + { url = "https://files.pythonhosted.org/packages/ef/10/c78f463b4ef22eef8491f218f692be838282cd65480f6e423d7730dfd1fb/sse_starlette-3.0.2-py3-none-any.whl", hash = "sha256:16b7cbfddbcd4eaca11f7b586f3b8a080f1afe952c15813455b162edea619e5a", size = 11297, upload-time = "2025-07-27T09:07:43.268Z" }, ] [[package]] name = "starlette" -version = "0.46.2" +version = "0.47.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/ce/20/08dfcd9c983f6a6f4a1000d934b9e6d626cff8d2eeb77a89a68eef20a2b7/starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5", size = 2580846, upload-time = "2025-04-13T13:56:17.942Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037, upload-time = "2025-04-13T13:56:16.21Z" }, -] - -[[package]] -name = "sympy" -version = "1.14.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mpmath" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921, upload-time = "2025-04-27T18:05:01.611Z" } +sdist = { url = "https://files.pythonhosted.org/packages/04/57/d062573f391d062710d4088fa1369428c38d51460ab6fedff920efef932e/starlette-0.47.2.tar.gz", hash = "sha256:6ae9aa5db235e4846decc1e7b79c4f346adf41e9777aebeb49dfd09bbd7023d8", size = 2583948, upload-time = "2025-07-20T17:31:58.522Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" }, + { url = "https://files.pythonhosted.org/packages/f7/1f/b876b1f83aef204198a42dc101613fefccb32258e5428b5f9259677864b4/starlette-0.47.2-py3-none-any.whl", hash = "sha256:c5847e96134e5c5371ee9fac6fdf1a67336d5815e09eb2a01fdb57a351ef915b", size = 72984, upload-time = "2025-07-20T17:31:56.738Z" }, ] [[package]] @@ -3336,70 +2291,51 @@ wheels = [ [[package]] name = "tiktoken" -version = "0.8.0" +version = "0.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "regex" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/37/02/576ff3a6639e755c4f70997b2d315f56d6d71e0d046f4fb64cb81a3fb099/tiktoken-0.8.0.tar.gz", hash = "sha256:9ccbb2740f24542534369c5635cfd9b2b3c2490754a78ac8831d99f89f94eeb2", size = 35107, upload-time = "2024-10-03T22:44:04.196Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/cf/756fedf6981e82897f2d570dd25fa597eb3f4459068ae0572d7e888cfd6f/tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d", size = 35991, upload-time = "2025-02-14T06:03:01.003Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/22/34b2e136a6f4af186b6640cbfd6f93400783c9ef6cd550d9eab80628d9de/tiktoken-0.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:881839cfeae051b3628d9823b2e56b5cc93a9e2efb435f4cf15f17dc45f21586", size = 1039357, upload-time = "2024-10-03T22:43:36.362Z" }, - { url = "https://files.pythonhosted.org/packages/04/d2/c793cf49c20f5855fd6ce05d080c0537d7418f22c58e71f392d5e8c8dbf7/tiktoken-0.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fe9399bdc3f29d428f16a2f86c3c8ec20be3eac5f53693ce4980371c3245729b", size = 982616, upload-time = "2024-10-03T22:43:37.658Z" }, - { url = "https://files.pythonhosted.org/packages/b3/a1/79846e5ef911cd5d75c844de3fa496a10c91b4b5f550aad695c5df153d72/tiktoken-0.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9a58deb7075d5b69237a3ff4bb51a726670419db6ea62bdcd8bd80c78497d7ab", size = 1144011, upload-time = "2024-10-03T22:43:39.092Z" }, - { url = "https://files.pythonhosted.org/packages/26/32/e0e3a859136e95c85a572e4806dc58bf1ddf651108ae8b97d5f3ebe1a244/tiktoken-0.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2908c0d043a7d03ebd80347266b0e58440bdef5564f84f4d29fb235b5df3b04", size = 1175432, upload-time = "2024-10-03T22:43:40.323Z" }, - { url = "https://files.pythonhosted.org/packages/c7/89/926b66e9025b97e9fbabeaa59048a736fe3c3e4530a204109571104f921c/tiktoken-0.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:294440d21a2a51e12d4238e68a5972095534fe9878be57d905c476017bff99fc", size = 1236576, upload-time = "2024-10-03T22:43:41.516Z" }, - { url = "https://files.pythonhosted.org/packages/45/e2/39d4aa02a52bba73b2cd21ba4533c84425ff8786cc63c511d68c8897376e/tiktoken-0.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:d8f3192733ac4d77977432947d563d7e1b310b96497acd3c196c9bddb36ed9db", size = 883824, upload-time = "2024-10-03T22:43:43.33Z" }, - { url = "https://files.pythonhosted.org/packages/e3/38/802e79ba0ee5fcbf240cd624143f57744e5d411d2e9d9ad2db70d8395986/tiktoken-0.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:02be1666096aff7da6cbd7cdaa8e7917bfed3467cd64b38b1f112e96d3b06a24", size = 1039648, upload-time = "2024-10-03T22:43:45.22Z" }, - { url = "https://files.pythonhosted.org/packages/b1/da/24cdbfc302c98663fbea66f5866f7fa1048405c7564ab88483aea97c3b1a/tiktoken-0.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c94ff53c5c74b535b2cbf431d907fc13c678bbd009ee633a2aca269a04389f9a", size = 982763, upload-time = "2024-10-03T22:43:46.571Z" }, - { url = "https://files.pythonhosted.org/packages/e4/f0/0ecf79a279dfa41fc97d00adccf976ecc2556d3c08ef3e25e45eb31f665b/tiktoken-0.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b231f5e8982c245ee3065cd84a4712d64692348bc609d84467c57b4b72dcbc5", size = 1144417, upload-time = "2024-10-03T22:43:48.633Z" }, - { url = "https://files.pythonhosted.org/packages/ab/d3/155d2d4514f3471a25dc1d6d20549ef254e2aa9bb5b1060809b1d3b03d3a/tiktoken-0.8.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4177faa809bd55f699e88c96d9bb4635d22e3f59d635ba6fd9ffedf7150b9953", size = 1175108, upload-time = "2024-10-03T22:43:50.568Z" }, - { url = "https://files.pythonhosted.org/packages/19/eb/5989e16821ee8300ef8ee13c16effc20dfc26c777d05fbb6825e3c037b81/tiktoken-0.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5376b6f8dc4753cd81ead935c5f518fa0fbe7e133d9e25f648d8c4dabdd4bad7", size = 1236520, upload-time = "2024-10-03T22:43:51.759Z" }, - { url = "https://files.pythonhosted.org/packages/40/59/14b20465f1d1cb89cfbc96ec27e5617b2d41c79da12b5e04e96d689be2a7/tiktoken-0.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:18228d624807d66c87acd8f25fc135665617cab220671eb65b50f5d70fa51f69", size = 883849, upload-time = "2024-10-03T22:43:53.999Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e5/21ff33ecfa2101c1bb0f9b6df750553bd873b7fb532ce2cb276ff40b197f/tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03", size = 1065073, upload-time = "2025-02-14T06:02:24.768Z" }, + { url = "https://files.pythonhosted.org/packages/8e/03/a95e7b4863ee9ceec1c55983e4cc9558bcfd8f4f80e19c4f8a99642f697d/tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210", size = 1008075, upload-time = "2025-02-14T06:02:26.92Z" }, + { url = "https://files.pythonhosted.org/packages/40/10/1305bb02a561595088235a513ec73e50b32e74364fef4de519da69bc8010/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794", size = 1140754, upload-time = "2025-02-14T06:02:28.124Z" }, + { url = "https://files.pythonhosted.org/packages/1b/40/da42522018ca496432ffd02793c3a72a739ac04c3794a4914570c9bb2925/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22", size = 1196678, upload-time = "2025-02-14T06:02:29.845Z" }, + { url = "https://files.pythonhosted.org/packages/5c/41/1e59dddaae270ba20187ceb8aa52c75b24ffc09f547233991d5fd822838b/tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2", size = 1259283, upload-time = "2025-02-14T06:02:33.838Z" }, + { url = "https://files.pythonhosted.org/packages/5b/64/b16003419a1d7728d0d8c0d56a4c24325e7b10a21a9dd1fc0f7115c02f0a/tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16", size = 894897, upload-time = "2025-02-14T06:02:36.265Z" }, + { url = "https://files.pythonhosted.org/packages/7a/11/09d936d37f49f4f494ffe660af44acd2d99eb2429d60a57c71318af214e0/tiktoken-0.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b0e8e05a26eda1249e824156d537015480af7ae222ccb798e5234ae0285dbdb", size = 1064919, upload-time = "2025-02-14T06:02:37.494Z" }, + { url = "https://files.pythonhosted.org/packages/80/0e/f38ba35713edb8d4197ae602e80837d574244ced7fb1b6070b31c29816e0/tiktoken-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:27d457f096f87685195eea0165a1807fae87b97b2161fe8c9b1df5bd74ca6f63", size = 1007877, upload-time = "2025-02-14T06:02:39.516Z" }, + { url = "https://files.pythonhosted.org/packages/fe/82/9197f77421e2a01373e27a79dd36efdd99e6b4115746ecc553318ecafbf0/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf8ded49cddf825390e36dd1ad35cd49589e8161fdcb52aa25f0583e90a3e01", size = 1140095, upload-time = "2025-02-14T06:02:41.791Z" }, + { url = "https://files.pythonhosted.org/packages/f2/bb/4513da71cac187383541facd0291c4572b03ec23c561de5811781bbd988f/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc156cb314119a8bb9748257a2eaebd5cc0753b6cb491d26694ed42fc7cb3139", size = 1195649, upload-time = "2025-02-14T06:02:43Z" }, + { url = "https://files.pythonhosted.org/packages/fa/5c/74e4c137530dd8504e97e3a41729b1103a4ac29036cbfd3250b11fd29451/tiktoken-0.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cd69372e8c9dd761f0ab873112aba55a0e3e506332dd9f7522ca466e817b1b7a", size = 1258465, upload-time = "2025-02-14T06:02:45.046Z" }, + { url = "https://files.pythonhosted.org/packages/de/a8/8f499c179ec900783ffe133e9aab10044481679bb9aad78436d239eee716/tiktoken-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:5ea0edb6f83dc56d794723286215918c1cde03712cbbafa0348b33448faf5b95", size = 894669, upload-time = "2025-02-14T06:02:47.341Z" }, ] [[package]] name = "tokenizers" -version = "0.21.2" +version = "0.21.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/ab/2d/b0fce2b8201635f60e8c95990080f58461cc9ca3d5026de2e900f38a7f21/tokenizers-0.21.2.tar.gz", hash = "sha256:fdc7cffde3e2113ba0e6cc7318c40e3438a4d74bbc62bf04bcc63bdfb082ac77", size = 351545, upload-time = "2025-06-24T10:24:52.449Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/cc/2936e2d45ceb130a21d929743f1e9897514691bec123203e10837972296f/tokenizers-0.21.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:342b5dfb75009f2255ab8dec0041287260fed5ce00c323eb6bab639066fef8ec", size = 2875206, upload-time = "2025-06-24T10:24:42.755Z" }, - { url = "https://files.pythonhosted.org/packages/6c/e6/33f41f2cc7861faeba8988e7a77601407bf1d9d28fc79c5903f8f77df587/tokenizers-0.21.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:126df3205d6f3a93fea80c7a8a266a78c1bd8dd2fe043386bafdd7736a23e45f", size = 2732655, upload-time = "2025-06-24T10:24:41.56Z" }, - { url = "https://files.pythonhosted.org/packages/33/2b/1791eb329c07122a75b01035b1a3aa22ad139f3ce0ece1b059b506d9d9de/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a32cd81be21168bd0d6a0f0962d60177c447a1aa1b1e48fa6ec9fc728ee0b12", size = 3019202, upload-time = "2025-06-24T10:24:31.791Z" }, - { url = "https://files.pythonhosted.org/packages/05/15/fd2d8104faa9f86ac68748e6f7ece0b5eb7983c7efc3a2c197cb98c99030/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8bd8999538c405133c2ab999b83b17c08b7fc1b48c1ada2469964605a709ef91", size = 2934539, upload-time = "2025-06-24T10:24:34.567Z" }, - { url = "https://files.pythonhosted.org/packages/a5/2e/53e8fd053e1f3ffbe579ca5f9546f35ac67cf0039ed357ad7ec57f5f5af0/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e9944e61239b083a41cf8fc42802f855e1dca0f499196df37a8ce219abac6eb", size = 3248665, upload-time = "2025-06-24T10:24:39.024Z" }, - { url = "https://files.pythonhosted.org/packages/00/15/79713359f4037aa8f4d1f06ffca35312ac83629da062670e8830917e2153/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:514cd43045c5d546f01142ff9c79a96ea69e4b5cda09e3027708cb2e6d5762ab", size = 3451305, upload-time = "2025-06-24T10:24:36.133Z" }, - { url = "https://files.pythonhosted.org/packages/38/5f/959f3a8756fc9396aeb704292777b84f02a5c6f25c3fc3ba7530db5feb2c/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1b9405822527ec1e0f7d8d2fdb287a5730c3a6518189c968254a8441b21faae", size = 3214757, upload-time = "2025-06-24T10:24:37.784Z" }, - { url = "https://files.pythonhosted.org/packages/c5/74/f41a432a0733f61f3d21b288de6dfa78f7acff309c6f0f323b2833e9189f/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed9a4d51c395103ad24f8e7eb976811c57fbec2af9f133df471afcd922e5020", size = 3121887, upload-time = "2025-06-24T10:24:40.293Z" }, - { url = "https://files.pythonhosted.org/packages/3c/6a/bc220a11a17e5d07b0dfb3b5c628621d4dcc084bccd27cfaead659963016/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2c41862df3d873665ec78b6be36fcc30a26e3d4902e9dd8608ed61d49a48bc19", size = 9091965, upload-time = "2025-06-24T10:24:44.431Z" }, - { url = "https://files.pythonhosted.org/packages/6c/bd/ac386d79c4ef20dc6f39c4706640c24823dca7ebb6f703bfe6b5f0292d88/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:ed21dc7e624e4220e21758b2e62893be7101453525e3d23264081c9ef9a6d00d", size = 9053372, upload-time = 
"2025-06-24T10:24:46.455Z" }, - { url = "https://files.pythonhosted.org/packages/63/7b/5440bf203b2a5358f074408f7f9c42884849cd9972879e10ee6b7a8c3b3d/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:0e73770507e65a0e0e2a1affd6b03c36e3bc4377bd10c9ccf51a82c77c0fe365", size = 9298632, upload-time = "2025-06-24T10:24:48.446Z" }, - { url = "https://files.pythonhosted.org/packages/a4/d2/faa1acac3f96a7427866e94ed4289949b2524f0c1878512516567d80563c/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:106746e8aa9014a12109e58d540ad5465b4c183768ea96c03cbc24c44d329958", size = 9470074, upload-time = "2025-06-24T10:24:50.378Z" }, - { url = "https://files.pythonhosted.org/packages/d8/a5/896e1ef0707212745ae9f37e84c7d50269411aef2e9ccd0de63623feecdf/tokenizers-0.21.2-cp39-abi3-win32.whl", hash = "sha256:cabda5a6d15d620b6dfe711e1af52205266d05b379ea85a8a301b3593c60e962", size = 2330115, upload-time = "2025-06-24T10:24:55.069Z" }, - { url = "https://files.pythonhosted.org/packages/13/c3/cc2755ee10be859c4338c962a35b9a663788c0c0b50c0bdd8078fb6870cf/tokenizers-0.21.2-cp39-abi3-win_amd64.whl", hash = "sha256:58747bb898acdb1007f37a7bbe614346e98dc28708ffb66a3fd50ce169ac6c98", size = 2509918, upload-time = "2025-06-24T10:24:53.71Z" }, -] - -[[package]] -name = "tornado" -version = "6.5.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/51/89/c72771c81d25d53fe33e3dca61c233b665b2780f21820ba6fd2c6793c12b/tornado-6.5.1.tar.gz", hash = "sha256:84ceece391e8eb9b2b95578db65e920d2a61070260594819589609ba9bc6308c", size = 509934, upload-time = "2025-05-22T18:15:38.788Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c2/2f/402986d0823f8d7ca139d969af2917fefaa9b947d1fb32f6168c509f2492/tokenizers-0.21.4.tar.gz", hash = "sha256:fa23f85fbc9a02ec5c6978da172cdcbac23498c3ca9f3645c5c68740ac007880", size = 351253, upload-time = "2025-07-28T15:48:54.325Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/89/f4532dee6843c9e0ebc4e28d4be04c67f54f60813e4bf73d595fe7567452/tornado-6.5.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d50065ba7fd11d3bd41bcad0825227cc9a95154bad83239357094c36708001f7", size = 441948, upload-time = "2025-05-22T18:15:20.862Z" }, - { url = "https://files.pythonhosted.org/packages/15/9a/557406b62cffa395d18772e0cdcf03bed2fff03b374677348eef9f6a3792/tornado-6.5.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9e9ca370f717997cb85606d074b0e5b247282cf5e2e1611568b8821afe0342d6", size = 440112, upload-time = "2025-05-22T18:15:22.591Z" }, - { url = "https://files.pythonhosted.org/packages/55/82/7721b7319013a3cf881f4dffa4f60ceff07b31b394e459984e7a36dc99ec/tornado-6.5.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b77e9dfa7ed69754a54c89d82ef746398be82f749df69c4d3abe75c4d1ff4888", size = 443672, upload-time = "2025-05-22T18:15:24.027Z" }, - { url = "https://files.pythonhosted.org/packages/7d/42/d11c4376e7d101171b94e03cef0cbce43e823ed6567ceda571f54cf6e3ce/tornado-6.5.1-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:253b76040ee3bab8bcf7ba9feb136436a3787208717a1fb9f2c16b744fba7331", size = 443019, upload-time = "2025-05-22T18:15:25.735Z" }, - { url = "https://files.pythonhosted.org/packages/7d/f7/0c48ba992d875521ac761e6e04b0a1750f8150ae42ea26df1852d6a98942/tornado-6.5.1-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:308473f4cc5a76227157cdf904de33ac268af770b2c5f05ca6c1161d82fdd95e", size = 443252, upload-time = "2025-05-22T18:15:27.499Z" }, - { url = "https://files.pythonhosted.org/packages/89/46/d8d7413d11987e316df4ad42e16023cd62666a3c0dfa1518ffa30b8df06c/tornado-6.5.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:caec6314ce8a81cf69bd89909f4b633b9f523834dc1a352021775d45e51d9401", size = 443930, upload-time = "2025-05-22T18:15:29.299Z" }, - { url = "https://files.pythonhosted.org/packages/78/b2/f8049221c96a06df89bed68260e8ca94beca5ea532ffc63b1175ad31f9cc/tornado-6.5.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:13ce6e3396c24e2808774741331638ee6c2f50b114b97a55c5b442df65fd9692", size = 443351, upload-time = "2025-05-22T18:15:31.038Z" }, - { url = "https://files.pythonhosted.org/packages/76/ff/6a0079e65b326cc222a54720a748e04a4db246870c4da54ece4577bfa702/tornado-6.5.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5cae6145f4cdf5ab24744526cc0f55a17d76f02c98f4cff9daa08ae9a217448a", size = 443328, upload-time = "2025-05-22T18:15:32.426Z" }, - { url = "https://files.pythonhosted.org/packages/49/18/e3f902a1d21f14035b5bc6246a8c0f51e0eef562ace3a2cea403c1fb7021/tornado-6.5.1-cp39-abi3-win32.whl", hash = "sha256:e0a36e1bc684dca10b1aa75a31df8bdfed656831489bc1e6a6ebed05dc1ec365", size = 444396, upload-time = "2025-05-22T18:15:34.205Z" }, - { url = "https://files.pythonhosted.org/packages/7b/09/6526e32bf1049ee7de3bebba81572673b19a2a8541f795d887e92af1a8bc/tornado-6.5.1-cp39-abi3-win_amd64.whl", hash = "sha256:908e7d64567cecd4c2b458075589a775063453aeb1d2a1853eedb806922f568b", size = 444840, upload-time = "2025-05-22T18:15:36.1Z" }, - { url = "https://files.pythonhosted.org/packages/55/a7/535c44c7bea4578e48281d83c615219f3ab19e6abc67625ef637c73987be/tornado-6.5.1-cp39-abi3-win_arm64.whl", hash = "sha256:02420a0eb7bf617257b9935e2b754d1b63897525d8a289c9d65690d580b4dcf7", size = 443596, upload-time = "2025-05-22T18:15:37.433Z" }, + { url = "https://files.pythonhosted.org/packages/98/c6/fdb6f72bf6454f52eb4a2510be7fb0f614e541a2554d6210e370d85efff4/tokenizers-0.21.4-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:2ccc10a7c3bcefe0f242867dc914fc1226ee44321eb618cfe3019b5df3400133", size = 2863987, upload-time = "2025-07-28T15:48:44.877Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a6/28975479e35ddc751dc1ddc97b9b69bf7fcf074db31548aab37f8116674c/tokenizers-0.21.4-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:5e2f601a8e0cd5be5cc7506b20a79112370b9b3e9cb5f13f68ab11acd6ca7d60", size = 2732457, upload-time = "2025-07-28T15:48:43.265Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8f/24f39d7b5c726b7b0be95dca04f344df278a3fe3a4deb15a975d194cbb32/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b376f5a1aee67b4d29032ee85511bbd1b99007ec735f7f35c8a2eb104eade5", size = 3012624, upload-time = "2025-07-28T13:22:43.895Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/26358925717687a58cb74d7a508de96649544fad5778f0cd9827398dc499/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2107ad649e2cda4488d41dfd031469e9da3fcbfd6183e74e4958fa729ffbf9c6", size = 2939681, upload-time = "2025-07-28T13:22:47.499Z" }, + { url = "https://files.pythonhosted.org/packages/99/6f/cc300fea5db2ab5ddc2c8aea5757a27b89c84469899710c3aeddc1d39801/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c73012da95afafdf235ba80047699df4384fdc481527448a078ffd00e45a7d9", size = 3247445, 
upload-time = "2025-07-28T15:48:39.711Z" }, + { url = "https://files.pythonhosted.org/packages/be/bf/98cb4b9c3c4afd8be89cfa6423704337dc20b73eb4180397a6e0d456c334/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f23186c40395fc390d27f519679a58023f368a0aad234af145e0f39ad1212732", size = 3428014, upload-time = "2025-07-28T13:22:49.569Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/96c1cc780e6ca7f01a57c13235dd05b7bc1c0f3588512ebe9d1331b5f5ae/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc88bb34e23a54cc42713d6d98af5f1bf79c07653d24fe984d2d695ba2c922a2", size = 3193197, upload-time = "2025-07-28T13:22:51.471Z" }, + { url = "https://files.pythonhosted.org/packages/f2/90/273b6c7ec78af547694eddeea9e05de771278bd20476525ab930cecaf7d8/tokenizers-0.21.4-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51b7eabb104f46c1c50b486520555715457ae833d5aee9ff6ae853d1130506ff", size = 3115426, upload-time = "2025-07-28T15:48:41.439Z" }, + { url = "https://files.pythonhosted.org/packages/91/43/c640d5a07e95f1cf9d2c92501f20a25f179ac53a4f71e1489a3dcfcc67ee/tokenizers-0.21.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:714b05b2e1af1288bd1bc56ce496c4cebb64a20d158ee802887757791191e6e2", size = 9089127, upload-time = "2025-07-28T15:48:46.472Z" }, + { url = "https://files.pythonhosted.org/packages/44/a1/dd23edd6271d4dca788e5200a807b49ec3e6987815cd9d0a07ad9c96c7c2/tokenizers-0.21.4-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:1340ff877ceedfa937544b7d79f5b7becf33a4cfb58f89b3b49927004ef66f78", size = 9055243, upload-time = "2025-07-28T15:48:48.539Z" }, + { url = "https://files.pythonhosted.org/packages/21/2b/b410d6e9021c4b7ddb57248304dc817c4d4970b73b6ee343674914701197/tokenizers-0.21.4-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:3c1f4317576e465ac9ef0d165b247825a2a4078bcd01cba6b54b867bdf9fdd8b", size = 9298237, upload-time = "2025-07-28T15:48:50.443Z" }, + { url = "https://files.pythonhosted.org/packages/b7/0a/42348c995c67e2e6e5c89ffb9cfd68507cbaeb84ff39c49ee6e0a6dd0fd2/tokenizers-0.21.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:c212aa4e45ec0bb5274b16b6f31dd3f1c41944025c2358faaa5782c754e84c24", size = 9461980, upload-time = "2025-07-28T15:48:52.325Z" }, + { url = "https://files.pythonhosted.org/packages/3d/d3/dacccd834404cd71b5c334882f3ba40331ad2120e69ded32cf5fda9a7436/tokenizers-0.21.4-cp39-abi3-win32.whl", hash = "sha256:6c42a930bc5f4c47f4ea775c91de47d27910881902b0f20e4990ebe045a415d0", size = 2329871, upload-time = "2025-07-28T15:48:56.841Z" }, + { url = "https://files.pythonhosted.org/packages/41/f2/fd673d979185f5dcbac4be7d09461cbb99751554ffb6718d0013af8604cb/tokenizers-0.21.4-cp39-abi3-win_amd64.whl", hash = "sha256:475d807a5c3eb72c59ad9b5fcdb254f6e17f53dfcbb9903233b0dfa9c943b597", size = 2507568, upload-time = "2025-07-28T15:48:55.456Z" }, ] [[package]] @@ -3414,15 +2350,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, ] -[[package]] -name = "traitlets" -version = "5.14.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = 
"sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" }, -] - [[package]] name = "typer" version = "0.16.0" @@ -3440,11 +2367,11 @@ wheels = [ [[package]] name = "typing-extensions" -version = "4.14.0" +version = "4.14.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d1/bc/51647cd02527e87d05cb083ccc402f93e441606ff1f01739a62c8ad09ba5/typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4", size = 107423, upload-time = "2025-06-02T14:52:11.399Z" } +sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/e0/552843e0d356fbb5256d21449fa957fa4eff3bbc135a74a691ee70c7c5da/typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af", size = 43839, upload-time = "2025-06-02T14:52:10.026Z" }, + { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, ] [[package]] @@ -3535,15 +2462,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, ] -[[package]] -name = "wcwidth" -version = "0.2.13" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, -] - [[package]] name = "websockets" version = "15.0.1" @@ -3575,15 +2493,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, ] -[[package]] -name = "win32-setctime" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" }, -] - [[package]] name = "wrapt" version = "1.17.2" diff --git a/ui/conf/nginx.conf b/ui/conf/nginx.conf index a0f97b02e..9b83a4ebc 100644 --- a/ui/conf/nginx.conf +++ b/ui/conf/nginx.conf @@ -44,6 +44,11 @@ http { proxy_set_header Origin $scheme://$host; proxy_cache_bypass $http_upgrade; } + + # static health check + location /health { + return 200 'OK'; + } # Backend routes location /api/ { diff --git a/ui/package-lock.json b/ui/package-lock.json index e588c4e37..c70687dec 100644 --- a/ui/package-lock.json +++ b/ui/package-lock.json @@ -8,6 +8,7 @@ "name": "kagents-ui", "version": "0.1.0", "dependencies": { + "@a2a-js/sdk": "^0.2.4", "@hookform/resolvers": "^5.1.1", "@radix-ui/react-accordion": "^1.2.11", "@radix-ui/react-alert-dialog": "^1.1.14", @@ -26,6 +27,7 @@ "@radix-ui/react-tabs": "^1.1.12", "@radix-ui/react-tooltip": "^1.2.7", "@tailwindcss/typography": "^0.5.16", + "@types/uuid": "^10.0.0", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "cmdk": "^1.1.1", @@ -41,6 +43,7 @@ "sonner": "^2.0.5", "tailwind-merge": "^2.6.0", "tailwindcss-animate": "^1.0.7", + "uuid": "^11.1.0", "zod": "^3.25.67", "zustand": "^5.0.6" }, @@ -69,6 +72,22 @@ "typescript": "5.8.3" } }, + "node_modules/@a2a-js/sdk": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/@a2a-js/sdk/-/sdk-0.2.4.tgz", + "integrity": "sha512-s9wEF5SUswhaAeAERA3tIBcrYEqWfkf+B3yiofxFX8+wnJMQL2l6bT6e7LZqjFf8sup0IRqFtGbckBPDLQymjw==", + "dependencies": { + "@types/cors": "^2.8.17", + "@types/express": "^4.17.23", + "body-parser": "^2.2.0", + "cors": "^2.8.5", + "express": "^4.21.2", + "uuid": "^11.1.0" + }, + "engines": { + "node": ">=18" + } + }, "node_modules/@adobe/css-tools": { "version": "4.4.3", "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.4.3.tgz", @@ -712,6 +731,16 @@ "node": ">= 6" } }, + "node_modules/@cypress/request/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true, + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/@cypress/xvfb": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz", @@ -3569,12 +3598,40 @@ "@babel/types": "^7.20.7" } }, + "node_modules/@types/body-parser": { + "version": "1.19.6", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz", + "integrity": "sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==", + "license": "MIT", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + 
"license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/cookie": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", "dev": true }, + "node_modules/@types/cors": { + "version": "2.8.19", + "resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.19.tgz", + "integrity": "sha512-mFNylyeyqN93lfe/9CSxOGREz8cpzAhH+E93xJ4xWQf62V8sQ/24reV2nyzUWM6H6Xji+GGHpkbLe7pVoUEskg==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/cypress": { "version": "0.1.6", "resolved": "https://registry.npmjs.org/@types/cypress/-/cypress-0.1.6.tgz", @@ -3605,6 +3662,30 @@ "@types/estree": "*" } }, + "node_modules/@types/express": { + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.23.tgz", + "integrity": "sha512-Crp6WY9aTYP3qPi2wGDo9iUe/rceX01UMhnF1jmwDcKCFM6cx7YhGP/Mpr3y9AASpfHixIG0E6azCcL5OcDHsQ==", + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "4.19.6", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.6.tgz", + "integrity": "sha512-N4LZ2xG7DatVqhCZzOGb1Yi5lMbXSZcmdLDe9EzSndPV2HpWYWzRbaerl2n27irrm94EPpprqa8KpskPT085+A==", + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, "node_modules/@types/graceful-fs": { "version": "4.1.9", "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", @@ -3623,6 +3704,12 @@ "@types/unist": "*" } }, + "node_modules/@types/http-errors": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.5.tgz", + "integrity": "sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==", + "license": "MIT" + }, "node_modules/@types/istanbul-lib-coverage": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", @@ -3722,6 +3809,12 @@ "@types/unist": "*" } }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "license": "MIT" + }, "node_modules/@types/ms": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", @@ -3732,7 +3825,6 @@ "version": "20.17.47", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.47.tgz", "integrity": "sha512-3dLX0Upo1v7RvUimvxLeXqwrfyKxUINk0EAM83swP2mlSUcwV73sZy8XhNz8bcZ3VbsfQyC/y6jRdL5tgCNpDQ==", - "devOptional": true, "dependencies": { "undici-types": "~6.19.2" } @@ -3744,6 +3836,18 @@ "dev": true, "peer": true }, + "node_modules/@types/qs": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==", + "license": "MIT" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + "integrity": 
"sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "license": "MIT" + }, "node_modules/@types/react": { "version": "19.1.4", "resolved": "https://registry.npmjs.org/@types/react/-/react-19.1.4.tgz", @@ -3771,6 +3875,27 @@ "@types/react": "^19.0.0" } }, + "node_modules/@types/send": { + "version": "0.17.5", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.5.tgz", + "integrity": "sha512-z6F2D3cOStZvuk2SaP6YrwkNO65iTZcwA2ZkSABegdkAh/lf+Aa/YQndZVfmEXT5vgAp6zv06VQ3ejSVjAny4w==", + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.8", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.8.tgz", + "integrity": "sha512-roei0UY3LhpOJvjbIP6ZZFngyLKl5dskOtDhxY5THRSpO+ZI+nzJ+m5yUMzGrp89YRa7lvknKkMYjqQFGwA7Sg==", + "license": "MIT", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, "node_modules/@types/sinonjs__fake-timers": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.1.tgz", @@ -3807,6 +3932,12 @@ "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", "license": "MIT" }, + "node_modules/@types/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ==", + "license": "MIT" + }, "node_modules/@types/yargs": { "version": "17.0.33", "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", @@ -4117,6 +4248,19 @@ "deprecated": "Use your platform's native atob() and btoa() methods instead", "dev": true }, + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "license": "MIT", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/acorn": { "version": "8.15.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", @@ -4357,6 +4501,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", + "license": "MIT" + }, "node_modules/array-includes": { "version": "3.1.8", "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.8.tgz", @@ -4801,6 +4951,26 @@ "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", "dev": true }, + "node_modules/body-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", + "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "license": "MIT", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.6.3", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, 
"node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -4927,6 +5097,15 @@ "node": ">=10.16.0" } }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/cachedir": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.4.0.tgz", @@ -4959,7 +5138,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz", "integrity": "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==", - "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0", @@ -4973,7 +5151,6 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.3.tgz", "integrity": "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==", - "dev": true, "license": "MIT", "dependencies": { "call-bind-apply-helpers": "^1.0.1", @@ -5558,6 +5735,27 @@ "dev": true, "license": "MIT" }, + "node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/convert-source-map": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", @@ -5573,12 +5771,31 @@ "node": ">= 0.6" } }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", + "license": "MIT" + }, "node_modules/core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==", "dev": true }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, "node_modules/create-jest": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz", @@ -5997,6 +6214,15 @@ "node": ">=0.4.0" } }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 
0.8" + } + }, "node_modules/dequal": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", @@ -6006,6 +6232,16 @@ "node": ">=6" } }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "license": "MIT", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, "node_modules/detect-libc": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz", @@ -6109,7 +6345,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", - "dev": true, "license": "MIT", "dependencies": { "call-bind-apply-helpers": "^1.0.1", @@ -6136,6 +6371,12 @@ "safer-buffer": "^2.1.0" } }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, "node_modules/ejs": { "version": "3.1.10", "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", @@ -6175,6 +6416,15 @@ "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", "license": "MIT" }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/end-of-stream": { "version": "1.4.5", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", @@ -6323,7 +6573,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -6333,7 +6582,6 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -6391,7 +6639,6 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", - "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0" @@ -6453,6 +6700,12 @@ "node": ">=6" } }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, "node_modules/escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", @@ -6962,6 +7215,15 @@ "node": ">=0.10.0" } }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": 
"sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/eventemitter2": { "version": "6.4.7", "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.7.tgz", @@ -7034,6 +7296,179 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/express": { + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", + "license": "MIT", + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.7.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.3.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.12", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express/node_modules/body-parser": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/express/node_modules/cookie": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/express/node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/express/node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "license": "MIT", + 
"engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/express/node_modules/path-to-regexp": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", + "license": "MIT" + }, + "node_modules/express/node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/express/node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/express/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/express/node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "license": "MIT", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", @@ -7226,6 +7661,48 @@ "node": ">=8" } }, + "node_modules/finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/finalhandler/node_modules/statuses": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", @@ -7321,6 +7798,24 @@ "node": ">= 6" } }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/fs-extra": { "version": "9.1.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", @@ -7417,7 +7912,6 @@ "version": "1.2.7", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.7.tgz", "integrity": "sha512-VW6Pxhsrk0KAOqs3WEd0klDiF/+V7gQOpAvY1jVU/LHmaD/kQO4523aiJuikX/QAKYiW6x8Jh+RJej1almdtCA==", - "dev": true, "license": "MIT", "dependencies": { "call-bind-apply-helpers": "^1.0.1", @@ -7460,7 +7954,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", - "dev": true, "license": "MIT", "dependencies": { "dunder-proto": "^1.0.1", @@ -7639,7 +8132,6 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -7726,7 +8218,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -7853,6 +8344,31 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-errors/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/http-proxy-agent": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", @@ -7907,7 +8423,6 @@ "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "dev": 
true, "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" }, @@ -8014,8 +8529,7 @@ "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "node_modules/ini": { "version": "2.0.0", @@ -8047,6 +8561,15 @@ "node": ">= 0.4" } }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, "node_modules/is-alphabetical": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz", @@ -10374,7 +10897,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -10654,6 +11176,24 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", @@ -10669,6 +11209,15 @@ "node": ">= 8" } }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/micromark": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.1.tgz", @@ -11238,11 +11787,22 @@ "node": ">=8.6" } }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/mime-db": { "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "dev": true, "engines": { "node": ">= 0.6" } @@ -11251,7 +11811,6 @@ "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "dev": true, "dependencies": { "mime-db": 
"1.52.0" }, @@ -11415,6 +11974,15 @@ "dev": true, "license": "MIT" }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/next": { "version": "15.3.4", "resolved": "https://registry.npmjs.org/next/-/next-15.3.4.tgz", @@ -11566,7 +12134,6 @@ "version": "1.13.3", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.3.tgz", "integrity": "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -11690,6 +12257,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -11892,6 +12471,15 @@ "url": "https://github.com/inikulin/parse5?sponsor=1" } }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -12319,6 +12907,19 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, "node_modules/proxy-from-env": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.0.0.tgz", @@ -12377,7 +12978,6 @@ "version": "6.14.0", "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", - "dev": true, "dependencies": { "side-channel": "^1.1.0" }, @@ -12414,6 +13014,30 @@ ], "license": "MIT" }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz", + "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.6.3", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/react": { "version": "18.3.1", "resolved": 
"https://registry.npmjs.org/react/-/react-18.3.1.tgz", @@ -12883,7 +13507,6 @@ "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true, "funding": [ { "type": "github", @@ -12937,8 +13560,7 @@ "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "node_modules/saxes": { "version": "6.0.0", @@ -12973,6 +13595,78 @@ "node": ">=10" } }, + "node_modules/send": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/send/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/serve-static": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "license": "MIT", + "dependencies": { + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/set-function-length": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", @@ -13022,6 +13716,12 @@ "node": ">= 0.4" } }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": 
"sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, "node_modules/sharp": { "version": "0.34.2", "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.34.2.tgz", @@ -13088,7 +13788,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", - "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0", @@ -13108,7 +13807,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", - "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0", @@ -13125,7 +13823,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", - "dev": true, "license": "MIT", "dependencies": { "call-bound": "^1.0.2", @@ -13144,7 +13841,6 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", - "dev": true, "license": "MIT", "dependencies": { "call-bound": "^1.0.2", @@ -13954,6 +14650,15 @@ "node": ">=8.0" } }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, "node_modules/tough-cookie": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-5.1.2.tgz", @@ -14218,6 +14923,41 @@ "node": ">=8" } }, + "node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/type-is/node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/type-is/node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/typed-array-buffer": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", @@ -14332,7 +15072,6 @@ "version": "6.19.8", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", - "devOptional": 
true, "license": "MIT" }, "node_modules/unified": { @@ -14431,6 +15170,15 @@ "node": ">= 10.0.0" } }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/untildify": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", @@ -14539,13 +15287,26 @@ "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", "license": "MIT" }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "license": "MIT", + "engines": { + "node": ">= 0.4.0" + } + }, "node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true, + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz", + "integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", "bin": { - "uuid": "dist/bin/uuid" + "uuid": "dist/esm/bin/uuid" } }, "node_modules/v8-compile-cache-lib": { @@ -14568,6 +15329,15 @@ "node": ">=10.12.0" } }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/verror": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", diff --git a/ui/package.json b/ui/package.json index f48408443..cd58209ec 100644 --- a/ui/package.json +++ b/ui/package.json @@ -11,6 +11,7 @@ "test:watch": "jest --watch" }, "dependencies": { + "@a2a-js/sdk": "^0.2.4", "@hookform/resolvers": "^5.1.1", "@radix-ui/react-accordion": "^1.2.11", "@radix-ui/react-alert-dialog": "^1.1.14", @@ -29,6 +30,7 @@ "@radix-ui/react-tabs": "^1.1.12", "@radix-ui/react-tooltip": "^1.2.7", "@tailwindcss/typography": "^0.5.16", + "@types/uuid": "^10.0.0", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "cmdk": "^1.1.1", @@ -44,6 +46,7 @@ "sonner": "^2.0.5", "tailwind-merge": "^2.6.0", "tailwindcss-animate": "^1.0.7", + "uuid": "^11.1.0", "zod": "^3.25.67", "zustand": "^5.0.6" }, diff --git a/ui/src/app/a2a/[namespace]/[agentName]/route.ts b/ui/src/app/a2a/[namespace]/[agentName]/route.ts new file mode 100644 index 000000000..d1916ac49 --- /dev/null +++ b/ui/src/app/a2a/[namespace]/[agentName]/route.ts @@ -0,0 +1,123 @@ +import { NextRequest, NextResponse } from 'next/server'; +import { getBackendUrl } from '@/lib/utils'; + +export async function POST( + request: NextRequest, + { params }: { params: Promise<{ namespace: string; agentName: string }> } +) { + const { namespace, agentName } = await params; + + try { + const a2aRequest = await request.json(); + + const backendUrl = getBackendUrl(); + const targetUrl = `${backendUrl}/a2a/${namespace}/${agentName}/`; + + const 
backendResponse = await fetch(targetUrl, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Accept': 'text/event-stream', + 'Cache-Control': 'no-cache', + 'Connection': 'keep-alive', + 'User-Agent': 'kagent-ui', + }, + body: JSON.stringify(a2aRequest), + }); + + if (!backendResponse.ok) { + const errorText = await backendResponse.text(); + return new Response(errorText || 'Backend request failed', { + status: backendResponse.status, + headers: { + 'Content-Type': 'text/plain', + } + }); + } + + if (!backendResponse.body) { + return new Response('Backend response body is null', { status: 500 }); + } + + // Stream the response back to the frontend + const responseHeaders = new Headers({ + 'Content-Type': 'text/event-stream', + 'Cache-Control': 'no-cache', + 'Connection': 'keep-alive', + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS', + 'Access-Control-Allow-Headers': 'Content-Type, Authorization, Accept', + }); + + const stream = new ReadableStream({ + start(controller) { + const reader = backendResponse.body!.getReader(); + const decoder = new TextDecoder(); + let buffer = ""; + let isClosed = false; + + const pump = (): Promise => { + return reader.read().then(({ done, value }): Promise => { + if (done) { + if (!isClosed) { + controller.close(); + isClosed = true; + } + return Promise.resolve(); + } + + buffer += decoder.decode(value, { stream: true }); + + // Process complete SSE events (delimited by \n\n) + let eventEndIndex; + while ((eventEndIndex = buffer.indexOf('\n\n')) >= 0) { + const eventText = buffer.substring(0, eventEndIndex); + buffer = buffer.substring(eventEndIndex + 2); + + if (eventText.trim()) { + const eventData = eventText + '\n\n'; + if (!isClosed) { + controller.enqueue(new TextEncoder().encode(eventData)); + } + } + } + + return pump(); + }).catch(error => { + if (!isClosed) { + controller.error(error); + isClosed = true; + } + return Promise.resolve(); + }); + }; + + pump(); + } + }); + + return new Response(stream, { + headers: responseHeaders, + status: backendResponse.status, + }); + + } catch (error) { + const errorMessage = error instanceof Error ? error.message : 'Internal server error'; + return NextResponse.json({ error: errorMessage }, { status: 500 }); + } +} + +export async function OPTIONS( + request: NextRequest, + { params }: { params: Promise<{ namespace: string; agentName: string }> } +) { + return new Response(null, { + status: 200, + headers: { + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS', + 'Access-Control-Allow-Headers': 'Content-Type, Authorization, Accept', + 'Access-Control-Max-Age': '86400', + }, + }); +} \ No newline at end of file diff --git a/ui/src/app/actions/agents.ts b/ui/src/app/actions/agents.ts index cd756c15d..0a2bf505b 100644 --- a/ui/src/app/actions/agents.ts +++ b/ui/src/app/actions/agents.ts @@ -156,6 +156,12 @@ export async function deleteAgent(agentName: string): Promise */ export async function createAgent(agentConfig: AgentFormData, update: boolean = false): Promise> { try { + + // Only get the name of the model, not the full ref + if (agentConfig.model.ref) { + agentConfig.model.ref = agentConfig.model.ref.split("/").pop() || ""; + } + const agentPayload = fromAgentFormDataToAgent(agentConfig); const response = await fetchApi>(`/agents`, { method: update ? 
"PUT" : "POST", diff --git a/ui/src/app/actions/sessions.ts b/ui/src/app/actions/sessions.ts index 3288a6f79..d4d2121f3 100644 --- a/ui/src/app/actions/sessions.ts +++ b/ui/src/app/actions/sessions.ts @@ -62,11 +62,7 @@ export async function createSession(session: CreateSessionRequest): Promise; - server_name: string; -} +import { fetchApi } from "./utils"; +import { ToolResponse } from "@/types/datamodel"; /** * Gets all available tools * @returns A promise with all tools */ -export async function getTools(): Promise[]> { +export async function getTools(): Promise { try { const response = await fetchApi>("/tools"); if (!response) { throw new Error("Failed to get built-in tools"); } - - const toolsComponents = response.data?.map((t) => { - // set the label in component to the server_name, because we use the server name (kagent-tool-server) to determine - // whether a tool is a built-in tool or not. - // TODO (peterj): Ideally, instead of returning the Component we could just directly return the actual ToolResponse. - t.component.label = t.server_name; - return t.component; - }); - if (!toolsComponents) { - throw new Error("Failed to get built-in tools"); - } - - // Convert API components to Component format - const convertedTools = toolsComponents.map((tool) => { - // Convert to Component format - return { - provider: tool.provider, - label: tool.label || "", - description: tool.description || "", - config: tool.config || {}, - component_type: tool.component_type || "tool", - } as Component; - }); - - return convertedTools || []; + return response.data || []; } catch (error) { - throw new Error("Error getting built-in tools"); + throw new Error(`Error getting built-in tools. ${error}`); } } - -/** - * Gets a specific tool by its provider name and optionally tool name - * @param allTools The list of all tools - * @param provider The tool provider name - * @param toolName Optional tool name for MCP tools - * @returns A promise with the tool data - */ -export async function getToolByProvider(allTools: Component[], provider: string, toolName?: string): Promise | null> { - // For MCP tools, we need to match both provider and tool name - if (isMcpProvider(provider) && toolName) { - const tool = allTools.find(t => - t.provider === provider && - (t.config as MCPToolConfig)?.tool?.name === toolName - ); - - if (tool) { - // For MCP tools, use the description from the tool object - return { - ...tool, - description: (tool.config as MCPToolConfig)?.tool?.description || "No description available" - } - }; - } else { - // For non-MCP tools, just match the provider - const tool = allTools.find(t => t.provider === provider); - if (tool) { - return tool; - } - } - - return null; -} diff --git a/ui/src/app/agents/new/page.tsx b/ui/src/app/agents/new/page.tsx index 87ce23a95..b1e7c8d07 100644 --- a/ui/src/app/agents/new/page.tsx +++ b/ui/src/app/agents/new/page.tsx @@ -347,7 +347,6 @@ function AgentPageContent({ isEditMode, agentName, agentNamespace }: AgentPageCo {server.discoveredTools .sort((a, b) => { - const aName = getToolDisplayName(a.component) || ""; - const bName = getToolDisplayName(b.component) || ""; + const aName = a.name || ""; + const bName = b.name || ""; return aName.localeCompare(bName); }) .map((tool) => ( -
+
- {getToolDisplayName(tool.component)}
- {getToolDescription(tool.component)}
- {getToolIdentifier(tool.component)}
+ {tool.name}
+ {tool.description}
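The hunks above replace the Component-based display helpers with fields read directly off ToolResponse. A minimal sketch of that shape and the resulting sort, with the field list inferred from the identifiers used in this diff (the real type in ui/src/types/datamodel may carry more fields):

// Sketch only; field names inferred from this diff (tool.id, tool.server_name,
// tool.name, tool.description). Not the authoritative definition.
interface ToolResponse {
  id: string;
  server_name: string;
  name: string;
  description: string;
}

// Sorting now compares tool.name directly instead of getToolDisplayName(tool.component).
function sortToolsByName(tools: ToolResponse[]): ToolResponse[] {
  return [...tools].sort((a, b) => (a.name || "").localeCompare(b.name || ""));
}
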
diff --git a/ui/src/app/stream/[sessionId]/route.ts b/ui/src/app/stream/[sessionId]/route.ts deleted file mode 100644 index 53bbd0372..000000000 --- a/ui/src/app/stream/[sessionId]/route.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { NextResponse, NextRequest } from 'next/server'; -import { getBackendUrl } from '@/lib/utils'; -import { getCurrentUserId } from '@/app/actions/utils'; - - -export async function POST( - request: NextRequest, - { params }: { params: Promise<{ sessionId: string }> } -) { - const { sessionId } = await params; - console.log("Received request to invoke stream"); - try { - if (!sessionId) { - return NextResponse.json({ error: 'Session ID is required' }, { status: 400 }); - } - - const userId = await getCurrentUserId(); - if (!userId) { - return NextResponse.json({ error: 'User ID could not be determined' }, { status: 401 }); - } - - // Read the plain text content from the request body - const content = await request.text(); - if (!content) { - return NextResponse.json({ error: 'Content is required' }, { status: 400 }); - } - - const backendUrl = getBackendUrl(); - const targetUrl = `${backendUrl}/sessions/${sessionId}/invoke/stream?user_id=${userId}`; - - // Fetch from your actual backend - const backendResponse = await fetch(targetUrl, { - method: 'POST', - headers: { - 'Content-Type': 'text/plain', - 'Accept': 'text/event-stream', - }, - body: content, - signal: request.signal, - }); - - if (!backendResponse.ok) { - const errorText = await backendResponse.text(); - return new Response(errorText || 'Backend request failed', { status: backendResponse.status }); - } - - if (!backendResponse.body) { - return new Response('Backend response body is null', { status: 500 }); - } - - const responseHeaders = new Headers({ - 'Content-Type': 'text/event-stream', - 'Cache-Control': 'no-cache', - 'Connection': 'keep-alive', - }); - - const readableStream = backendResponse.body; - return new Response(readableStream, { - headers: responseHeaders, - status: backendResponse.status, - }); - - } catch (error) { - console.error('[API /invoke/stream] Error:', error); - const errorMessage = error instanceof Error ? 
error.message : 'Internal server error'; - return NextResponse.json({ error: errorMessage }, { status: 500 }); - } -} \ No newline at end of file diff --git a/ui/src/app/tools/page.tsx b/ui/src/app/tools/page.tsx index 5df8fa245..3e3120fdc 100644 --- a/ui/src/app/tools/page.tsx +++ b/ui/src/app/tools/page.tsx @@ -6,28 +6,23 @@ import { Input } from "@/components/ui/input"; import { Button } from "@/components/ui/button"; import { ScrollArea } from "@/components/ui/scroll-area"; import { Badge } from "@/components/ui/badge"; -import { getToolCategory, getToolDescription, getToolDisplayName, getToolIdentifier, getToolProvider, isMcpProvider } from "@/lib/toolUtils"; -import { Component, ToolConfig, ToolServerConfiguration } from "@/types/datamodel"; +import { getToolResponseDisplayName, getToolResponseDescription, getToolResponseCategory, getToolResponseIdentifier } from "@/lib/toolUtils"; +import { ToolResponse } from "@/types/datamodel"; import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip"; import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert"; import { getTools } from "../actions/tools"; -import { getServers } from "../actions/servers"; import Link from "next/link"; import CategoryFilter from "@/components/tools/CategoryFilter"; -import McpIcon from "@/components/icons/McpIcon"; export default function ToolsPage() { - // Consolidated state const [toolsData, setToolsData] = useState<{ - tools: Component[]; - serversMap: Map; + tools: ToolResponse[]; categories: Set; isLoading: boolean; error: string | null; }>({ - tools: [], // Normalized tools from both sources - serversMap: new Map(), // Map of server_id to server name/label - categories: new Set(), // Unique categories + tools: [], + categories: new Set(), isLoading: true, error: null }); @@ -43,80 +38,25 @@ export default function ToolsPage() { fetchData(); }, []); - // Fetch and consolidate tools data + // Fetch tools data const fetchData = async () => { try { setToolsData(prev => ({ ...prev, isLoading: true, error: null })); - // Fetch both data sources in parallel - const [serversResponse, toolsResponse] = await Promise.all([ - getServers(), - getTools() - ]); - - // Process servers - const serversMap = new Map(); - const toolsFromServers: Component[] = []; - - if (!serversResponse.error && serversResponse.data) { - serversResponse.data.forEach(server => { - serversMap.set(server.ref, { - name: server.ref, - label: server.ref, - config: server.config - }); - - // Process discovered tools from this server - if (server.discoveredTools && Array.isArray(server.discoveredTools)) { - server.discoveredTools.forEach(tool => { - const labeledTool = { - ...tool.component, - label: server.ref - }; - toolsFromServers.push(labeledTool); - }); - } - }); - } - - // Process DB tools - let allTools: Component[] = []; - allTools = [...toolsResponse]; - - // Combine tools from both sources (prioritizing DB tools if there are duplicates) - // This assumes getToolIdentifier returns a unique identifier for each tool - const toolMap = new Map>(); - - // First add all DB tools - allTools.forEach(tool => { - const toolId = getToolIdentifier(tool); - toolMap.set(toolId, tool); - }); - - // Then add server tools only if they don't already exist - toolsFromServers.forEach(tool => { - const toolId = getToolIdentifier(tool); - if (!toolMap.has(toolId)) { - toolMap.set(toolId, tool); - } - }); - - // Convert map back to array - const consolidatedTools = Array.from(toolMap.values()); + const tools 
= await getTools(); // Extract unique categories and initialize expanded state const uniqueCategories = new Set(); const initialExpandedState: { [key: string]: boolean } = {}; - consolidatedTools.forEach(tool => { - const category = getToolCategory(tool); + tools.forEach(tool => { + const category = getToolResponseCategory(tool); uniqueCategories.add(category); initialExpandedState[category] = true; }); - // Update state with consolidated data + // Update state with tools data setToolsData({ - tools: consolidatedTools, - serversMap, + tools, categories: uniqueCategories, isLoading: false, error: null @@ -124,11 +64,11 @@ export default function ToolsPage() { setExpandedCategories(initialExpandedState); } catch (error) { - console.error("Error fetching data:", error); + console.error("Error fetching tools:", error); setToolsData(prev => ({ ...prev, isLoading: false, - error: "An error occurred while fetching data." + error: "An error occurred while fetching tools." })); } }; @@ -154,12 +94,12 @@ export default function ToolsPage() { return toolsData.tools.filter(tool => { const searchLower = searchTerm.toLowerCase(); const matchesSearch = - getToolDisplayName(tool)?.toLowerCase().includes(searchLower) || - getToolDescription(tool)?.toLowerCase().includes(searchLower) || - getToolProvider(tool)?.toLowerCase().includes(searchLower) || - getToolIdentifier(tool)?.toLowerCase().includes(searchLower); + getToolResponseDisplayName(tool)?.toLowerCase().includes(searchLower) || + getToolResponseDescription(tool)?.toLowerCase().includes(searchLower) || + tool.server_name?.toLowerCase().includes(searchLower) || + tool.id?.toLowerCase().includes(searchLower); - const toolCategory = getToolCategory(tool); + const toolCategory = getToolResponseCategory(tool); const matchesCategory = selectedCategories.size === 0 || selectedCategories.has(toolCategory); return matchesSearch && matchesCategory; @@ -168,15 +108,15 @@ export default function ToolsPage() { // Group tools by category const toolsByCategory = useMemo(() => { - const groups: Record[]> = {}; + const groups: Record = {}; const sortedTools = [...filteredTools].sort((a, b) => { - const aName = getToolDisplayName(a) || ""; - const bName = getToolDisplayName(b) || ""; + const aName = getToolResponseDisplayName(a); + const bName = getToolResponseDisplayName(b); return aName.localeCompare(bName); }); sortedTools.forEach(tool => { - const category = getToolCategory(tool); + const category = getToolResponseCategory(tool); if (!groups[category]) { groups[category] = []; } @@ -224,7 +164,7 @@ export default function ToolsPage() {
setSearchTerm(e.target.value)} className="pl-10" @@ -268,8 +208,6 @@ export default function ToolsPage() {
{Object.entries(toolsByCategory) .map(([category, categoryTools]) => { - // Check if any tool in this category is an MCP tool - const hasMcpTool = categoryTools.some(tool => isMcpProvider(tool.provider)); return (
{expandedCategories[category] ? : } - {hasMcpTool && }

{highlightMatch(category, searchTerm)}

{categoryTools.length}
@@ -289,23 +226,20 @@ export default function ToolsPage() { {categoryTools .map(tool => (
- {highlightMatch(getToolDisplayName(tool), searchTerm)}
+ {highlightMatch(getToolResponseDisplayName(tool), searchTerm)}
- {highlightMatch(getToolDescription(tool), searchTerm)} + {highlightMatch(getToolResponseDescription(tool), searchTerm)}
- {highlightMatch(tool.label || 'Unknown Server', searchTerm)} -
-
- Provider: {highlightMatch(getToolProvider(tool), searchTerm)} + {highlightMatch(tool.server_name, searchTerm)}
@@ -318,7 +252,7 @@ export default function ToolsPage() { -

{getToolIdentifier(tool)}

+

{getToolResponseIdentifier(tool)}

@@ -339,7 +273,7 @@ export default function ToolsPage() {

{searchTerm || selectedCategories.size > 0 ? "Try adjusting your search or filters to find tools." - : "Connect a server to discover tools."} + : "No tools are currently available."}

{searchTerm || selectedCategories.size > 0 ? (
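The tools page above groups the flat ToolResponse list by the category returned from getToolResponseCategory in @/lib/toolUtils. A rough sketch of that grouping, using a stand-in category helper because the real implementation is not part of this diff:

// Sketch only: mirrors the grouping logic in ToolsPage. getToolResponseCategory is
// the helper imported from @/lib/toolUtils; the stand-in below simply assumes the
// category comes from server_name, which may not match its actual behavior.
type ToolResponse = { id: string; server_name: string; name: string; description: string };

const getToolResponseCategory = (tool: ToolResponse): string =>
  tool.server_name || "uncategorized"; // stand-in assumption, not the real helper

function groupToolsByCategory(tools: ToolResponse[]): Record<string, ToolResponse[]> {
  const groups: Record<string, ToolResponse[]> = {};
  for (const tool of tools) {
    const category = getToolResponseCategory(tool);
    (groups[category] ??= []).push(tool);
  }
  return groups;
}
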
diff --git a/ui/src/components/AgentsProvider.tsx b/ui/src/components/AgentsProvider.tsx index 24dd0c5ee..934be66d1 100644 --- a/ui/src/components/AgentsProvider.tsx +++ b/ui/src/components/AgentsProvider.tsx @@ -2,7 +2,7 @@ import React, { createContext, useContext, useState, useEffect, ReactNode } from "react"; import { getAgent as getAgentAction, createAgent, getAgents } from "@/app/actions/agents"; -import { Component, ToolConfig, Agent, Tool, AgentResponse } from "@/types/datamodel"; +import { Agent, Tool, AgentResponse, ToolResponse } from "@/types/datamodel"; import { getTools } from "@/app/actions/tools"; import type { BaseResponse, ModelConfig } from "@/lib/types"; import { getModelConfigs } from "@/app/actions/modelConfigs"; @@ -34,7 +34,7 @@ interface AgentsContextType { models: ModelConfig[]; loading: boolean; error: string; - tools: Component[]; + tools: ToolResponse[]; refreshTeams: () => Promise; createNewAgent: (agentData: AgentFormData) => Promise>; updateAgent: (agentData: AgentFormData) => Promise>; @@ -60,7 +60,7 @@ export function AgentsProvider({ children }: AgentsProviderProps) { const [agents, setAgents] = useState([]); const [error, setError] = useState(""); const [loading, setLoading] = useState(true); - const [tools, setTools] = useState[]>([]); + const [tools, setTools] = useState([]); const [models, setModels] = useState([]); const fetchTeams = async () => { diff --git a/ui/src/components/chat/ChatInterface.tsx b/ui/src/components/chat/ChatInterface.tsx index b1c32aeeb..d200a5c3c 100644 --- a/ui/src/components/chat/ChatInterface.tsx +++ b/ui/src/components/chat/ChatInterface.tsx @@ -5,7 +5,7 @@ import { useState, useRef, useEffect } from "react"; import { ArrowBigUp, X, Loader2 } from "lucide-react"; import { Button } from "@/components/ui/button"; import { Textarea } from "@/components/ui/textarea"; -import type { Session, AgentMessageConfig, TeamConfig, Component } from "@/types/datamodel"; +import type { Session, AgentMessageConfig } from "@/types/datamodel"; import { ScrollArea } from "@/components/ui/scroll-area"; import ChatMessage from "@/components/chat/ChatMessage"; import StreamingMessage from "./StreamingMessage"; @@ -14,12 +14,14 @@ import { TokenStats } from "@/lib/types"; import StatusDisplay from "./StatusDisplay"; import { createSession, getSessionMessages, checkSessionExists } from "@/app/actions/sessions"; import { getCurrentUserId } from "@/app/actions/utils"; -import { getAgent } from "@/app/actions/agents"; import { toast } from "sonner"; import { useRouter } from "next/navigation"; import { createMessageHandlers } from "@/lib/messageHandlers"; +import { kagentA2AClient } from "@/lib/a2aClient"; +import { v4 as uuidv4 } from "uuid"; +import { getStatusPlaceholder } from "@/lib/statusUtils"; -export type ChatStatus = "ready" | "thinking" | "error"; +export type ChatStatus = "ready" | "thinking" | "error" | "submitted" | "working" | "input_required" | "auth_required" | "processing_tools" | "generating_response"; interface ChatInterfaceProps { selectedAgentName: string; @@ -37,7 +39,6 @@ export default function ChatInterface({ selectedAgentName, selectedNamespace, se input: 0, output: 0, }); - const [teamConfig, setTeamConfig] = useState>(); const [chatStatus, setChatStatus] = useState("ready"); @@ -56,11 +57,18 @@ export default function ChatInterface({ selectedAgentName, selectedNamespace, se setMessages, setIsStreaming, setStreamingContent, - setTokenStats + setTokenStats, + setChatStatus, + agentContext: { + namespace: selectedNamespace, + 
agentName: selectedAgentName + } }); useEffect(() => { async function initializeChat() { + setTokenStats({ total: 0, input: 0, output: 0 }); + // Skip completely if this is a first message session creation flow if (isFirstMessage || isCreatingSessionRef.current) { return; @@ -115,19 +123,7 @@ export default function ChatInterface({ selectedAgentName, selectedNamespace, se } }, [messages, streamingContent]); - useEffect(() => { - async function loadTeamConfig() { - try { - const teamResponse = await getAgent(selectedAgentName, selectedNamespace); - if (!teamResponse.error && teamResponse.data) { - setTeamConfig(teamResponse.data.component); - } - } catch (error) { - console.error("Error loading team config:", error); - } - } - loadTeamConfig(); - }, [selectedAgentName, selectedNamespace]); + const handleSendMessage = async (e: React.FormEvent) => { e.preventDefault(); @@ -200,89 +196,48 @@ export default function ChatInterface({ selectedAgentName, selectedNamespace, se abortControllerRef.current = new AbortController(); try { - const requestBody = { - task: userMessageText, - team_config: teamConfig, - } - - const response = await fetch( - `/stream/${currentSessionId}`, - { - method: 'POST', - headers: { - 'Content-Type': 'text/plain', - }, - body: JSON.stringify(requestBody), - signal: abortControllerRef.current.signal, - } - ); + const messageId = uuidv4(); + const a2aMessage = kagentA2AClient.createA2AMessage(userMessageText, messageId); + const sendParams = kagentA2AClient.createMessageSendParams(a2aMessage); + const stream = await kagentA2AClient.sendMessageStream(selectedNamespace, selectedAgentName, sendParams); - if (!response.ok) { - let errorText = `HTTP error! status: ${response.status}`; - try { - const resText = await response.text(); - if (resText) errorText = `${errorText} - ${resText}`; - // eslint-disable-next-line @typescript-eslint/no-unused-vars - } catch (e) { /* ignore */ } - toast.error(errorText); - throw new Error(errorText); - } - - if (!response.body) { - toast.error("Response body is null"); - throw new Error("Response body is null"); - } + let lastEventTime = Date.now(); + const streamTimeout = 60000; - const reader = response.body.getReader(); - const decoder = new TextDecoder(); + for await (const event of stream) { + lastEventTime = Date.now(); - let buffer = ""; - - while (true) { - const { value, done } = await reader.read(); - - if (done) { - break; + try { + handleMessageEvent(event as AgentMessageConfig); + } catch (error) { + console.error("❌ Event that caused error:", event); } - if (!value) { - continue; + // Check if we should stop streaming due to cancellation + if (abortControllerRef.current?.signal.aborted) { + break; } - buffer += decoder.decode(value, { stream: true }); - - let eventData = ''; - // Process all complete lines in buffer - const lines = buffer.split('\n'); - buffer = lines.pop() || ''; // Keep the last incomplete line in buffer - - for (const line of lines) { - if (line.trim() === '') continue; - - if (line.includes('data:')) { - eventData = line.substring(line.indexOf('data:') + 5).trim(); - - if (eventData) { - try { - const eventDataJson = JSON.parse(eventData) as AgentMessageConfig; - handleMessageEvent(eventDataJson); - } catch (error) { - toast.error("Error parsing event data"); - console.error("Error parsing event data:", error, eventData); - } - } - } + // Timeout check (in case stream hangs) + if (Date.now() - lastEventTime > streamTimeout) { + console.warn("⏰ Stream timeout - no events received for 30 seconds"); + break; 
} } // eslint-disable-next-line @typescript-eslint/no-explicit-any } catch (error: any) { if (error.name === "AbortError") { - toast.error("Fetch aborted"); + toast.info("Request cancelled"); + setChatStatus("ready"); } else { - toast.error("Streaming failed"); + toast.error(`Streaming failed: ${error.message}`); setChatStatus("error"); setCurrentInputMessage(userMessageText); } + + // Clean up streaming state + setIsStreaming(false); + setStreamingContent(""); } finally { setChatStatus("ready"); abortControllerRef.current = null; @@ -297,16 +252,21 @@ export default function ChatInterface({ selectedAgentName, selectedNamespace, se const handleCancel = (e: React.FormEvent) => { e.preventDefault(); + if (abortControllerRef.current) { abortControllerRef.current.abort(); } + + setIsStreaming(false); + setStreamingContent(""); setChatStatus("ready"); + toast.error("Request cancelled"); }; const handleKeyDown = (e: React.KeyboardEvent) => { if ((e.metaKey || e.ctrlKey) && e.key === "Enter") { e.preventDefault(); - if (currentInputMessage.trim() && selectedAgentName && selectedNamespace) { + if (currentInputMessage.trim() && selectedAgentName && selectedNamespace && chatStatus === "ready") { handleSendMessage(e); } } @@ -372,18 +332,18 @@ export default function ChatInterface({ selectedAgentName, selectedNamespace, se