diff --git a/.dockerignore b/.dockerignore index 3466d315..843dec4f 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,6 +1,2 @@ -* -!*.py -!requirements.txt -!images/* -!front-end/* -front-end/node_modules/* +Dockerfile +charts/ diff --git a/.github/workflows/Genstack.yaml b/.github/workflows/Genstack.yaml new file mode 100644 index 00000000..20efea26 --- /dev/null +++ b/.github/workflows/Genstack.yaml @@ -0,0 +1,66 @@ +name: Genstack +"on": + push: + branches: + - main + workflow_dispatch: {} +env: + ACR_RESOURCE_GROUP: kubeops_group + AZURE_CONTAINER_REGISTRY: acrworkflow1722973530004 + CLUSTER_NAME: kube + CLUSTER_RESOURCE_GROUP: kubeops_group + CONTAINER_NAME: image-workflow-1722973530004 + DEPLOYMENT_MANIFEST_PATH: | + manifests/deployment.yaml + manifests/service.yaml +jobs: + buildImage: + permissions: + contents: read + id-token: write + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: azure/login@92a5484dfaf04ca78a94597f4f19fea633851fa2 + name: Azure login + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + - name: Build and push image to ACR + run: az acr build --image ${{ env.CONTAINER_NAME }}:${{ github.sha }} --registry ${{ env.AZURE_CONTAINER_REGISTRY }} -g ${{ env.ACR_RESOURCE_GROUP }} -f Dockerfile ./ + deploy: + permissions: + actions: read + contents: read + id-token: write + runs-on: ubuntu-latest + needs: + - buildImage + steps: + - uses: actions/checkout@v3 + - uses: azure/login@92a5484dfaf04ca78a94597f4f19fea633851fa2 + name: Azure login + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + - uses: azure/use-kubelogin@v1 + name: Set up kubelogin for non-interactive login + with: + kubelogin-version: v0.0.25 + - uses: azure/aks-set-context@v3 + name: Get K8s context + with: + 
admin: "false" + cluster-name: ${{ env.CLUSTER_NAME }} + resource-group: ${{ env.CLUSTER_RESOURCE_GROUP }} + use-kubelogin: "true" + - uses: Azure/k8s-deploy@v4 + name: Deploys application + with: + action: deploy + images: ${{ env.AZURE_CONTAINER_REGISTRY }}.azurecr.io/${{ env.CONTAINER_NAME }}:${{ github.sha }} + manifests: ${{ env.DEPLOYMENT_MANIFEST_PATH }} + namespace: namespace-workflow-1722973530004 diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml new file mode 100644 index 00000000..3f53646d --- /dev/null +++ b/.github/workflows/docker-image.yml @@ -0,0 +1,18 @@ +name: Docker Image CI + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + +jobs: + + build: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - name: Build the Docker image + run: docker build . --file Dockerfile --tag my-image-name:$(date +%s) diff --git a/.github/workflows/gen-AutoDeployTrigger-30f01697-997f-42c1-9f65-5264610fb798.yml b/.github/workflows/gen-AutoDeployTrigger-30f01697-997f-42c1-9f65-5264610fb798.yml new file mode 100644 index 00000000..cfdf8e47 --- /dev/null +++ b/.github/workflows/gen-AutoDeployTrigger-30f01697-997f-42c1-9f65-5264610fb798.yml @@ -0,0 +1,48 @@ +name: Trigger auto deployment for gen + +# When this action will be executed +on: + # Automatically trigger it when detected changes in repo + push: + branches: + [ main ] + paths: + - '**' + - '.github/workflows/gen-AutoDeployTrigger-30f01697-997f-42c1-9f65-5264610fb798.yml' + + # Allow manual trigger + workflow_dispatch: + +jobs: + build-and-deploy: + runs-on: ubuntu-latest + permissions: + id-token: write #This is required for requesting the OIDC JWT Token + contents: read #Required when GH token is used to authenticate with private repo + + steps: + - name: Checkout to the branch + uses: actions/checkout@v2 + + - name: Azure Login + uses: azure/login@v1 + with: + client-id: ${{ secrets.GEN_AZURE_CLIENT_ID }} + tenant-id: ${{ 
secrets.GEN_AZURE_TENANT_ID }} + subscription-id: ${{ secrets.GEN_AZURE_SUBSCRIPTION_ID }} + + - name: Build and push container image to registry + uses: azure/container-apps-deploy-action@v2 + with: + appSourcePath: ${{ github.workspace }} + _dockerfilePathKey_: _dockerfilePath_ + registryUrl: + registryUsername: ${{ secrets.GEN_REGISTRY_USERNAME }} + registryPassword: ${{ secrets.GEN_REGISTRY_PASSWORD }} + containerAppName: gen + resourceGroup: kubeops_group + imageToBuild: default/[parameters('containerAppName')]:${{ github.sha }} + _buildArgumentsKey_: | + _buildArgumentsValues_ + + diff --git a/.github/workflows/genai-AutoDeployTrigger-08802d17-3be0-4081-89af-8ab34c2110f5.yml b/.github/workflows/genai-AutoDeployTrigger-08802d17-3be0-4081-89af-8ab34c2110f5.yml new file mode 100644 index 00000000..c7077b07 --- /dev/null +++ b/.github/workflows/genai-AutoDeployTrigger-08802d17-3be0-4081-89af-8ab34c2110f5.yml @@ -0,0 +1,48 @@ +name: Trigger auto deployment for genai + +# When this action will be executed +on: + # Automatically trigger it when detected changes in repo + push: + branches: + [ main ] + paths: + - '**' + - '.github/workflows/genai-AutoDeployTrigger-08802d17-3be0-4081-89af-8ab34c2110f5.yml' + + # Allow manual trigger + workflow_dispatch: + +jobs: + build-and-deploy: + runs-on: ubuntu-latest + permissions: + id-token: write #This is required for requesting the OIDC JWT Token + contents: read #Required when GH token is used to authenticate with private repo + + steps: + - name: Checkout to the branch + uses: actions/checkout@v2 + + - name: Azure Login + uses: azure/login@v1 + with: + client-id: ${{ secrets.GENAI_AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.GENAI_AZURE_TENANT_ID }} + subscription-id: ${{ secrets.GENAI_AZURE_SUBSCRIPTION_ID }} + + - name: Build and push container image to registry + uses: azure/container-apps-deploy-action@v2 + with: + appSourcePath: ${{ github.workspace }} + _dockerfilePathKey_: _dockerfilePath_ + registryUrl: 
docker.io + registryUsername: ${{ secrets.GENAI_REGISTRY_USERNAME }} + registryPassword: ${{ secrets.GENAI_REGISTRY_PASSWORD }} + containerAppName: genai + resourceGroup: kubeops_group + imageToBuild: docker.io/${{ secrets.GENAI_REGISTRY_USERNAME }}/genai:${{ github.sha }} + _buildArgumentsKey_: | + _buildArgumentsValues_ + + diff --git a/.github/workflows/genstack.yaml b/.github/workflows/genstack.yaml new file mode 100644 index 00000000..ce65a69a --- /dev/null +++ b/.github/workflows/genstack.yaml @@ -0,0 +1,65 @@ +name: genstack +"on": + push: + branches: + - main + workflow_dispatch: {} +env: + ACR_RESOURCE_GROUP: kubeops_group + AZURE_CONTAINER_REGISTRY: acrworkflow1723012492625 + CLUSTER_NAME: kube + CLUSTER_RESOURCE_GROUP: kubeops_group + CONTAINER_NAME: image-workflow-1723012492625 + DEPLOYMENT_MANIFEST_PATH: | + manifests/deployment.yaml + manifests/service.yaml +jobs: + buildImage: + permissions: + contents: read + id-token: write + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: azure/login@92a5484dfaf04ca78a94597f4f19fea633851fa2 + name: Azure login + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + - name: Build and push image to ACR + run: az acr build --image ${{ env.CONTAINER_NAME }}:${{ github.sha }} --registry ${{ env.AZURE_CONTAINER_REGISTRY }} -g ${{ env.ACR_RESOURCE_GROUP }} -f Dockerfile ./ + deploy: + permissions: + actions: read + contents: read + id-token: write + runs-on: ubuntu-latest + needs: + - buildImage + steps: + - uses: actions/checkout@v3 + - uses: azure/login@92a5484dfaf04ca78a94597f4f19fea633851fa2 + name: Azure login + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + - uses: azure/use-kubelogin@v1 + name: Set up kubelogin for non-interactive login + with: + kubelogin-version: v0.0.25 + - uses: azure/aks-set-context@v3 + name: Get 
K8s context + with: + admin: "false" + cluster-name: ${{ env.CLUSTER_NAME }} + resource-group: ${{ env.CLUSTER_RESOURCE_GROUP }} + use-kubelogin: "true" + - uses: Azure/k8s-deploy@v4 + name: Deploys application + with: + action: deploy + images: ${{ env.AZURE_CONTAINER_REGISTRY }}.azurecr.io/${{ env.CONTAINER_NAME }}:${{ github.sha }} + manifests: ${{ env.DEPLOYMENT_MANIFEST_PATH }} + namespace: aks-istio-system diff --git a/.github/workflows/sync-upstream.yml b/.github/workflows/sync-upstream.yml new file mode 100644 index 00000000..a9edbe84 --- /dev/null +++ b/.github/workflows/sync-upstream.yml @@ -0,0 +1,27 @@ +name: Sync Upstream + +on: + schedule: + - cron: '0 0 * * 0' # Runs every Sunday at midnight + workflow_dispatch: + +jobs: + sync: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.x' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + + - name: Run sync script + run: ./scripts/sync-upstream.sh diff --git a/.npmrc b/.npmrc new file mode 100644 index 00000000..6d567622 --- /dev/null +++ b/.npmrc @@ -0,0 +1 @@ +registry=https://packages.us-west-2.codecatalyst.aws/npm/Sauditech/mygit/Repository/ diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..16322aae --- /dev/null +++ b/Dockerfile @@ -0,0 +1,12 @@ +FROM python:3.11-slim +ENV PORT 8000 +EXPOSE 8000 +WORKDIR /usr/src/app + +COPY requirements.txt ./ +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +ENTRYPOINT ["python"] +CMD ["app.py"] \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 00000000..b74089e6 --- /dev/null +++ b/README.md @@ -0,0 +1,226 @@ +# GenAI Stack +The GenAI Stack will get you started building your own GenAI application in no time. +The demo applications can serve as inspiration or as a starting point. 
+Learn more about the details in the [introduction blog post](https://neo4j.com/blog/introducing-genai-stack-developers/). + +# Configure + +Create a `.env` file from the environment template file `env.example` + +Available variables: +| Variable Name | Default value | Description | +|------------------------|------------------------------------|-------------------------------------------------------------------------| +| OLLAMA_BASE_URL | http://host.docker.internal:11434 | REQUIRED - URL to Ollama LLM API | +| NEO4J_URI | neo4j://database:7687 | REQUIRED - URL to Neo4j database | +| NEO4J_USERNAME | neo4j | REQUIRED - Username for Neo4j database | +| NEO4J_PASSWORD | password | REQUIRED - Password for Neo4j database | +| LLM | llama2 | REQUIRED - Can be any Ollama model tag, or gpt-4 or gpt-3.5 or claudev2 | +| EMBEDDING_MODEL | sentence_transformer | REQUIRED - Can be sentence_transformer, openai, aws, ollama or google-genai-embedding-001| +| AWS_ACCESS_KEY_ID | | REQUIRED - Only if LLM=claudev2 or embedding_model=aws | +| AWS_SECRET_ACCESS_KEY | | REQUIRED - Only if LLM=claudev2 or embedding_model=aws | +| AWS_DEFAULT_REGION | | REQUIRED - Only if LLM=claudev2 or embedding_model=aws | +| OPENAI_API_KEY | | REQUIRED - Only if LLM=gpt-4 or LLM=gpt-3.5 or embedding_model=openai | +| GOOGLE_API_KEY | | REQUIRED - Only required when using GoogleGenai LLM or embedding model google-genai-embedding-001| +| LANGCHAIN_ENDPOINT | "https://api.smith.langchain.com" | OPTIONAL - URL to Langchain Smith API | +| LANGCHAIN_TRACING_V2 | false | OPTIONAL - Enable Langchain tracing v2 | +| LANGCHAIN_PROJECT | | OPTIONAL - Langchain project name | +| LANGCHAIN_API_KEY | | OPTIONAL - Langchain API key | +| PXE_SERVER_IP | | REQUIRED - IP address of the PXE server | +| PXE_SERVER_PORT | | REQUIRED - Port number of the PXE server | + +## LLM Configuration +MacOS and Linux users can use any LLM that's available via Ollama. 
Check the "tags" section under the model page you want to use on https://ollama.ai/library and write the tag for the value of the environment variable `LLM=` in the `.env` file. +All platforms can use GPT-3.5-turbo and GPT-4 (bring your own API keys for OpenAI models). + +**MacOS** +Install [Ollama](https://ollama.ai) on MacOS and start it before running `docker compose up` using `ollama serve` in a separate terminal. + +**Linux** +No need to install Ollama manually, it will run in a container as +part of the stack when running with the Linux profile: run `docker compose --profile linux up`. +Make sure to set the `OLLAMA_BASE_URL=http://llm:11434` in the `.env` file when using Ollama docker container. + +To use the Linux-GPU profile: run `docker compose --profile linux-gpu up`. Also change `OLLAMA_BASE_URL=http://llm-gpu:11434` in the `.env` file. + +**Windows** +Ollama now supports Windows. Install [Ollama](https://ollama.ai) on Windows and start it before running `docker compose up` using `ollama serve` in a separate terminal. Alternatively, Windows users can generate an OpenAI API key and configure the stack to use `gpt-3.5` or `gpt-4` in the `.env` file. +# Develop + +> [!WARNING] +> There is a performance issue that impacts python applications in the `4.24.x` releases of Docker Desktop. Please upgrade to the latest release before using this stack. + +**To start everything** +``` +docker compose up +``` +If changes to build scripts have been made, **rebuild**. +``` +docker compose up --build +``` + +To enter **watch mode** (auto rebuild on file changes). +First start everything, then in new terminal: +``` +docker compose watch +``` + +**Shutdown** +If health check fails or containers don't start up as expected, shutdown +completely to start up again. 
+``` +docker compose down +``` + +# Applications + +Here's what's in this repo: + +| Name | Main files | Compose name | URLs | Description | +|---|---|---|---|---| +| Support Bot | `bot.py` | `bot` | http://localhost:8501 | Main usecase. Fullstack Python application. | +| Stack Overflow Loader | `loader.py` | `loader` | http://localhost:8502 | Load SO data into the database (create vector embeddings etc). Fullstack Python application. | +| PDF Reader | `pdf_bot.py` | `pdf_bot` | http://localhost:8503 | Read local PDF and ask it questions. Fullstack Python application. | +| Standalone Bot API | `api.py` | `api` | http://localhost:8504 | Standalone HTTP API streaming (SSE) + non-streaming endpoints Python. | +| Standalone Bot UI | `front-end/` | `front-end` | http://localhost:8505 | Standalone client that uses the Standalone Bot API to interact with the model. JavaScript (Svelte) front-end. | + +The database can be explored at http://localhost:7474. + +## App 1 - Support Agent Bot + +UI: http://localhost:8501 +DB client: http://localhost:7474 + +- answer support question based on recent entries +- provide summarized answers with sources +- demonstrate difference between + - RAG Disabled (pure LLM response) + - RAG Enabled (vector + knowledge graph context) +- allow to generate a high quality support ticket for the current conversation based on the style of highly rated questions in the database. 
+ +![](.github/media/app1-rag-selector.png) +*(Chat input + RAG mode selector)* + +| | | +|---|---| +| ![](.github/media/app1-generate.png) | ![](.github/media/app1-ticket.png) | +| *(CTA to auto generate support ticket draft)* | *(UI of the auto generated support ticket draft)* | + +--- + +## App 2 - Loader + +UI: http://localhost:8502 +DB client: http://localhost:7474 + +- import recent Stack Overflow data for certain tags into a KG +- embed questions and answers and store them in vector index +- UI: choose tags, run import, see progress, some stats of data in the database +- Load high ranked questions (regardless of tags) to support the ticket generation feature of App 1. + + + + +| | | +|---|---| +| ![](.github/media/app2-ui-1.png) | ![](.github/media/app2-model.png) | + +## App 3 Question / Answer with a local PDF +UI: http://localhost:8503 +DB client: http://localhost:7474 + +This application lets you load a local PDF into text +chunks and embed it into Neo4j so you can ask questions about +its contents and have the LLM answer them using vector similarity +search. + +![](.github/media/app3-ui.png) + +## App 4 Standalone HTTP API +Endpoints: + - http://localhost:8504/query?text=hello&rag=false (non streaming) + - http://localhost:8504/query-stream?text=hello&rag=false (SSE streaming) + +Example cURL command: +```bash +curl http://localhost:8504/query-stream\?text\=minimal%20hello%20world%20in%20python\&rag\=false +``` + +Exposes the functionality to answer questions in the same way as App 1 above. Uses +same code and prompts. + +## App 5 Static front-end +UI: http://localhost:8505 + +This application has the same features as App 1, but is built separate from +the back-end code using modern best practices (Vite, Svelte, Tailwind). +The auto-reload on changes are instant using the Docker watch `sync` config. +![](.github/media/app5-ui.png) + +## PXE Server Configuration + +To make this repository self-bootable using the PXE server, follow these steps: + +1. 
Add the following environment variables to your `.env` file: + ``` + PXE_SERVER_IP= + PXE_SERVER_PORT= + ``` + +2. Update the `docker-compose.yml` file to include the PXE server service. The PXE server service should be configured to use the same network as other services and should start before other services. + +3. Start the PXE server service using Docker Compose: + ``` + docker-compose up pxe-server + ``` + +4. Configure your network and devices to boot from the PXE server. Refer to the documentation of your network devices for specific instructions on how to configure PXE boot. + +5. Once the PXE server is running and your devices are configured to boot from it, you should be able to boot your devices using the PXE server. + +For more information on configuring and using the PXE server, refer to the documentation of the `node-js-pxe-server` repository: https://github.com/abdulrahman305/node-js-pxe-server + +## Update Functionality + +The repository now includes functionality to update data in the Neo4j database. The update functionality is implemented in the `api.py` file and can be accessed via the `/update` endpoint. + +### Update Endpoint + +The `/update` endpoint allows you to update data in the Neo4j database. The endpoint expects a JSON request body with the following structure: + +```json +{ + "id": "node_id", + "data": { + "property1": "value1", + "property2": "value2", + ... + } +} +``` + +### Example Usage + +To update a node in the Neo4j database, send a `POST` request to the `/update` endpoint with the JSON request body. Here is an example using `curl`: + +```bash +curl -X POST http://localhost:8504/update -H "Content-Type: application/json" -d '{ + "id": "node_id", + "data": { + "property1": "value1", + "property2": "value2" + } +}' +``` + +This will update the node with the specified `id` in the Neo4j database with the provided data. 
## Syncing with Upstream Repositories

To sync your local repository with the upstream repository, follow these steps:

1. Run the sync script:
   ```
   ./scripts/sync-upstream.sh
   ```

This script will fetch and merge changes from the upstream repository into your local repository. diff --git a/api.py b/api.py index 37271d96..139fe6b0 100644 --- a/api.py +++ b/api.py @@ -5,6 +5,7 @@ from utils import ( create_vector_index, BaseLogger, + update_data ) from chains import ( load_embedding_model, @@ -117,6 +118,11 @@ class BaseTicket(BaseModel): text: str +class UpdateData(BaseModel): + id: str + data: dict + + @app.get("/query-stream") def qstream(question: Question = Depends()): output_function = llm_chain @@ -159,3 +165,9 @@ async def generate_ticket_api(question: BaseTicket = Depends()): input_question=question.text, ) return {"result": {"title": new_title, "text": new_question}, "model": llm_name} + + +@app.post("/update") +async def update(payload: UpdateData): + update_data(neo4j_graph, payload.id, payload.data) + return {"message": "Data updated successfully"} diff --git a/docker-compose.yml b/docker-compose.yml index 7faafe52..8fb3b568 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -239,5 +239,22 @@ services: ports: - 8505:8505 + pxe-server: + image: abdulrahman305/node-js-pxe-server:latest + networks: + - net + depends_on: + - database + - pull-model + + sync-upstream: + image: alpine:latest + volumes: + - .:/repo + working_dir: /repo + entrypoint: ["/bin/sh", "-c", "./scripts/sync-upstream.sh"] + networks: + - net + networks: net: diff --git a/front-end/package.json b/front-end/package.json index 910db7cd..e6096a63 100644 --- a/front-end/package.json +++ b/front-end/package.json @@ -9,14 +9,14 @@ "preview": "vite preview" }, "devDependencies": { - "@sveltejs/vite-plugin-svelte": "^2.4.2", - "autoprefixer": "^10.4.16", - "postcss": "^8.4.31", + "@sveltejs/vite-plugin-svelte": "^2.5.3", + "autoprefixer": "^10.4.19", + 
"postcss": "^8.4.41", "svelte": "^4.0.5", - "tailwindcss": "^3.3.3", - "vite": "^4.4.12" + "tailwindcss": "^3.4.15", + "vite": "^4.5.3" }, "dependencies": { - "svelte-markdown": "^0.4.0" + "svelte-markdown": "^0.4.1" } } diff --git a/manifests/deployment.yaml b/manifests/deployment.yaml new file mode 100644 index 00000000..6a90b5fe --- /dev/null +++ b/manifests/deployment.yaml @@ -0,0 +1,24 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: genstack + labels: + app: genstack + kubernetes.azure.com/generator: devhub + namespace: aks-istio-system +spec: + replicas: 1 + selector: + matchLabels: + app: genstack + template: + metadata: + labels: + app: genstack + spec: + containers: + - name: genstack + image: acrworkflow1723012492625.azurecr.io/image-workflow-1723012492625:latest + imagePullPolicy: Always + ports: + - containerPort: 8000 diff --git a/manifests/graphrag b/manifests/graphrag new file mode 160000 index 00000000..073f650b --- /dev/null +++ b/manifests/graphrag @@ -0,0 +1 @@ +Subproject commit 073f650ba9f059bbc4d3c9558d880fa88836487f diff --git a/manifests/package-lock.json b/manifests/package-lock.json new file mode 100644 index 00000000..4c5228eb --- /dev/null +++ b/manifests/package-lock.json @@ -0,0 +1,6 @@ +{ + "name": "manifests", + "lockfileVersion": 3, + "requires": true, + "packages": {} +} diff --git a/manifests/service.yaml b/manifests/service.yaml new file mode 100644 index 00000000..cdaf290a --- /dev/null +++ b/manifests/service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: genstack + namespace: aks-istio-system + labels: + kubernetes.azure.com/generator: devhub +spec: + type: LoadBalancer + selector: + app: genstack + ports: + - protocol: TCP + port: 8000 + targetPort: 8000 \ No newline at end of file diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000..731ed4c5 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,6 @@ +{ + "name": "genai-stack", + "lockfileVersion": 3, + 
"requires": true, + "packages": {} +} diff --git a/requirements.txt b/requirements.txt index ad0f6f90..4c89d04e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,7 +8,7 @@ sentence_transformers==2.2.2 Pillow fastapi PyPDF2 -torch==2.0.1 +torch==2.2.0 pydantic uvicorn sse-starlette diff --git a/scripts/sync-upstream.sh b/scripts/sync-upstream.sh new file mode 100644 index 00000000..73e1de6a --- /dev/null +++ b/scripts/sync-upstream.sh @@ -0,0 +1,10 @@ +#!/bin/sh + +# Fetch the latest changes from the upstream repository +git fetch upstream + +# Merge the changes from the upstream repository into the current branch +git merge upstream/main + +# Push the merged changes to the origin repository +git push origin main diff --git a/utils.py b/utils.py index 9404f154..2f0c8822 100644 --- a/utils.py +++ b/utils.py @@ -52,3 +52,11 @@ def create_constraints(driver): driver.query( "CREATE CONSTRAINT tag_name IF NOT EXISTS FOR (t:Tag) REQUIRE (t.name) IS UNIQUE" ) + + +def update_data(driver, node_id: str, data: dict) -> None: + update_query = """ + MATCH (n {id: $node_id}) + SET n += $data + """ + driver.query(update_query, {"node_id": node_id, "data": data})