diff --git a/NOTICE.md b/NOTICE.md
index beb584eea..5a668cb97 100644
--- a/NOTICE.md
+++ b/NOTICE.md
@@ -113,10 +113,6 @@ From the following locations:
 
-### @azure/storage-blob
-
-
-
 ### classnames
 
diff --git a/app/backend/app.py b/app/backend/app.py
index 6886b2cf3..d59ffd924 100644
--- a/app/backend/app.py
+++ b/app/backend/app.py
@@ -3,6 +3,7 @@
 from io import StringIO
 from typing import Optional
+from datetime import datetime
 import asyncio
 import logging
 import os
@@ -10,9 +11,8 @@
 import urllib.parse
 import pandas as pd
 import pydantic
-from datetime import datetime, timedelta
 from fastapi.staticfiles import StaticFiles
-from fastapi import FastAPI, File, HTTPException, Request, UploadFile
+from fastapi import FastAPI, File, HTTPException, Request, UploadFile, Form
 from fastapi.responses import RedirectResponse, StreamingResponse
 import openai
 from approaches.comparewebwithwork import CompareWebWithWork
@@ -24,7 +24,7 @@
 from azure.identity import ManagedIdentityCredential, AzureAuthorityHosts, DefaultAzureCredential, get_bearer_token_provider
 from azure.mgmt.cognitiveservices import CognitiveServicesManagementClient
 from azure.search.documents import SearchClient
-from azure.storage.blob import BlobServiceClient, generate_container_sas, ContainerSasPermissions
+from azure.storage.blob import BlobServiceClient, ContentSettings
 from approaches.mathassistant import(
     generate_response,
     process_agent_response,
@@ -106,6 +106,7 @@ log.propagate = True
 
 class StatusResponse(pydantic.BaseModel):
+    """The response model for the health check endpoint"""
     status: str
     uptime_seconds: float
     version: str
@@ -114,7 +115,7 @@ class StatusResponse(pydantic.BaseModel):
 
 IS_READY = False
-dffinal = None
+DF_FINAL = None
 # Used by the OpenAI SDK
 openai.api_type = "azure"
 openai.api_base = ENV["AZURE_OPENAI_ENDPOINT"]
@@ -132,9 +133,11 @@ class StatusResponse(pydantic.BaseModel):
     azure_credential = DefaultAzureCredential(authority=AUTHORITY)
 else:
     azure_credential = ManagedIdentityCredential(authority=AUTHORITY)
-# Comment these two lines out if using keys, set your API key in the OPENAI_API_KEY environment variable instead
+# Comment these two lines out if using keys, set your API key in the OPENAI_API_KEY
+# environment variable instead
 openai.api_type = "azure_ad"
-token_provider = get_bearer_token_provider(azure_credential, f'https://{ENV["AZURE_AI_CREDENTIAL_DOMAIN"]}/.default')
+token_provider = get_bearer_token_provider(azure_credential,
+                                           f'https://{ENV["AZURE_AI_CREDENTIAL_DOMAIN"]}/.default')
 openai.azure_ad_token_provider = token_provider
 #openai.api_key = ENV["AZURE_OPENAI_SERVICE_KEY"]
@@ -157,9 +160,11 @@ class StatusResponse(pydantic.BaseModel):
     credential=azure_credential,
 )
 blob_container = blob_client.get_container_client(ENV["AZURE_BLOB_STORAGE_CONTAINER"])
+blob_upload_container_client = blob_client.get_container_client(
+    os.environ["AZURE_BLOB_STORAGE_UPLOAD_CONTAINER"])
 
-model_name = ''
-model_version = ''
+MODEL_NAME = ''
+MODEL_VERSION = ''
 
 # Set up OpenAI management client
 openai_mgmt_client = CognitiveServicesManagementClient(
@@ -173,20 +178,20 @@ class StatusResponse(pydantic.BaseModel):
     account_name=ENV["AZURE_OPENAI_SERVICE"],
     deployment_name=ENV["AZURE_OPENAI_CHATGPT_DEPLOYMENT"])
 
-model_name = deployment.properties.model.name
-model_version = deployment.properties.model.version
+MODEL_NAME = deployment.properties.model.name
+MODEL_VERSION = deployment.properties.model.version
 
-if (str_to_bool.get(ENV["USE_AZURE_OPENAI_EMBEDDINGS"])):
+if str_to_bool.get(ENV["USE_AZURE_OPENAI_EMBEDDINGS"]):
     embedding_deployment = openai_mgmt_client.deployments.get(
         resource_group_name=ENV["AZURE_OPENAI_RESOURCE_GROUP"],
         account_name=ENV["AZURE_OPENAI_SERVICE"],
         deployment_name=ENV["EMBEDDING_DEPLOYMENT_NAME"])
 
-    embedding_model_name = embedding_deployment.properties.model.name
-    embedding_model_version = embedding_deployment.properties.model.version
+    EMBEDDING_MODEL_NAME = embedding_deployment.properties.model.name
+    EMBEDDING_MODEL_VERSION = embedding_deployment.properties.model.version
 else:
-    embedding_model_name = ""
-    embedding_model_version = ""
+    EMBEDDING_MODEL_NAME = ""
+    EMBEDDING_MODEL_VERSION = ""
 
 chat_approaches = {
     Approaches.ReadRetrieveRead: ChatReadRetrieveReadApproach(
@@ -200,8 +205,8 @@ class StatusResponse(pydantic.BaseModel):
         ENV["AZURE_BLOB_STORAGE_CONTAINER"],
         blob_client,
         ENV["QUERY_TERM_LANGUAGE"],
-        model_name,
-        model_version,
+        MODEL_NAME,
+        MODEL_VERSION,
         ENV["TARGET_EMBEDDINGS_MODEL"],
         ENV["ENRICHMENT_APPSERVICE_URL"],
         ENV["TARGET_TRANSLATION_LANGUAGE"],
@@ -211,7 +216,7 @@ class StatusResponse(pydantic.BaseModel):
         str_to_bool.get(ENV["USE_SEMANTIC_RERANKER"])
     ),
     Approaches.ChatWebRetrieveRead: ChatWebRetrieveRead(
-        model_name,
+        MODEL_NAME,
         ENV["AZURE_OPENAI_CHATGPT_DEPLOYMENT"],
         ENV["TARGET_TRANSLATION_LANGUAGE"],
         ENV["BING_SEARCH_ENDPOINT"],
@@ -220,8 +225,8 @@ class StatusResponse(pydantic.BaseModel):
         ENV["AZURE_OPENAI_ENDPOINT"],
         token_provider
     ),
-    Approaches.CompareWorkWithWeb: CompareWorkWithWeb( 
-        model_name,
+    Approaches.CompareWorkWithWeb: CompareWorkWithWeb(
+        MODEL_NAME,
         ENV["AZURE_OPENAI_CHATGPT_DEPLOYMENT"],
         ENV["TARGET_TRANSLATION_LANGUAGE"],
         ENV["BING_SEARCH_ENDPOINT"],
@@ -241,8 +246,8 @@ class StatusResponse(pydantic.BaseModel):
         ENV["AZURE_BLOB_STORAGE_CONTAINER"],
         blob_client,
         ENV["QUERY_TERM_LANGUAGE"],
-        model_name,
-        model_version,
+        MODEL_NAME,
+        MODEL_VERSION,
         ENV["TARGET_EMBEDDINGS_MODEL"],
         ENV["ENRICHMENT_APPSERVICE_URL"],
         ENV["TARGET_TRANSLATION_LANGUAGE"],
@@ -255,8 +260,8 @@ class StatusResponse(pydantic.BaseModel):
         token_provider,
         ENV["AZURE_OPENAI_CHATGPT_DEPLOYMENT"],
         ENV["QUERY_TERM_LANGUAGE"],
-        model_name,
-        model_version,
+        MODEL_NAME,
+        MODEL_VERSION,
         ENV["AZURE_OPENAI_ENDPOINT"]
     )
 }
@@ -315,43 +320,25 @@ async def chat(request: Request):
         impl = chat_approaches.get(Approaches(int(approach)))
         if not impl:
             return {"error": "unknown approach"}, 400
-
-        if (Approaches(int(approach)) == Approaches.CompareWorkWithWeb or Approaches(int(approach)) == Approaches.CompareWebWithWork):
-            r = impl.run(json_body.get("history", []), json_body.get("overrides", {}), json_body.get("citation_lookup", {}), json_body.get("thought_chain", {}))
+
+        if (Approaches(int(approach)) == Approaches.CompareWorkWithWeb or
+            Approaches(int(approach)) == Approaches.CompareWebWithWork):
+            r = impl.run(json_body.get("history", []),
+                         json_body.get("overrides", {}),
+                         json_body.get("citation_lookup", {}),
+                         json_body.get("thought_chain", {}))
         else:
-            r = impl.run(json_body.get("history", []), json_body.get("overrides", {}), {}, json_body.get("thought_chain", {}))
-
+            r = impl.run(json_body.get("history", []),
+                         json_body.get("overrides", {}),
+                         {},
+                         json_body.get("thought_chain", {}))
+
         return StreamingResponse(r, media_type="application/x-ndjson")
     except Exception as ex:
-        log.error(f"Error in chat:: {ex}")
+        log.error("Error in chat:: %s", ex)
         raise HTTPException(status_code=500, detail=str(ex)) from ex
-
-
-
-@app.get("/getblobclienturl")
-async def get_blob_client_url():
-    """Get a URL for a file in Blob Storage with SAS token.
-
-    This function generates a Shared Access Signature (SAS) token for accessing a file in Blob Storage.
-    The generated URL includes the SAS token as a query parameter.
-
-    Returns:
-        dict: A dictionary containing the URL with the SAS token.
-    """
-    # Obtain the user delegation key
-    user_delegation_key = blob_client.get_user_delegation_key(key_start_time=datetime.utcnow(), key_expiry_time=datetime.utcnow() + timedelta(hours=2))
-
-    sas_token = generate_container_sas(account_name=blob_client.account_name,
-                                       container_name=ENV["AZURE_BLOB_STORAGE_UPLOAD_CONTAINER"],
-                                       permission=ContainerSasPermissions(read=True, write=True, delete=False, list=True, tag=True),
-                                       user_delegation_key=user_delegation_key,
-                                       expiry=datetime.utcnow() + timedelta(hours=2)
-                                       )
-
-    return {"url": f"{blob_client.url}upload?{sas_token}"}
-
 @app.post("/getalluploadstatus")
 async def get_all_upload_status(request: Request):
     """
@@ -367,40 +354,41 @@
     timeframe = json_body.get("timeframe")
     state = json_body.get("state")
     folder = json_body.get("folder")
-    tag = json_body.get("tag") 
+    tag = json_body.get("tag")
     try:
-        results = statusLog.read_files_status_by_timeframe(timeframe, 
-                                                           State[state], 
-                                                           folder, 
+        results = statusLog.read_files_status_by_timeframe(timeframe,
+                                                           State[state],
+                                                           folder,
                                                            tag,
                                                            os.environ["AZURE_BLOB_STORAGE_UPLOAD_CONTAINER"])
 
         # retrieve tags for each file
         # Initialize an empty list to hold the tags
-        items = [] 
-        cosmos_client = CosmosClient(url=statusLog._url, credential=azure_credential, consistency_level='Session')
+        items = []
+        cosmos_client = CosmosClient(url=statusLog._url,
+                                     credential=azure_credential,
+                                     consistency_level='Session')
         database = cosmos_client.get_database_client(statusLog._database_name)
         container = database.get_container_client(statusLog._container_name)
         query_string = "SELECT DISTINCT VALUE t FROM c JOIN t IN c.tags"
         items = list(container.query_items(
             query=query_string,
             enable_cross_partition_query=True
-        ))         
+        ))
 
         # Extract and split tags
         unique_tags = set()
         for item in items:
             tags = item.split(',')
-            unique_tags.update(tags)     
+            unique_tags.update(tags)
 
-
     except Exception as ex:
         log.exception("Exception in /getalluploadstatus")
         raise HTTPException(status_code=500, detail=str(ex)) from ex
 
     return results
 
 @app.post("/getfolders")
-async def get_folders(request: Request):
+async def get_folders():
     """
     Get all folders.
@@ -588,16 +576,16 @@ async def get_info_data():
     """
     response = {
         "AZURE_OPENAI_CHATGPT_DEPLOYMENT": ENV["AZURE_OPENAI_CHATGPT_DEPLOYMENT"],
-        "AZURE_OPENAI_MODEL_NAME": f"{model_name}",
-        "AZURE_OPENAI_MODEL_VERSION": f"{model_version}",
+        "AZURE_OPENAI_MODEL_NAME": f"{MODEL_NAME}",
+        "AZURE_OPENAI_MODEL_VERSION": f"{MODEL_VERSION}",
         "AZURE_OPENAI_SERVICE": ENV["AZURE_OPENAI_SERVICE"],
         "AZURE_SEARCH_SERVICE": ENV["AZURE_SEARCH_SERVICE"],
         "AZURE_SEARCH_INDEX": ENV["AZURE_SEARCH_INDEX"],
         "TARGET_LANGUAGE": ENV["QUERY_TERM_LANGUAGE"],
         "USE_AZURE_OPENAI_EMBEDDINGS": ENV["USE_AZURE_OPENAI_EMBEDDINGS"],
         "EMBEDDINGS_DEPLOYMENT": ENV["EMBEDDING_DEPLOYMENT_NAME"],
-        "EMBEDDINGS_MODEL_NAME": f"{embedding_model_name}",
-        "EMBEDDINGS_MODEL_VERSION": f"{embedding_model_version}",
+        "EMBEDDINGS_MODEL_NAME": f"{EMBEDDING_MODEL_NAME}",
+        "EMBEDDINGS_MODEL_VERSION": f"{EMBEDDING_MODEL_VERSION}",
     }
     return response
 
@@ -699,12 +687,12 @@ async def getHint(question: Optional[str] = None):
 
 @app.post("/posttd")
 async def posttd(csv: UploadFile = File(...)):
     try:
-        global dffinal
+        global DF_FINAL
         # Read the file into a pandas DataFrame
         content = await csv.read()
         df = pd.read_csv(StringIO(content.decode('utf-8-sig')))
 
-        dffinal = df
+        DF_FINAL = df
         # Process the DataFrame...
         save_df(df)
     except Exception as ex:
@@ -714,12 +702,12 @@ async def posttd(csv: UploadFile = File(...)):
     #return {"filename": csv.filename}
 @app.get("/process_td_agent_response")
 async def process_td_agent_response(retries=3, delay=1000, question: Optional[str] = None):
-    save_df(dffinal)
+    save_df(DF_FINAL)
     if question is None:
         raise HTTPException(status_code=400, detail="Question is required")
     for i in range(retries):
         try:
-            results = td_agent_response(question,dffinal)
+            results = td_agent_response(question,DF_FINAL)
             return results
         except AttributeError as ex:
             log.exception(f"Exception in /process_tabular_data_agent_response:{str(ex)}")
@@ -739,14 +727,14 @@ async def process_td_agent_response(retries=3, delay=1000, question: Optional[st
 
 @app.get("/getTdAnalysis")
 async def getTdAnalysis(retries=3, delay=1, question: Optional[str] = None):
-    global dffinal
+    global DF_FINAL
     if question is None:
         raise HTTPException(status_code=400, detail="Question is required")
 
     for i in range(retries):
         try:
-            save_df(dffinal)
-            results = td_agent_scratch_pad(question, dffinal)
+            save_df(DF_FINAL)
+            results = td_agent_scratch_pad(question, DF_FINAL)
             return results
         except AttributeError as ex:
             log.exception(f"Exception in /getTdAnalysis:{str(ex)}")
@@ -796,11 +784,11 @@ async def stream_response(question: str):
 
 @app.get("/tdstream")
 async def td_stream_response(question: str):
-    save_df(dffinal)
+    save_df(DF_FINAL)
 
     try:
-        stream = td_agent_scratch_pad(question, dffinal)
+        stream = td_agent_scratch_pad(question, DF_FINAL)
         return StreamingResponse(stream, media_type="text/event-stream")
     except Exception as ex:
         log.exception("Exception in /stream")
@@ -857,6 +845,37 @@ async def get_feature_flags():
     }
     return response
 
+@app.post("/file")
+async def upload_file(
+    file: UploadFile = File(...),
+    file_path: str = Form(...),
+    tags: str = Form(None)
+):
+    """
+    Upload a file to Azure Blob Storage.
+    Parameters:
+    - file: The file to upload.
+    - file_path: The path to save the file in Blob Storage.
+    - tags: The tags to associate with the file.
+    Returns:
+    - response: A message indicating the result of the upload.
+    """
+    try:
+        blob_upload_client = blob_upload_container_client.get_blob_client(file_path)
+
+        blob_upload_client.upload_blob(
+            file.file,
+            overwrite=True,
+            content_settings=ContentSettings(content_type=file.content_type),
+            metadata={"tags": tags}
+        )
+
+        return {"message": f"File '{file.filename}' uploaded successfully"}
+
+    except Exception as ex:
+        log.exception("Exception in /file")
+        raise HTTPException(status_code=500, detail=str(ex)) from ex
+
 app.mount("/", StaticFiles(directory="static"), name="static")
 
 if __name__ == "__main__":
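The testsuite changes below exercise this endpoint through FastAPI's TestClient; it can also be smoke-tested by hand. A minimal sketch, not part of the patch: the base URL and the local CSV name are assumptions, and the `requests` package is presumed to be installed.

import requests

# One multipart file part plus two form fields, mirroring what the new
# /file endpoint (upload_file in app.py above) expects.
with open("parts_inventory.csv", "rb") as f:
    resp = requests.post(
        "http://localhost:8000/file",  # assumed local dev address
        files={"file": ("parts_inventory.csv", f, "text/csv")},
        data={"file_path": "Finance/parts_inventory.csv", "tags": "test,inventory"},
        timeout=60,
    )

resp.raise_for_status()
print(resp.json())  # {'message': "File 'parts_inventory.csv' uploaded successfully"}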
diff --git a/app/backend/testsuite.py b/app/backend/testsuite.py
index 241aabfa8..d60ffeea2 100644
--- a/app/backend/testsuite.py
+++ b/app/backend/testsuite.py
@@ -6,6 +6,7 @@
 import os
 from fastapi.testclient import TestClient
 from dotenv import load_dotenv
+import io
 
 dir = current_working_directory = os.getcwd()
 # We're running from MAKE file, so we need to change directory to app/backend
@@ -351,7 +352,105 @@ def test_delete_item():
     assert response.status_code == 200
     assert response.json() == True
 
+def test_upload_file_one_tag():
+    with open("test_data/parts_inventory.csv", "rb") as file:
+        response = client.post(
+            "/file",
+            files={"file": ("parts_inventory.csv", file, "text/csv")},
+            data={"file_path": "parts_inventory.csv", "tags": "test"}
+        )
+    assert response.status_code == 200
+    assert response.json() == {"message": "File 'parts_inventory.csv' uploaded successfully"}
+
+def test_uploadfilenotagsnofolder():
+    with open("test_data/parts_inventory.csv", "rb") as file:
+        response = client.post(
+            "/file",
+            files={"file": ("parts_inventory.csv", file, "text/csv")},
+            data={"file_path": "parts_inventory.csv", "tags": ""}
+        )
+    print(response.json())
+    assert response.status_code == 200
+    assert response.json() == {"message": "File 'parts_inventory.csv' uploaded successfully"}
+
+def test_uploadfiletags():
+    with open("test_data/parts_inventory.csv", "rb") as file:
+        response = client.post(
+            "/file",
+            files={"file": ("parts_inventory.csv", file, "text/csv")},
+            data={"file_path": "parts_inventory.csv", "tags": "test,inventory"}
+        )
+    print(response.json())
+    assert response.status_code == 200
+    assert response.json() == {"message": "File 'parts_inventory.csv' uploaded successfully"}
+
+def test_uploadfilespecificfolder():
+    with open("test_data/parts_inventory.csv", "rb") as file:
+        response = client.post(
+            "/file",
+            files={"file": ("parts_inventory.csv", file, "text/csv")},
+            data={"file_path": "Finance/parts_inventory.csv", "tags": "test"}
+        )
+    assert response.status_code == 200
+    assert response.json() == {"message": "File 'parts_inventory.csv' uploaded successfully"}
+
+def test_uploadfilespecificfoldernested():
+    with open("test_data/parts_inventory.csv", "rb") as file:
+        response = client.post(
+            "/file",
+            files={"file": ("parts_inventory.csv", file, "text/csv")},
+            data={"file_path": "Finance/new/parts_inventory.csv", "tags": "test"}
+        )
+    assert response.status_code == 200
+    assert response.json() == {"message": "File 'parts_inventory.csv' uploaded successfully"}
+
+def test_upload_file_no_file():
+    response = client.post(
+        "/file",
+        data={"file_path": "parts_inventory.csv", "tags": "test"}
+    )
+    assert response.status_code == 422  # Unprocessable Entity
+
+def test_upload_file_large_file():
+    file_content = b"a" * (10 * 1024 * 1024)  # 10 MB file
+    file = io.BytesIO(file_content)
+    file.name = "large_parts_inventory.csv"
+
+    response = client.post(
+        "/file",
+        files={"file": (file.name, file, "text/csv")},
+        data={"file_path": "large_parts_inventory.csv", "tags": "test"}
+    )
+    assert response.status_code == 200
+    assert response.json() == {"message": "File 'large_parts_inventory.csv' uploaded successfully"}
+
+def test_upload_file_missing_file_path():
+    with open("test_data/parts_inventory.csv", "rb") as file:
+        response = client.post(
+            "/file",
+            files={"file": ("parts_inventory.csv", file, "text/csv")},
+            data={"tags": "test"}
+        )
+    assert response.status_code == 422  # Unprocessable Entity
+
+def test_upload_file_special_characters_in_file_path():
+    with open("test_data/parts_inventory.csv", "rb") as file:
+        response = client.post(
+            "/file",
+            files={"file": ("parts_inventory.csv", file, "text/csv")},
+            data={"file_path": "Finance/@new/parts_inventory.csv", "tags": "test"}
+        )
+    assert response.status_code == 200
+    assert response.json() == {"message": "File 'parts_inventory.csv' uploaded successfully"}
+
+def test_upload_file_long_tags():
+    with open("test_data/parts_inventory.csv", "rb") as file:
+        long_tags = ",".join(["tag"] * 1000)  # Very long tags string
+        response = client.post(
+            "/file",
+            files={"file": ("parts_inventory.csv", file, "text/csv")},
+            data={"file_path": "parts_inventory.csv", "tags": long_tags}
+        )
+    assert response.status_code == 200
+    assert response.json() == {"message": "File 'parts_inventory.csv' uploaded successfully"}
 
 # This test requires some amount of data to be present and processed in IA
 # It is commented out because processing the data takes time and the test will fail if the data is not processed
 # Change the question to a valid question that will produce citations if you want to run this test
diff --git a/app/frontend/package.json b/app/frontend/package.json
index f1df48085..dfd973414 100644
--- a/app/frontend/package.json
+++ b/app/frontend/package.json
@@ -9,7 +9,6 @@
         "watch": "tsc && vite build --watch"
     },
     "dependencies": {
-        "@azure/storage-blob": "^12.24.0",
         "@fluentui/react": "^8.110.7",
         "@fluentui/react-icons": "^2.0.195",
         "@react-spring/web": "^9.7.1",
diff --git a/app/frontend/src/api/api.ts b/app/frontend/src/api/api.ts
index 91b072f15..44c61335d 100644
--- a/app/frontend/src/api/api.ts
+++ b/app/frontend/src/api/api.ts
@@ -3,7 +3,6 @@
 import {
     ChatResponse,
     ChatRequest,
-    BlobClientUrlResponse,
     AllFilesUploadStatus,
     GetUploadStatusRequest,
     GetInfoResponse,
@@ -64,22 +63,6 @@ export function getCitationFilePath(citation: string): string {
     return `${encodeURIComponent(citation)}`;
 }
 
-export async function getBlobClientUrl(): Promise<string> {
-    const response = await fetch("/getblobclienturl", {
-        method: "GET",
-        headers: {
-            "Content-Type": "application/json"
-        }
-    });
-
-    const parsedResponse: BlobClientUrlResponse = await response.json();
-    if (response.status > 299 || !response.ok) {
-        throw Error(parsedResponse.error || "Unknown error");
-    }
-
-    return parsedResponse.url;
-}
-
 export async function getAllUploadStatus(options: GetUploadStatusRequest): Promise<AllFilesUploadStatus> {
     const response = await fetch("/getalluploadstatus", {
         method: "POST",
diff --git a/app/frontend/src/components/FolderPicker/FolderPicker.tsx b/app/frontend/src/components/FolderPicker/FolderPicker.tsx
index c392e609e..2b4c977df 100644
--- a/app/frontend/src/components/FolderPicker/FolderPicker.tsx
+++ b/app/frontend/src/components/FolderPicker/FolderPicker.tsx
@@ -18,9 +18,8 @@ import { ITextFieldStyleProps, ITextFieldStyles, TextField } from '@fluentui/rea
 import { ILabelStyles, ILabelStyleProps } from '@fluentui/react/lib/Label';
 import { IIconProps } from '@fluentui/react';
 import { IButtonProps } from '@fluentui/react/lib/Button';
-import { ContainerClient } from "@azure/storage-blob";
 
-import { getBlobClientUrl } from "../../api";
+import { getFolders } from "../../api";
 import styles from "./FolderPicker.module.css";
 
 var allowNewFolders = false;
 
@@ -85,26 +84,13 @@ export const FolderPicker = ({allowFolderCreation, onSelectedKeyChange, preSelec
 
     async function fetchBlobFolderData() {
         try {
-            const blobClientUrl = await getBlobClientUrl();
-            var containerClient = new ContainerClient(blobClientUrl);
-            const delimiter = "/";
-            const prefix = "";
-            var newOptions: IComboBoxOption[] = allowNewFolders ? [] : [
+            const newOptions: IComboBoxOption[] = allowNewFolders ? [] : [
                 { key: 'selectAll', text: 'Select All', itemType: SelectableOptionMenuItemType.SelectAll },
                 { key: 'FolderHeader', text: 'Folders', itemType: SelectableOptionMenuItemType.Header }];
-            for await (const item of containerClient.listBlobsByHierarchy(delimiter, {
-                prefix,
-            })) {
-                // Check if the item is a folder
-                if (item.kind === "prefix") {
-                    // Get the folder name and add to the dropdown list
-                    var folderName = item.name.slice(0,-1);
-
-                    newOptions.push({key: folderName, text: folderName});
-                    setOptions(newOptions);
-                }
-            }
-            if (!allowNewFolders) {
+            const folders = await getFolders();
+            newOptions.push(...folders.map((folder: string) => ({ key: folder, text: folder })));
+            setOptions(newOptions);
+            if (!allowNewFolders) {
                 var filteredOptions = newOptions.filter(
                     option =>
                         (option.itemType === SelectableOptionMenuItemType.Normal || option.itemType === undefined) &&
                         !option.disabled,
diff --git a/app/frontend/src/components/filepicker/file-picker.tsx b/app/frontend/src/components/filepicker/file-picker.tsx
index 596377b55..7f2f56a71 100644
--- a/app/frontend/src/components/filepicker/file-picker.tsx
+++ b/app/frontend/src/components/filepicker/file-picker.tsx
@@ -1,149 +1,151 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT license.
 
-import { ContainerClient } from "@azure/storage-blob";
-import classNames from "classnames";
-import { nanoid } from "nanoid";
-import { useCallback, useEffect, useMemo, useState } from "react";
-import { DropZone } from "./drop-zone"
-import styles from "./file-picker.module.css";
-import { FilesList } from "./files-list";
-import { getBlobClientUrl, logStatus, StatusLogClassification, StatusLogEntry, StatusLogState } from "../../api"
-
-interface Props {
-    folderPath: string;
-    tags: string[];
-}
-
-const FilePicker = ({folderPath, tags}: Props) => {
-    const [files, setFiles] = useState<any>([]);
-    const [progress, setProgress] = useState(0);
-    const [uploadStarted, setUploadStarted] = useState(false);
-
-    // handler called when files are selected via the Dropzone component
-    const handleOnChange = useCallback((files: any) => {
-
-        let filesArray = Array.from(files);
-
-        filesArray = filesArray.map((file) => ({
-            id: nanoid(),
-            file
-        }));
-        setFiles(filesArray as any);
-        setProgress(0);
-        setUploadStarted(false);
-}, []);
-
-    // handle for removing files form the files list view
-    const handleClearFile = useCallback((id: any) => {
-        setFiles((prev: any) => prev.filter((file: any) => file.id !== id));
-    }, []);
-
-    // whether to show the progress bar or not
-    const canShowProgress = useMemo(() => files.length > 0, [files.length]);
-
-    // execute the upload operation
-    const handleUpload = useCallback(async () => {
-        try {
-            const data = new FormData();
-            console.log("files", files);
-            setUploadStarted(true);
-
-            // create an instance of the BlobServiceClient
-            const blobClientUrl = await getBlobClientUrl();
-
-            const containerClient = new ContainerClient(blobClientUrl);
-            var counter = 1;
-            files.forEach(async (indexedFile: any) => {
-                // add each file into Azure Blob Storage
-                var file = indexedFile.file as File;
-                var filePath = (folderPath == "") ? file.name : folderPath + "/" + file.name;
-                // set mimetype as determined from browser with file upload control
-                const options = {
-                    blobHTTPHeaders: { blobContentType: file.type },
-                    metadata: { tags: tags.map(encodeURIComponent).join(",") }
-                };
-                try {
-                    // upload file
-                    await containerClient.uploadBlockBlob(filePath, file, file.size, options)
-                    //write status to log
-                    var logEntry: StatusLogEntry = {
-                        path: "upload/"+filePath,
-                        status: "File uploaded from browser to Azure Blob Storage",
-                        status_classification: StatusLogClassification.Info,
-                        state: StatusLogState.Uploaded
-                    }
-                    await logStatus(logEntry);
-                }
-                catch (error) {
-                    console.log("Unable to upload file"+filePath+" : Error: "+error);
+import classNames from "classnames";
+import { nanoid } from "nanoid";
+import { useCallback, useEffect, useMemo, useState } from "react";
+import { DropZone } from "./drop-zone";
+import styles from "./file-picker.module.css";
+import { FilesList } from "./files-list";
+import { logStatus, StatusLogClassification, StatusLogEntry, StatusLogState } from "../../api";
+
+interface Props {
+    folderPath: string;
+    tags: string[];
+}
+
+const FilePicker = ({ folderPath, tags }: Props) => {
+    const [files, setFiles] = useState<any>([]);
+    const [progress, setProgress] = useState(0);
+    const [uploadStarted, setUploadStarted] = useState(false);
+
+    // handler called when files are selected via the Dropzone component
+    const handleOnChange = useCallback((files: any) => {
+        let filesArray = Array.from(files);
+        filesArray = filesArray.map((file) => ({
+            id: nanoid(),
+            file,
+        }));
+        setFiles(filesArray as any);
+        setProgress(0);
+        setUploadStarted(false);
+    }, []);
+
+    // handler for removing files from the files list view
+    const handleClearFile = useCallback((id: any) => {
+        setFiles((prev: any) => prev.filter((file: any) => file.id !== id));
+    }, []);
+
+    // whether to show the progress bar or not
+    const canShowProgress = useMemo(() => files.length > 0, [files.length]);
+
+    // execute the upload operation
+    const handleUpload = useCallback(async () => {
+        try {
+            console.log("files", files);
+            setUploadStarted(true);
+
+            const uploadPromises = files.map(async (indexedFile: any, index: any) => {
+                const file = indexedFile.file as File;
+                const filePath = folderPath === "" ? file.name : `${folderPath}/${file.name}`;
+                // Build one FormData per file so entries from a previous
+                // iteration cannot leak into the next request
+                const data = new FormData();
+
+                // Append file and other data to FormData
+                data.append("file", file);
+                data.append("file_path", filePath);
+
+                if (tags.length > 0) {
+                    data.append("tags", tags.map(encodeURIComponent).join(","));
                 }
-                setProgress((counter/files.length) * 100);
-                counter++;
+
+                try {
+                    const response = await fetch("/file", {
+                        method: "POST",
+                        body: data,
+                    });
+
+                    if (!response.ok) {
+                        throw new Error(`Failed to upload file: ${filePath}`);
+                    }
+
+                    const result = await response.json();
+                    console.log(result);
+
+                    // Write status to log
+                    const logEntry: StatusLogEntry = {
+                        path: "upload/" + filePath,
+                        status: "File uploaded from browser to backend API",
+                        status_classification: StatusLogClassification.Info,
+                        state: StatusLogState.Uploaded,
+                    };
+                    await logStatus(logEntry);
+                } catch (error) {
+                    console.log("Unable to upload file " + filePath + " : Error: " + error);
+                }
+
+                setProgress(((index + 1) / files.length) * 100);
             });
-
-            setUploadStarted(false);
-        } catch (error) {
-            console.log(error);
-        }
+
+            await Promise.all(uploadPromises);
+            setUploadStarted(false);
+        } catch (error) {
+            console.log(error);
+        }
     }, [files, folderPath, tags]);
-
-    // set progress to zero when there are no files
-    useEffect(() => {
-        if (files.length < 1) {
-            setProgress(0);
-        }
-    }, [files.length]);
-
-    // set uploadStarted to false when the upload is complete
-    useEffect(() => {
-        if (progress === 100) {
-            setUploadStarted(false);
-        }
-    }, [progress]);
-
-    const uploadComplete = useMemo(() => progress === 100, [progress]);
-
-    return (
-        <div className={classNames(styles.wrapper)}>
-            {/* canvas */}
-            <div className={classNames(styles.canvas_wrapper)}>
-                <DropZone onChange={handleOnChange} accept={files} />
-            </div>
-
-            {/* files listing */}
-            {files.length ? (
-                <div className={classNames(styles.files_list_wrapper)}>
-                    <FilesList files={files} onClear={handleClearFile} uploadComplete={uploadComplete} />
-                </div>
-            ) : null}
-
-            {/* progress bar */}
-            {canShowProgress ? (
-                <div className={classNames(styles.files_list_progress_wrapper)}>
-                    <progress value={progress} max={100} />
-                </div>
-            ) : null}
-
-            {/* upload button */}
-            {files.length ? (
-                <button onClick={handleUpload} className={classNames(styles.upload_button)} aria-label="upload files" />
-            ) : null}
-        </div>
-    );
-};
-
-export { FilePicker };
+
+    // set progress to zero when there are no files
+    useEffect(() => {
+        if (files.length < 1) {
+            setProgress(0);
+        }
+    }, [files.length]);
+
+    // set uploadStarted to false when the upload is complete
+    useEffect(() => {
+        if (progress === 100) {
+            setUploadStarted(false);
+        }
+    }, [progress]);
+
+    const uploadComplete = useMemo(() => progress === 100, [progress]);
+
+    return (
+        <div className={classNames(styles.wrapper)}>
+            {/* canvas */}
+            <div className={classNames(styles.canvas_wrapper)}>
+                <DropZone onChange={handleOnChange} accept={files} />
+            </div>
+            {/* files listing */}
+            {files.length ? (
+                <div className={classNames(styles.files_list_wrapper)}>
+                    <FilesList files={files} onClear={handleClearFile} uploadComplete={uploadComplete} />
+                </div>
+            ) : null}
+            {/* progress bar */}
+            {canShowProgress ? (
+                <div className={classNames(styles.files_list_progress_wrapper)}>
+                    <progress value={progress} max={100} />
+                </div>
+            ) : null}
+            {/* upload button */}
+            {files.length ? (
+                <button onClick={handleUpload} className={classNames(styles.upload_button)} aria-label="upload files" />
+            ) : null}
+        </div>
+    );
+};
+
+export { FilePicker };
\ No newline at end of file
diff --git a/infra/core/host/webapp/webapp.tf b/infra/core/host/webapp/webapp.tf
index 96b2cafa7..85fbbda17 100644
--- a/infra/core/host/webapp/webapp.tf
+++ b/infra/core/host/webapp/webapp.tf
@@ -82,7 +82,7 @@ resource "azurerm_linux_web_app" "app_service" {
   https_only                                     = true
   tags                                           = var.tags
   webdeploy_publish_basic_authentication_enabled = false
-  public_network_access_enabled                  = var.is_secure_mode ? false : true
+  public_network_access_enabled                  = true
   virtual_network_subnet_id                      = var.is_secure_mode ? var.snetIntegration_id : null
 
   site_config {
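Since this patch moves uploads from browser-side SAS writes to the backend `/file` endpoint, which stores tags as a single comma-separated blob metadata value, a quick way to confirm what actually landed in storage is to read that metadata back. A hedged sketch, not part of the patch: the account URL, container name, and blob path are assumed placeholders, while the calls themselves are standard azure-storage-blob API.

from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobServiceClient

# Assumed values -- substitute your deployment's storage account and the
# container that AZURE_BLOB_STORAGE_UPLOAD_CONTAINER points at.
ACCOUNT_URL = "https://<storage-account>.blob.core.windows.net"
UPLOAD_CONTAINER = "upload"

service = BlobServiceClient(account_url=ACCOUNT_URL, credential=DefaultAzureCredential())
blob = service.get_container_client(UPLOAD_CONTAINER).get_blob_client("Finance/parts_inventory.csv")

# upload_file() in app.py stores the form's tags under the "tags" metadata
# key as one comma-separated string.
print(blob.get_blob_properties().metadata.get("tags"))  # e.g. "test,inventory"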