diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 00000000000..38cc5aeabb5
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,114 @@
+name: release
+
+on:
+  schedule:
+    - cron: '0 13 * * *'  # runs daily at 13:00 UTC (21:00 UTC+8)
+  # The "create" event fires only when a new tag is created; pushes that update an existing tag do not trigger it.
+  create:
+    tags:
+      - "v*.*.*"  # normal release
+      - "nightly"  # the only mutable tag
+
+# https://docs.github.com/en/actions/using-jobs/using-concurrency
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  release:
+    runs-on: [ "self-hosted", "overseas" ]
+    steps:
+      - name: Ensure workspace ownership
+        run: echo "chown -R $USER $GITHUB_WORKSPACE" && sudo chown -R $USER $GITHUB_WORKSPACE
+
+      # https://github.com/actions/checkout/blob/v4/README.md
+      - name: Check out code
+        uses: actions/checkout@v4
+        with:
+          token: ${{ secrets.MY_GITHUB_TOKEN }}  # personal access token stored in repository secrets
+
+      - name: Prepare release body
+        run: |
+          if [[ $GITHUB_EVENT_NAME == 'create' ]]; then
+            RELEASE_TAG=${GITHUB_REF#refs/tags/}
+            if [[ $RELEASE_TAG == 'nightly' ]]; then
+              PRERELEASE=true
+            else
+              PRERELEASE=false
+            fi
+            echo "Workflow triggered by create tag: $RELEASE_TAG"
+          else
+            RELEASE_TAG=nightly
+            PRERELEASE=true
+            echo "Workflow triggered by schedule"
+          fi
+          echo "RELEASE_TAG=$RELEASE_TAG" >> $GITHUB_ENV
+          echo "PRERELEASE=$PRERELEASE" >> $GITHUB_ENV
+          RELEASE_DATETIME=$(date --rfc-3339=seconds)
+          echo Release $RELEASE_TAG created from $GITHUB_SHA at $RELEASE_DATETIME > release_body.md
+
+      - name: Move the existing mutable tag
+        # https://github.com/softprops/action-gh-release/issues/171
+        run: |
+          if [[ $GITHUB_EVENT_NAME == 'schedule' ]]; then
+            # Determine if a given tag exists and matches a specific Git commit.
+            # actions/checkout@v4 fetch-tags doesn't work when triggered by schedule
+            git fetch --tags
+            if [ "$(git rev-parse -q --verify "refs/tags/$RELEASE_TAG")" = "$GITHUB_SHA" ]; then
+              echo "mutable tag $RELEASE_TAG exists and matches $GITHUB_SHA"
+            else
+              git tag -f $RELEASE_TAG $GITHUB_SHA
+              git push -f origin $RELEASE_TAG:refs/tags/$RELEASE_TAG
+              echo "created/moved mutable tag $RELEASE_TAG to $GITHUB_SHA"
+            fi
+          fi
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      # https://github.com/marketplace/actions/docker-login
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: infiniflow
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      # https://github.com/marketplace/actions/build-and-push-docker-images
+      - name: Build and push full image
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          push: true
+          tags: infiniflow/ragflow:${{ env.RELEASE_TAG }}
+          file: Dockerfile
+          platforms: linux/amd64
+
+      # https://github.com/marketplace/actions/build-and-push-docker-images
+      - name: Build and push slim image
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          push: true
+          tags: infiniflow/ragflow:${{ env.RELEASE_TAG }}-slim
+          file: Dockerfile
+          build-args: LIGHTEN=1
+          platforms: linux/amd64
+
+      - name: Build ragflow-sdk
+        if: startsWith(github.ref, 'refs/tags/v')
+        run: |
+          apt install -y pipx && \
+          pipx install poetry && \
+          cd sdk/python && \
+          poetry build
+
+      - name: Publish package distributions to PyPI
+        if: startsWith(github.ref, 'refs/tags/v')
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          packages-dir: sdk/python/dist/
+          password: ${{ secrets.PYPI_API_TOKEN }}
+          verbose: true
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index c7d53763c2f..4e467fdad61 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -52,9 +52,8 @@ jobs:
       - name: Build ragflow:dev-slim
         run: |
           RUNNER_WORKSPACE_PREFIX=${RUNNER_WORKSPACE_PREFIX:-$HOME}
-          cp -r ${RUNNER_WORKSPACE_PREFIX}/huggingface.co ${RUNNER_WORKSPACE_PREFIX}/nltk_data ${RUNNER_WORKSPACE_PREFIX}/libssl*.deb ${RUNNER_WORKSPACE_PREFIX}/tika-server*.jar* ${RUNNER_WORKSPACE_PREFIX}/chrome* ${RUNNER_WORKSPACE_PREFIX}/cl100k_base.tiktoken .
           sudo docker pull ubuntu:22.04
-          sudo docker build --progress=plain -f Dockerfile.slim -t infiniflow/ragflow:dev-slim .
+          sudo docker build --progress=plain --build-arg LIGHTEN=1 -f Dockerfile -t infiniflow/ragflow:dev-slim .
 
       - name: Build ragflow:dev
         run: |
diff --git a/Dockerfile b/Dockerfile
index 985eb061bbd..002955f63d2 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -3,37 +3,57 @@ FROM ubuntu:22.04 AS base
 USER root
 SHELL ["/bin/bash", "-c"]
 
-ENV LIGHTEN=0
+ARG LIGHTEN=0
+ENV LIGHTEN=${LIGHTEN}
 
 WORKDIR /ragflow
 
-RUN rm -f /etc/apt/apt.conf.d/docker-clean \
-    && echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
-
-RUN --mount=type=cache,id=ragflow_base_apt,target=/var/cache/apt,sharing=locked \
-    apt update && apt-get --no-install-recommends install -y ca-certificates
+# Copy models downloaded via download_deps.py
+RUN mkdir -p /ragflow/rag/res/deepdoc /root/.ragflow
+RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/huggingface.co,target=/huggingface.co \
+    tar --exclude='.*' -cf - \
+        /huggingface.co/InfiniFlow/text_concat_xgb_v1.0 \
+        /huggingface.co/InfiniFlow/deepdoc \
+        | tar -xf - --strip-components=3 -C /ragflow/rag/res/deepdoc
+RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/huggingface.co,target=/huggingface.co \
+    if [ "$LIGHTEN" == "0" ]; then \
+        (tar -cf - \
+            /huggingface.co/BAAI/bge-large-zh-v1.5 \
+            /huggingface.co/BAAI/bge-reranker-v2-m3 \
+            /huggingface.co/maidalun1020/bce-embedding-base_v1 \
+            /huggingface.co/maidalun1020/bce-reranker-base_v1 \
+        | tar -xf - --strip-components=2 -C /root/.ragflow); \
+    fi
 
-# Setup apt mirror site
-RUN sed -i 's|http://archive.ubuntu.com|https://mirrors.tuna.tsinghua.edu.cn|g' /etc/apt/sources.list
+# https://github.com/chrismattmann/tika-python
+# This is the only way to run python-tika without internet access. Without this set, the default is to check the tika version and pull latest every time from Apache.
+RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/,target=/deps \ + cp -r /deps/nltk_data /root/ && \ + cp /deps/tika-server-standard-3.0.0.jar /deps/tika-server-standard-3.0.0.jar.md5 /ragflow/ && \ + cp /deps/cl100k_base.tiktoken /ragflow/9b5ad71b2ce5302211f9c61530b329a4922fc6a4 + +ENV TIKA_SERVER_JAR="file:///ragflow/tika-server-standard-3.0.0.jar" + +# Setup apt +RUN --mount=type=cache,id=ragflow_apt,target=/var/cache/apt,sharing=locked \ + sed -i 's|http://archive.ubuntu.com|https://mirrors.tuna.tsinghua.edu.cn|g' /etc/apt/sources.list && \ + rm -f /etc/apt/apt.conf.d/docker-clean && \ + echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache && \ + apt update && apt --no-install-recommends install -y ca-certificates && \ + rm -rf /var/lib/apt/lists/* -RUN --mount=type=cache,id=ragflow_base_apt,target=/var/cache/apt,sharing=locked \ +# cv2 requires libGL.so.1 +RUN --mount=type=cache,id=ragflow_apt,target=/var/cache/apt,sharing=locked \ apt update && DEBIAN_FRONTEND=noninteractive apt install -y curl libpython3-dev nginx libglib2.0-0 libglx-mesa0 pkg-config libicu-dev libgdiplus default-jdk python3-pip pipx \ - libatk-bridge2.0-0 libgtk-4-1 libnss3 xdg-utils unzip libgbm-dev wget git \ - && rm -rf /var/lib/apt/lists/* - -RUN pip3 config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple && pip3 config set global.trusted-host "pypi.tuna.tsinghua.edu.cn mirrors.pku.edu.cn" && pip3 config set global.extra-index-url "https://mirrors.pku.edu.cn/pypi/web/simple" \ - && pipx install poetry \ - && /root/.local/bin/poetry self add poetry-plugin-pypi-mirror + libatk-bridge2.0-0 libgtk-4-1 libnss3 xdg-utils unzip libgbm-dev wget git nginx libgl1 vim less && \ + rm -rf /var/lib/apt/lists/* -# https://forum.aspose.com/t/aspose-slides-for-net-no-usable-version-of-libssl-found-with-linux-server/271344/13 -# aspose-slides on linux/arm64 is unavailable -RUN --mount=type=bind,source=libssl1.1_1.1.1f-1ubuntu2_amd64.deb,target=/root/libssl1.1_1.1.1f-1ubuntu2_amd64.deb \ - --mount=type=bind,source=libssl1.1_1.1.1f-1ubuntu2_arm64.deb,target=/root/libssl1.1_1.1.1f-1ubuntu2_arm64.deb \ - if [ "$(uname -m)" = "x86_64" ]; then \ - dpkg -i /root/libssl1.1_1.1.1f-1ubuntu2_amd64.deb; \ - elif [ "$(uname -m)" = "aarch64" ]; then \ - dpkg -i /root/libssl1.1_1.1.1f-1ubuntu2_arm64.deb; \ - fi +RUN pip3 config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple && \ + pip3 config set global.trusted-host pypi.tuna.tsinghua.edu.cn && \ + pipx install poetry && \ + pipx runpip poetry config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple && \ + pipx runpip poetry config set global.trusted-host pypi.tuna.tsinghua.edu.cn && \ + /root/.local/bin/poetry self add poetry-plugin-pypi-mirror ENV PYTHONDONTWRITEBYTECODE=1 DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1 ENV PATH=/root/.local/bin:$PATH @@ -45,7 +65,7 @@ ENV POETRY_REQUESTS_TIMEOUT=15 ENV POETRY_PYPI_MIRROR_URL=https://pypi.tuna.tsinghua.edu.cn/simple/ # nodejs 12.22 on Ubuntu 22.04 is too old -RUN --mount=type=cache,id=ragflow_base_apt,target=/var/cache/apt,sharing=locked \ +RUN --mount=type=cache,id=ragflow_apt,target=/var/cache/apt,sharing=locked \ curl -fsSL https://deb.nodesource.com/setup_20.x | bash - && \ apt purge -y nodejs npm && \ apt autoremove && \ @@ -53,6 +73,26 @@ RUN --mount=type=cache,id=ragflow_base_apt,target=/var/cache/apt,sharing=locked apt install -y nodejs cargo && \ rm -rf /var/lib/apt/lists/* +# Add dependencies of selenium +RUN 
--mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/chrome-linux64-121-0-6167-85,target=/chrome-linux64.zip \ + unzip /chrome-linux64.zip && \ + mv chrome-linux64 /opt/chrome && \ + ln -s /opt/chrome/chrome /usr/local/bin/ +RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/chromedriver-linux64-121-0-6167-85,target=/chromedriver-linux64.zip \ + unzip -j /chromedriver-linux64.zip chromedriver-linux64/chromedriver && \ + mv chromedriver /usr/local/bin/ && \ + rm -f /usr/bin/google-chrome + +# https://forum.aspose.com/t/aspose-slides-for-net-no-usable-version-of-libssl-found-with-linux-server/271344/13 +# aspose-slides on linux/arm64 is unavailable +RUN --mount=type=bind,from=infiniflow/ragflow_deps:latest,source=/,target=/deps \ + if [ "$(uname -m)" = "x86_64" ]; then \ + dpkg -i /deps/libssl1.1_1.1.1f-1ubuntu2_amd64.deb; \ + elif [ "$(uname -m)" = "aarch64" ]; then \ + dpkg -i /deps/libssl1.1_1.1.1f-1ubuntu2_arm64.deb; \ + fi + + # builder stage FROM base AS builder USER root @@ -62,7 +102,7 @@ WORKDIR /ragflow # install dependencies from poetry.lock file COPY pyproject.toml poetry.toml poetry.lock ./ -RUN --mount=type=cache,id=ragflow_builder_poetry,target=/root/.cache/pypoetry,sharing=locked \ +RUN --mount=type=cache,id=ragflow_poetry,target=/root/.cache/pypoetry,sharing=locked \ if [ "$LIGHTEN" == "1" ]; then \ poetry install --no-root; \ else \ @@ -71,20 +111,12 @@ RUN --mount=type=cache,id=ragflow_builder_poetry,target=/root/.cache/pypoetry,sh COPY web web COPY docs docs -RUN --mount=type=cache,id=ragflow_builder_npm,target=/root/.npm,sharing=locked \ +RUN --mount=type=cache,id=ragflow_npm,target=/root/.npm,sharing=locked \ cd web && npm install --force && npm run build COPY .git /ragflow/.git -RUN current_commit=$(git rev-parse --short HEAD); \ - last_tag=$(git describe --tags --abbrev=0); \ - commit_count=$(git rev-list --count "$last_tag..HEAD"); \ - version_info=""; \ - if [ "$commit_count" -eq 0 ]; then \ - version_info=$last_tag; \ - else \ - version_info="$current_commit($last_tag~$commit_count)"; \ - fi; \ +RUN version_info=$(git describe --tags --match=v* --dirty); \ if [ "$LIGHTEN" == "1" ]; then \ version_info="$version_info slim"; \ else \ @@ -104,49 +136,6 @@ ENV VIRTUAL_ENV=/ragflow/.venv COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV} ENV PATH="${VIRTUAL_ENV}/bin:${PATH}" -# Install python packages' dependencies -# cv2 requires libGL.so.1 -RUN --mount=type=cache,id=ragflow_production_apt,target=/var/cache/apt,sharing=locked \ - apt update && apt install -y --no-install-recommends nginx libgl1 vim less && \ - rm -rf /var/lib/apt/lists/* - -# Copy models downloaded via download_deps.py -RUN mkdir -p /ragflow/rag/res/deepdoc /root/.ragflow -RUN --mount=type=bind,source=huggingface.co,target=/huggingface.co \ - tar --exclude='.*' -cf - \ - /huggingface.co/InfiniFlow/text_concat_xgb_v1.0 \ - /huggingface.co/InfiniFlow/deepdoc \ - | tar -xf - --strip-components=3 -C /ragflow/rag/res/deepdoc -RUN --mount=type=bind,source=huggingface.co,target=/huggingface.co \ - tar -cf - \ - /huggingface.co/BAAI/bge-large-zh-v1.5 \ - /huggingface.co/BAAI/bge-reranker-v2-m3 \ - /huggingface.co/maidalun1020/bce-embedding-base_v1 \ - /huggingface.co/maidalun1020/bce-reranker-base_v1 \ - | tar -xf - --strip-components=2 -C /root/.ragflow - -# Copy nltk data downloaded via download_deps.py -COPY nltk_data /root/nltk_data - -# https://github.com/chrismattmann/tika-python -# This is the only way to run python-tika without internet access. 
Without this set, the default is to check the tika version and pull latest every time from Apache. -COPY tika-server-standard-3.0.0.jar /ragflow/tika-server-standard.jar -COPY tika-server-standard-3.0.0.jar.md5 /ragflow/tika-server-standard.jar.md5 -ENV TIKA_SERVER_JAR="file:///ragflow/tika-server-standard.jar" - -# Copy cl100k_base -COPY cl100k_base.tiktoken /ragflow/9b5ad71b2ce5302211f9c61530b329a4922fc6a4 - -# Add dependencies of selenium -RUN --mount=type=bind,source=chrome-linux64-121-0-6167-85,target=/chrome-linux64.zip \ - unzip /chrome-linux64.zip && \ - mv chrome-linux64 /opt/chrome && \ - ln -s /opt/chrome/chrome /usr/local/bin/ -RUN --mount=type=bind,source=chromedriver-linux64-121-0-6167-85,target=/chromedriver-linux64.zip \ - unzip -j /chromedriver-linux64.zip chromedriver-linux64/chromedriver && \ - mv chromedriver /usr/local/bin/ && \ - rm -f /usr/bin/google-chrome - ENV PYTHONPATH=/ragflow/ COPY web web diff --git a/Dockerfile.deps b/Dockerfile.deps new file mode 100644 index 00000000000..438ab5ce9fe --- /dev/null +++ b/Dockerfile.deps @@ -0,0 +1,10 @@ +# This builds an image that contains the resources needed by Dockerfile +# +FROM ubuntu:22.04 + +# Copy resources downloaded via download_deps.py +COPY chromedriver-linux64-121-0-6167-85 chrome-linux64-121-0-6167-85 cl100k_base.tiktoken libssl1.1_1.1.1f-1ubuntu2_amd64.deb libssl1.1_1.1.1f-1ubuntu2_arm64.deb tika-server-standard-3.0.0.jar tika-server-standard-3.0.0.jar.md5 libssl*.deb / + +COPY nltk_data /nltk_data + +COPY huggingface.co /huggingface.co diff --git a/Dockerfile.slim b/Dockerfile.slim deleted file mode 100644 index 17da40f615c..00000000000 --- a/Dockerfile.slim +++ /dev/null @@ -1,163 +0,0 @@ -# base stage -FROM ubuntu:22.04 AS base -USER root -SHELL ["/bin/bash", "-c"] - -ENV LIGHTEN=1 - -WORKDIR /ragflow - -RUN rm -f /etc/apt/apt.conf.d/docker-clean \ - && echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache - -RUN --mount=type=cache,id=ragflow_base_apt,target=/var/cache/apt,sharing=locked \ - apt update && apt-get --no-install-recommends install -y ca-certificates - -# Setup apt mirror site -RUN sed -i 's|http://archive.ubuntu.com|https://mirrors.tuna.tsinghua.edu.cn|g' /etc/apt/sources.list - -RUN --mount=type=cache,id=ragflow_base_apt,target=/var/cache/apt,sharing=locked \ - apt update && DEBIAN_FRONTEND=noninteractive apt install -y curl libpython3-dev nginx libglib2.0-0 libglx-mesa0 pkg-config libicu-dev libgdiplus default-jdk python3-pip pipx \ - libatk-bridge2.0-0 libgtk-4-1 libnss3 xdg-utils unzip libgbm-dev wget git \ - && rm -rf /var/lib/apt/lists/* - -RUN pip3 config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple && pip3 config set global.trusted-host "pypi.tuna.tsinghua.edu.cn mirrors.pku.edu.cn" && pip3 config set global.extra-index-url "https://mirrors.pku.edu.cn/pypi/web/simple" \ - && pipx install poetry \ - && /root/.local/bin/poetry self add poetry-plugin-pypi-mirror - -# https://forum.aspose.com/t/aspose-slides-for-net-no-usable-version-of-libssl-found-with-linux-server/271344/13 -# aspose-slides on linux/arm64 is unavailable -RUN --mount=type=bind,source=libssl1.1_1.1.1f-1ubuntu2_amd64.deb,target=/root/libssl1.1_1.1.1f-1ubuntu2_amd64.deb \ - --mount=type=bind,source=libssl1.1_1.1.1f-1ubuntu2_arm64.deb,target=/root/libssl1.1_1.1.1f-1ubuntu2_arm64.deb \ - if [ "$(uname -m)" = "x86_64" ]; then \ - dpkg -i /root/libssl1.1_1.1.1f-1ubuntu2_amd64.deb; \ - elif [ "$(uname -m)" = "aarch64" ]; then \ - dpkg -i 
/root/libssl1.1_1.1.1f-1ubuntu2_arm64.deb; \ - fi - -ENV PYTHONDONTWRITEBYTECODE=1 DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1 -ENV PATH=/root/.local/bin:$PATH -# Configure Poetry -ENV POETRY_NO_INTERACTION=1 -ENV POETRY_VIRTUALENVS_IN_PROJECT=true -ENV POETRY_VIRTUALENVS_CREATE=true -ENV POETRY_REQUESTS_TIMEOUT=15 -ENV POETRY_PYPI_MIRROR_URL=https://pypi.tuna.tsinghua.edu.cn/simple/ - -# nodejs 12.22 on Ubuntu 22.04 is too old -RUN --mount=type=cache,id=ragflow_base_apt,target=/var/cache/apt,sharing=locked \ - curl -fsSL https://deb.nodesource.com/setup_20.x | bash - && \ - apt purge -y nodejs npm && \ - apt autoremove && \ - apt update && \ - apt install -y nodejs cargo && \ - rm -rf /var/lib/apt/lists/* - -# builder stage -FROM base AS builder -USER root - -WORKDIR /ragflow - -COPY .git /ragflow/.git - -RUN current_commit=$(git rev-parse --short HEAD); \ - last_tag=$(git describe --tags --abbrev=0); \ - commit_count=$(git rev-list --count "$last_tag..HEAD"); \ - version_info=""; \ - if [ "$commit_count" -eq 0 ]; then \ - version_info=$last_tag; \ - else \ - version_info="$current_commit($last_tag~$commit_count)"; \ - fi; \ - if [ "$LIGHTEN" == "1" ]; then \ - version_info="$version_info slim"; \ - else \ - version_info="$version_info full"; \ - fi; \ - echo "RAGFlow version: $version_info"; \ - echo $version_info > /ragflow/VERSION - -COPY web web -COPY docs docs -RUN --mount=type=cache,id=ragflow_builder_npm,target=/root/.npm,sharing=locked \ - cd web && npm install --force && npm run build - -# install dependencies from poetry.lock file -COPY pyproject.toml poetry.toml poetry.lock ./ - -RUN --mount=type=cache,id=ragflow_builder_poetry,target=/root/.cache/pypoetry,sharing=locked \ - if [ "$LIGHTEN" == "1" ]; then \ - poetry install --no-root; \ - else \ - poetry install --no-root --with=full; \ - fi - -# production stage -FROM base AS production -USER root - -WORKDIR /ragflow - -COPY --from=builder /ragflow/VERSION /ragflow/VERSION - -# Install python packages' dependencies -# cv2 requires libGL.so.1 -RUN --mount=type=cache,id=ragflow_production_apt,target=/var/cache/apt,sharing=locked \ - apt update && apt install -y --no-install-recommends nginx libgl1 vim less && \ - rm -rf /var/lib/apt/lists/* - -COPY web web -COPY api api -COPY conf conf -COPY deepdoc deepdoc -COPY rag rag -COPY agent agent -COPY graphrag graphrag -COPY pyproject.toml poetry.toml poetry.lock ./ - -# Copy models downloaded via download_deps.py -RUN mkdir -p /ragflow/rag/res/deepdoc /root/.ragflow -RUN --mount=type=bind,source=huggingface.co,target=/huggingface.co \ - tar --exclude='.*' -cf - \ - /huggingface.co/InfiniFlow/text_concat_xgb_v1.0 \ - /huggingface.co/InfiniFlow/deepdoc \ - | tar -xf - --strip-components=3 -C /ragflow/rag/res/deepdoc - -# Copy nltk data downloaded via download_deps.py -COPY nltk_data /root/nltk_data - -# https://github.com/chrismattmann/tika-python -# This is the only way to run python-tika without internet access. Without this set, the default is to check the tika version and pull latest every time from Apache. 
-COPY tika-server-standard-3.0.0.jar /ragflow/tika-server-standard.jar -COPY tika-server-standard-3.0.0.jar.md5 /ragflow/tika-server-standard.jar.md5 -ENV TIKA_SERVER_JAR="file:///ragflow/tika-server-standard.jar" - -# Copy cl100k_base -COPY cl100k_base.tiktoken /ragflow/9b5ad71b2ce5302211f9c61530b329a4922fc6a4 - -# Add dependencies of selenium -RUN --mount=type=bind,source=chrome-linux64-121-0-6167-85,target=/chrome-linux64.zip \ - unzip /chrome-linux64.zip && \ - mv chrome-linux64 /opt/chrome && \ - ln -s /opt/chrome/chrome /usr/local/bin/ -RUN --mount=type=bind,source=chromedriver-linux64-121-0-6167-85,target=/chromedriver-linux64.zip \ - unzip -j /chromedriver-linux64.zip chromedriver-linux64/chromedriver && \ - mv chromedriver /usr/local/bin/ && \ - rm -f /usr/bin/google-chrome - -# Copy compiled web pages -COPY --from=builder /ragflow/web/dist /ragflow/web/dist - -# Copy Python environment and packages -ENV VIRTUAL_ENV=/ragflow/.venv -COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV} -ENV PATH="${VIRTUAL_ENV}/bin:${PATH}" - -ENV PYTHONPATH=/ragflow/ - -COPY docker/service_conf.yaml.template ./conf/service_conf.yaml.template -COPY docker/entrypoint.sh ./entrypoint.sh -RUN chmod +x ./entrypoint.sh - -ENTRYPOINT ["./entrypoint.sh"] diff --git a/README.md b/README.md index 7193cac46f1..d65f8fa2e73 100644 --- a/README.md +++ b/README.md @@ -272,9 +272,7 @@ This image is approximately 1 GB in size and relies on external LLM and embeddin ```bash git clone https://github.com/infiniflow/ragflow.git cd ragflow/ -pip3 install huggingface-hub nltk -python3 download_deps.py -docker build -f Dockerfile.slim -t infiniflow/ragflow:dev-slim . +docker build --build-arg LIGHTEN=1 -f Dockerfile -t infiniflow/ragflow:dev-slim . ``` ## 🔧 Build a Docker image including embedding models @@ -284,8 +282,6 @@ This image is approximately 9 GB in size. As it includes embedding models, it re ```bash git clone https://github.com/infiniflow/ragflow.git cd ragflow/ -pip3 install huggingface-hub nltk -python3 download_deps.py docker build -f Dockerfile -t infiniflow/ragflow:dev . ``` diff --git a/README_id.md b/README_id.md index f382ca1f67e..b12315e40c4 100644 --- a/README_id.md +++ b/README_id.md @@ -247,9 +247,7 @@ Image ini berukuran sekitar 1 GB dan bergantung pada aplikasi LLM eksternal dan ```bash git clone https://github.com/infiniflow/ragflow.git cd ragflow/ -pip3 install huggingface-hub nltk -python3 download_deps.py -docker build -f Dockerfile.slim -t infiniflow/ragflow:dev-slim . +docker build --build-arg LIGHTEN=1 -f Dockerfile -t infiniflow/ragflow:dev-slim . ``` ## 🔧 Membangun Docker Image Termasuk Model Embedding @@ -259,8 +257,6 @@ Image ini berukuran sekitar 9 GB. Karena sudah termasuk model embedding, ia hany ```bash git clone https://github.com/infiniflow/ragflow.git cd ragflow/ -pip3 install huggingface-hub nltk -python3 download_deps.py docker build -f Dockerfile -t infiniflow/ragflow:dev . ``` diff --git a/README_ja.md b/README_ja.md index 6df951d0fa5..8fcda98523d 100644 --- a/README_ja.md +++ b/README_ja.md @@ -228,9 +228,7 @@ RAGFlow はデフォルトで Elasticsearch を使用して全文とベクトル ```bash git clone https://github.com/infiniflow/ragflow.git cd ragflow/ -pip3 install huggingface-hub nltk -python3 download_deps.py -docker build -f Dockerfile.slim -t infiniflow/ragflow:dev-slim . +docker build --build-arg LIGHTEN=1 -f Dockerfile -t infiniflow/ragflow:dev-slim . ``` ## 🔧 ソースコードをコンパイルしたDockerイメージ(埋め込みモデルを含む) @@ -240,8 +238,6 @@ docker build -f Dockerfile.slim -t infiniflow/ragflow:dev-slim . 
```bash git clone https://github.com/infiniflow/ragflow.git cd ragflow/ -pip3 install huggingface-hub nltk -python3 download_deps.py docker build -f Dockerfile -t infiniflow/ragflow:dev . ``` diff --git a/README_ko.md b/README_ko.md index fd46dcc3546..ec33c78db36 100644 --- a/README_ko.md +++ b/README_ko.md @@ -230,9 +230,7 @@ RAGFlow 는 기본적으로 Elasticsearch 를 사용하여 전체 텍스트 및 ```bash git clone https://github.com/infiniflow/ragflow.git cd ragflow/ -pip3 install huggingface-hub nltk -python3 download_deps.py -docker build -f Dockerfile.slim -t infiniflow/ragflow:dev-slim . +docker build --build-arg LIGHTEN=1 -f Dockerfile -t infiniflow/ragflow:dev-slim . ``` ## 🔧 소스 코드로 Docker 이미지를 컴파일합니다(임베딩 모델 포함) @@ -242,8 +240,6 @@ docker build -f Dockerfile.slim -t infiniflow/ragflow:dev-slim . ```bash git clone https://github.com/infiniflow/ragflow.git cd ragflow/ -pip3 install huggingface-hub nltk -python3 download_deps.py docker build -f Dockerfile -t infiniflow/ragflow:dev . ``` diff --git a/README_zh.md b/README_zh.md index b9d85952594..c03e05ed5a9 100644 --- a/README_zh.md +++ b/README_zh.md @@ -235,9 +235,7 @@ RAGFlow 默认使用 Elasticsearch 存储文本和向量数据. 如果要切换 ```bash git clone https://github.com/infiniflow/ragflow.git cd ragflow/ -pip3 install huggingface-hub nltk -python3 download_deps.py -docker build -f Dockerfile.slim -t infiniflow/ragflow:dev-slim . +docker build --build-arg LIGHTEN=1 -f Dockerfile -t infiniflow/ragflow:dev-slim . ``` ## 🔧 源码编译 Docker 镜像(包含 embedding 模型) @@ -247,8 +245,6 @@ docker build -f Dockerfile.slim -t infiniflow/ragflow:dev-slim . ```bash git clone https://github.com/infiniflow/ragflow.git cd ragflow/ -pip3 install huggingface-hub nltk -python3 download_deps.py docker build -f Dockerfile -t infiniflow/ragflow:dev . ``` diff --git a/agent/canvas.py b/agent/canvas.py index 72ab3c103ec..41410e64e3d 100644 --- a/agent/canvas.py +++ b/agent/canvas.py @@ -224,17 +224,16 @@ def prepare2run(cpns): for m in prepare2run([switch_out]): yield {"content": m, "running_status": True} except Exception as e: - yield {"content": "*Exception*: {}".format(e), "running_status": True} logging.exception("Canvas.run got exception") + raise e continue try: for m in prepare2run(cpn["downstream"]): yield {"content": m, "running_status": True} except Exception as e: - yield {"content": "*Exception*: {}".format(e), "running_status": True} logging.exception("Canvas.run got exception") - ran += 1 + raise e if ran >= len(self.path[-1]) and waiting: without_dependent_checking = waiting diff --git a/agent/component/base.py b/agent/component/base.py index 63d5f0d84be..5825eba582e 100644 --- a/agent/component/base.py +++ b/agent/component/base.py @@ -383,6 +383,9 @@ def __str__(self): "params": {} } """ + out = json.loads(str(self._param)).get("output", {}) + if isinstance(out, dict) and "vector" in out: + del out["vector"] return """{{ "component_name": "{}", "params": {}, @@ -390,7 +393,7 @@ def __str__(self): "inputs": {} }}""".format(self.component_name, self._param, - json.dumps(json.loads(str(self._param)).get("output", {}), ensure_ascii=False), + json.dumps(out, ensure_ascii=False), json.dumps(json.loads(str(self._param)).get("inputs", []), ensure_ascii=False) ) diff --git a/agent/templates/DB Assistant.json b/agent/templates/DB Assistant.json index 874e123509d..277de732e94 100644 --- a/agent/templates/DB Assistant.json +++ b/agent/templates/DB Assistant.json @@ -620,7 +620,7 @@ "text": "Searches for description about meanings of tables and fields." 
}, "label": "Note", - "name": "N:DB Desctription" + "name": "N:DB Description" }, "dragging": false, "height": 128, @@ -679,7 +679,7 @@ { "data": { "form": { - "text": "DDL(Data Definition Language).\n\nSearches for relevent database creation statements.\n\nIt should bind with a KB to which DDL is dumped in.\nYou could use 'General' as parsing method and ';' as delimiter." + "text": "DDL(Data Definition Language).\n\nSearches for relevant database creation statements.\n\nIt should bind with a KB to which DDL is dumped in.\nYou could use 'General' as parsing method and ';' as delimiter." }, "label": "Note", "name": "N: DDL" diff --git a/agent/templates/customer_service.json b/agent/templates/customer_service.json index 3cea3029d3b..edc9931c188 100644 --- a/agent/templates/customer_service.json +++ b/agent/templates/customer_service.json @@ -90,7 +90,7 @@ "message_history_window_size": 12, "parameters": [], "presence_penalty": 0.4, - "prompt": "Role: You are a customer support. \n\nTask: Please answer the question based on content of knowledge base. \n\nReuirements & restrictions:\n - DO NOT make things up when all knowledge base content is irrelevant to the question. \n - Answers need to consider chat history.\n - Request about customer's contact information like, Wechat number, LINE number, twitter, discord, etc,. , when knowlegebase content can't answer his question. So, product expert could contact him soon to solve his problem.\n\n Knowledge base content is as following:\n {input}\n The above is the content of knowledge base.", + "prompt": "Role: You are a customer support. \n\nTask: Please answer the question based on content of knowledge base. \n\nRequirements & restrictions:\n - DO NOT make things up when all knowledge base content is irrelevant to the question. \n - Answers need to consider chat history.\n - Request about customer's contact information like, Wechat number, LINE number, twitter, discord, etc,. , when knowledge base content can't answer his question. So, product expert could contact him soon to solve his problem.\n\n Knowledge base content is as following:\n {input}\n The above is the content of knowledge base.", "temperature": 0.1, "top_p": 0.3 } @@ -336,7 +336,7 @@ "parameters": [], "presencePenaltyEnabled": true, "presence_penalty": 0.4, - "prompt": "Role: You are a customer support. \n\nTask: Please answer the question based on content of knowledge base. \n\nReuirements & restrictions:\n - DO NOT make things up when all knowledge base content is irrelevant to the question. \n - Answers need to consider chat history.\n - Request about customer's contact information like, Wechat number, LINE number, twitter, discord, etc,. , when knowlegebase content can't answer his question. So, product expert could contact him soon to solve his problem.\n\n Knowledge base content is as following:\n {input}\n The above is the content of knowledge base.", + "prompt": "Role: You are a customer support. \n\nTask: Please answer the question based on content of knowledge base. \n\nRequirements & restrictions:\n - DO NOT make things up when all knowledge base content is irrelevant to the question. \n - Answers need to consider chat history.\n - Request about customer's contact information like, Wechat number, LINE number, twitter, discord, etc,. , when knowledge base content can't answer his question. 
So, product expert could contact him soon to solve his problem.\n\n Knowledge base content is as following:\n {input}\n The above is the content of knowledge base.", "temperature": 0.1, "temperatureEnabled": true, "topPEnabled": true, @@ -603,7 +603,7 @@ { "data": { "form": { - "text": "Static messages.\nDefine replys after recieve user's contact information." + "text": "Static messages.\nDefine response after receive user's contact information." }, "label": "Note", "name": "N: What else?" @@ -691,7 +691,7 @@ { "data": { "form": { - "text": "Complete questions by conversation history.\nUser: What's RAGFlow?\nAssistant: RAGFlow is xxx.\nUser: How to deloy it?\n\nRefine it: How to deploy RAGFlow?" + "text": "Complete questions by conversation history.\nUser: What's RAGFlow?\nAssistant: RAGFlow is xxx.\nUser: How to deploy it?\n\nRefine it: How to deploy RAGFlow?" }, "label": "Note", "name": "N: Refine Question" diff --git a/agent/templates/general_chat_bot.json b/agent/templates/general_chat_bot.json index 1abdddcfbcc..6645e988772 100644 --- a/agent/templates/general_chat_bot.json +++ b/agent/templates/general_chat_bot.json @@ -70,8 +70,8 @@ "to": "QWeather:DeepKiwisTeach" }, "2. finance": { - "description": "Question is about finace/economic information, stock market, economic news.", - "examples": "昨日涨幅大于5%的军工股?\nStocks have MACD buyin signals?\nWhen is the next interest rate cut by the Federal Reserve?\n国家救市都有哪些举措?", + "description": "Question is about finance/economic information, stock market, economic news.", + "examples": "Stocks have MACD buy signals?\nWhen is the next interest rate cut by the Federal Reserve?\n", "to": "Concentrator:TrueGeckosSlide" }, "3. medical": { @@ -268,7 +268,7 @@ "message_history_window_size": 12, "parameters": [], "presence_penalty": 0.4, - "prompt": "Role: You‘re warm-hearted lovely young girl, 22 years old, located at Shanghai in China. Your name is R. Who are talking to you is your very good old friend of yours.\n\nTask: \n- Chat with the friend.\n- Ask question and care about them.\n- Provide useful advice to your friend.\n- Tell jokes to make your firend happy.\n\nThe following is the weatcher information:\n{weather}", + "prompt": "Role: You‘re warm-hearted lovely young girl, 22 years old, located at Shanghai in China. Your name is R. Who are talking to you is your very good old friend of yours.\n\nTask: \n- Chat with the friend.\n- Ask question and care about them.\n- Provide useful advice to your friend.\n- Tell jokes to make your friend happy.\n\nThe following is the weather information:\n{weather}", "temperature": 0.1, "top_p": 0.3 } @@ -497,7 +497,7 @@ } ], "presence_penalty": 0.4, - "prompt": "Role: You‘re warm-hearted lovely young girl, 22 years old, located at Shanghai in China. Your name is R. Who are talking to you is your very good old friend of yours.\n\nTask: \n- Chat with the friend.\n- Ask question and care about them.\n- Tell your friend the weather if there's weather information provided. If your friend did not provide region information, ask about where he/she is.\n\nThe following is the weatcher information:\n{weather}\n", + "prompt": "Role: You‘re warm-hearted lovely young girl, 22 years old, located at Shanghai in China. Your name is R. Who are talking to you is your very good old friend of yours.\n\nTask: \n- Chat with the friend.\n- Ask question and care about them.\n- Tell your friend the weather if there's weather information provided. 
If your friend did not provide region information, ask about where he/she is.\n\nThe following is the weather information:\n{weather}\n", "temperature": 0.1, "top_p": 0.3 } @@ -622,8 +622,8 @@ "to": "QWeather:DeepKiwisTeach" }, "2. finance": { - "description": "Question is about finace/economic information, stock market, economic news.", - "examples": "昨日涨幅大于5%的军工股?\nStocks have MACD buyin signals?\nWhen is the next interest rate cut by the Federal Reserve?\n国家救市都有哪些举措?", + "description": "Question is about finance/economic information, stock market, economic news.", + "examples": "Stocks have MACD buy signals?\nWhen is the next interest rate cut by the Federal Reserve?\n", "to": "Concentrator:TrueGeckosSlide" }, "3. medical": { @@ -927,7 +927,7 @@ "parameters": [], "presencePenaltyEnabled": true, "presence_penalty": 0.4, - "prompt": "Role: You‘re warm-hearted lovely young girl, 22 years old, located at Shanghai in China. Your name is R. Who are talking to you is your very good old friend of yours.\n\nTask: \n- Chat with the friend.\n- Ask question and care about them.\n- Provide useful advice to your friend.\n- Tell jokes to make your firend happy.\n\nThe following is the weatcher information:\n{weather}", + "prompt": "Role: You‘re warm-hearted lovely young girl, 22 years old, located at Shanghai in China. Your name is R. Who are talking to you is your very good old friend of yours.\n\nTask: \n- Chat with the friend.\n- Ask question and care about them.\n- Provide useful advice to your friend.\n- Tell jokes to make your friend happy.\n\nThe following is the weather information:\n{weather}", "temperature": 0.1, "temperatureEnabled": true, "topPEnabled": true, @@ -1011,7 +1011,7 @@ "top_p": 0.3 }, "label": "Generate", - "name": "tranlate to Chinese" + "name": "translate to Chinese" }, "dragging": false, "height": 86, @@ -1276,7 +1276,7 @@ ], "presencePenaltyEnabled": true, "presence_penalty": 0.4, - "prompt": "Role: You‘re warm-hearted lovely young girl, 22 years old, located at Shanghai in China. Your name is R. Who are talking to you is your very good old friend of yours.\n\nTask: \n- Chat with the friend.\n- Ask question and care about them.\n- Tell your friend the weather if there's weather information provided. If your friend did not provide region information, ask about where he/she is.\n\nThe following is the weatcher information:\n{weather}\n", + "prompt": "Role: You‘re warm-hearted lovely young girl, 22 years old, located at Shanghai in China. Your name is R. Who are talking to you is your very good old friend of yours.\n\nTask: \n- Chat with the friend.\n- Ask question and care about them.\n- Tell your friend the weather if there's weather information provided. If your friend did not provide region information, ask about where he/she is.\n\nThe following is the weather information:\n{weather}\n", "temperature": 0.1, "temperatureEnabled": true, "topPEnabled": true, diff --git a/agent/templates/interpreter.json b/agent/templates/interpreter.json index edb3b8be5ef..d5892786a81 100644 --- a/agent/templates/interpreter.json +++ b/agent/templates/interpreter.json @@ -476,7 +476,7 @@ "text": "Translation Agent: Agentic translation using reflection workflow\n\nThis is inspired by Andrew NG's project: https://github.com/andrewyng/translation-agent\n\n1. Prompt an LLM to translate a text into the target language;\n2. Have the LLM reflect on the translation and provide constructive suggestions for improvement;\n3. Use these suggestions to improve the translation." 
}, "label": "Note", - "name": "Breif" + "name": "Brief" }, "dragHandle": ".note-drag-handle", "dragging": false, diff --git a/agent/templates/medical_consultation.json b/agent/templates/medical_consultation.json index 17c24b412ef..c6b228ff0e6 100644 --- a/agent/templates/medical_consultation.json +++ b/agent/templates/medical_consultation.json @@ -534,7 +534,7 @@ { "data": { "form": { - "text": "A prompt sumerize content from search result from PubMed and Q&A dataset." + "text": "A prompt summarize content from search result from PubMed and Q&A dataset." }, "label": "Note", "name": "N: LLM" diff --git a/agent/templates/seo_blog.json b/agent/templates/seo_blog.json index 1be6d6729b5..b59251b56f3 100644 --- a/agent/templates/seo_blog.json +++ b/agent/templates/seo_blog.json @@ -347,7 +347,7 @@ } ], "presence_penalty": 0.4, - "prompt": "You are an SEO expert who writes in a direct, practical, educational style that is factual rather than storytelling or narrative, focusing on explaining to {audience} the \"how\" and \"what is\" and \u201cwhy\u201d rather than narrating to the audience. \n - Please write at a sixth grade reading level. \n - ONLY output in Markdown format.\n - Use positive, present tense expressions and avoid using complex words and sentence structures that lack narrative, such as \"reveal\" and \"dig deep.\"\n - Next, please continue writing articles related to our topic with a concise title, {title_0}{title} {keywords_0}{keywords}. \n - Please AVOID repeating what has already been written and do not use the same sentence structure. \n - JUST write the body of the article based on the outline.\n - DO NOT include introduction, title.\n - DO NOT miss anything mentioned in artical outline, except introduction and title.\n - Please use the information I provide to create in-depth, interesting and unique content. Also, incorporate the references and data points I provided earlier into the article to increase its value to the reader.\n - MUST be in language of \"{keywords_0} {title_0}\".\n\n\n{outline}\n\n", + "prompt": "You are an SEO expert who writes in a direct, practical, educational style that is factual rather than storytelling or narrative, focusing on explaining to {audience} the \"how\" and \"what is\" and \u201cwhy\u201d rather than narrating to the audience. \n - Please write at a sixth grade reading level. \n - ONLY output in Markdown format.\n - Use positive, present tense expressions and avoid using complex words and sentence structures that lack narrative, such as \"reveal\" and \"dig deep.\"\n - Next, please continue writing articles related to our topic with a concise title, {title_0}{title} {keywords_0}{keywords}. \n - Please AVOID repeating what has already been written and do not use the same sentence structure. \n - JUST write the body of the article based on the outline.\n - DO NOT include introduction, title.\n - DO NOT miss anything mentioned in article outline, except introduction and title.\n - Please use the information I provide to create in-depth, interesting and unique content. 
Also, incorporate the references and data points I provided earlier into the article to increase its value to the reader.\n - MUST be in language of \"{keywords_0} {title_0}\".\n\n\n{outline}\n\n", "query": [], "temperature": 0.1, "top_p": 0.3 diff --git a/agent/templates/text2sql.json b/agent/templates/text2sql.json index 8d69ab3dfb9..cf6dd5307b2 100644 --- a/agent/templates/text2sql.json +++ b/agent/templates/text2sql.json @@ -440,7 +440,7 @@ { "data": { "form": { - "text": "DDL(Data Definition Language).\n\nSearches for relevent database creation statements.\n\nIt should bind with a KB to which DDL is dumped in.\nYou could use 'General' as parsing method and ';' as delimiter." + "text": "DDL(Data Definition Language).\n\nSearches for relevant database creation statements.\n\nIt should bind with a KB to which DDL is dumped in.\nYou could use 'General' as parsing method and ';' as delimiter." }, "label": "Note", "name": "N: DDL" diff --git a/agent/templates/websearch_assistant.json b/agent/templates/websearch_assistant.json index 1da685c6b40..390c83c45f4 100644 --- a/agent/templates/websearch_assistant.json +++ b/agent/templates/websearch_assistant.json @@ -577,7 +577,7 @@ "text": "Based on the keywords, searches on Wikipedia and returns the found content." }, "label": "Note", - "name": "N: Wiukipedia" + "name": "N: Wikipedia" }, "dragging": false, "height": 128, diff --git a/api/apps/llm_app.py b/api/apps/llm_app.py index dccd283db99..9c7c8dcc4f2 100644 --- a/api/apps/llm_app.py +++ b/api/apps/llm_app.py @@ -216,7 +216,7 @@ def apikey_json(keys): base_url=llm["api_base"]) try: arr, tc = mdl.encode(["Test if the api key is available"]) - if len(arr[0]) == 0 or tc == 0: + if len(arr[0]) == 0: raise Exception("Fail") except Exception as e: msg += f"\nFail to access embedding model({llm['llm_name']})." + str(e) @@ -242,7 +242,7 @@ def apikey_json(keys): ) try: arr, tc = mdl.similarity("Hello~ Ragflower!", ["Hi, there!", "Ohh, my friend!"]) - if len(arr) == 0 or tc == 0: + if len(arr) == 0: raise Exception("Not known.") except Exception as e: msg += f"\nFail to access model({llm['llm_name']})." 
+ str( diff --git a/api/db/services/llm_service.py b/api/db/services/llm_service.py index 128f154f2d6..16fd42ca161 100644 --- a/api/db/services/llm_service.py +++ b/api/db/services/llm_service.py @@ -196,8 +196,7 @@ def increase_usage(cls, tenant_id, llm_type, used_tokens, llm_name=None): else: tenant_llms = cls.query(tenant_id=tenant_id, llm_name=llm_name) if not tenant_llms: - if not llm_factory: llm_factory = mdlnm - num = cls.model.create(tenant_id=tenant_id, llm_factory=llm_factory, llm_name=llm_name, used_tokens=used_tokens) + return num else: tenant_llm = tenant_llms[0] num = cls.model.update(used_tokens=tenant_llm.used_tokens + used_tokens)\ diff --git a/api/versions.py b/api/versions.py index 46f6faf2852..99093492c1a 100644 --- a/api/versions.py +++ b/api/versions.py @@ -42,28 +42,11 @@ def get_ragflow_version() -> str: def get_closest_tag_and_count(): try: # Get the current commit hash - commit_id = ( - subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]) + version_info = ( + subprocess.check_output(["git", "describe", "--tags", "--match=v*", "--dirty"]) .strip() .decode("utf-8") ) - # Get the closest tag - closest_tag = ( - subprocess.check_output(["git", "describe", "--tags", "--abbrev=0"]) - .strip() - .decode("utf-8") - ) - # Get the commit count since the closest tag - process = subprocess.Popen( - ["git", "rev-list", "--count", f"{closest_tag}..HEAD"], - stdout=subprocess.PIPE, - ) - commits_count, _ = process.communicate() - commits_count = int(commits_count.strip()) - - if commits_count == 0: - return closest_tag - else: - return f"{commit_id}({closest_tag}~{commits_count})" + return version_info except Exception: return "unknown" diff --git a/docker/.env b/docker/.env index 19d2cf4dd3b..7a8eb62d339 100644 --- a/docker/.env +++ b/docker/.env @@ -81,7 +81,7 @@ SVR_HTTP_PORT=9380 # The RAGFlow Docker image to download. # Defaults to the dev-slim edition, which is the RAGFlow Docker image without embedding models. -RAGFLOW_IMAGE=infiniflow/ragflow:dev-slim +RAGFLOW_IMAGE=infiniflow/ragflow:dev # # To download the RAGFlow Docker image with embedding models, uncomment the following line instead: # RAGFLOW_IMAGE=infiniflow/ragflow:dev diff --git a/docs/guides/develop/build_docker_image.mdx b/docs/guides/develop/build_docker_image.mdx index 07bc7b07c49..3cfc1666fee 100644 --- a/docs/guides/develop/build_docker_image.mdx +++ b/docs/guides/develop/build_docker_image.mdx @@ -40,9 +40,7 @@ While we also test RAGFlow on ARM64 platforms, we do not plan to maintain RAGFlo ```bash git clone https://github.com/infiniflow/ragflow.git cd ragflow/ -pip3 install huggingface-hub nltk -python3 download_deps.py -docker build -f Dockerfile.slim -t infiniflow/ragflow:dev-slim . +docker build --build-arg LIGHTEN=1 -f Dockerfile -t infiniflow/ragflow:dev-slim . ``` @@ -58,8 +56,6 @@ While we also test RAGFlow on ARM64 platforms, we do not plan to maintain RAGFlo ```bash git clone https://github.com/infiniflow/ragflow.git cd ragflow/ -pip3 install huggingface-hub nltk -python3 download_deps.py docker build -f Dockerfile -t infiniflow/ragflow:dev . 
``` diff --git a/download_deps.py b/download_deps.py index 36d83e60bba..c859007c957 100644 --- a/download_deps.py +++ b/download_deps.py @@ -1,4 +1,8 @@ #!/usr/bin/env python3 +# +# Install this script's dependencies with pip3: +# pip3 install huggingface-hub nltk + from huggingface_hub import snapshot_download import nltk diff --git a/ubuntu.sources b/ubuntu.sources deleted file mode 100644 index c4a9002d4b4..00000000000 --- a/ubuntu.sources +++ /dev/null @@ -1,39 +0,0 @@ -Types: deb -URIs: https://mirrors.tuna.tsinghua.edu.cn/ubuntu -Suites: noble noble-updates noble-backports -Components: main restricted universe multiverse -Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg - -# 默认注释了源码镜像以提高 apt update 速度,如有需要可自行取消注释 -# Types: deb-src -# URIs: https://mirrors.tuna.tsinghua.edu.cn/ubuntu -# Suites: noble noble-updates noble-backports -# Components: main restricted universe multiverse -# Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg - -# 以下安全更新软件源包含了官方源与镜像站配置,如有需要可自行修改注释切换 -Types: deb -URIs: https://mirrors.tuna.tsinghua.edu.cn/ubuntu -Suites: noble-security -Components: main restricted universe multiverse -Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg - -# Types: deb-src -# URIs: https://mirrors.tuna.tsinghua.edu.cn/ubuntu -# Suites: noble-security -# Components: main restricted universe multiverse -# Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg - -# 预发布软件源,不建议启用 - -# Types: deb -# URIs: https://mirrors.tuna.tsinghua.edu.cn/ubuntu -# Suites: noble-proposed -# Components: main restricted universe multiverse -# Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg - -# # Types: deb-src -# # URIs: https://mirrors.tuna.tsinghua.edu.cn/ubuntu -# # Suites: noble-proposed -# # Components: main restricted universe multiverse -# # Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg diff --git a/web/package-lock.json b/web/package-lock.json index f8380b43664..55f450db3da 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -28,6 +28,7 @@ "@radix-ui/react-switch": "^1.1.1", "@radix-ui/react-tabs": "^1.1.1", "@radix-ui/react-toast": "^1.2.2", + "@radix-ui/react-tooltip": "^1.1.4", "@tailwindcss/line-clamp": "^0.4.4", "@tanstack/react-query": "^5.40.0", "@tanstack/react-query-devtools": "^5.51.5", @@ -4891,6 +4892,39 @@ } } }, + "node_modules/@radix-ui/react-tooltip": { + "version": "1.1.4", + "resolved": "https://registry.npmmirror.com/@radix-ui/react-tooltip/-/react-tooltip-1.1.4.tgz", + "integrity": "sha512-QpObUH/ZlpaO4YgHSaYzrLO2VuO+ZBFFgGzjMUPwtiYnAzzNNDPJeEGRrT7qNOrWm/Jr08M1vlp+vTHtnSQ0Uw==", + "dependencies": { + "@radix-ui/primitive": "1.1.0", + "@radix-ui/react-compose-refs": "1.1.0", + "@radix-ui/react-context": "1.1.1", + "@radix-ui/react-dismissable-layer": "1.1.1", + "@radix-ui/react-id": "1.1.0", + "@radix-ui/react-popper": "1.2.0", + "@radix-ui/react-portal": "1.1.2", + "@radix-ui/react-presence": "1.1.1", + "@radix-ui/react-primitive": "2.0.0", + "@radix-ui/react-slot": "1.1.0", + "@radix-ui/react-use-controllable-state": "1.1.0", + "@radix-ui/react-visually-hidden": "1.1.0" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, "node_modules/@radix-ui/react-use-callback-ref": { "version": "1.1.0", "resolved": 
"https://registry.npmmirror.com/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.0.tgz", diff --git a/web/package.json b/web/package.json index 63ecd6930ee..6bcea8f0235 100644 --- a/web/package.json +++ b/web/package.json @@ -39,6 +39,7 @@ "@radix-ui/react-switch": "^1.1.1", "@radix-ui/react-tabs": "^1.1.1", "@radix-ui/react-toast": "^1.2.2", + "@radix-ui/react-tooltip": "^1.1.4", "@tailwindcss/line-clamp": "^0.4.4", "@tanstack/react-query": "^5.40.0", "@tanstack/react-query-devtools": "^5.51.5", diff --git a/web/src/components/ui/tooltip.tsx b/web/src/components/ui/tooltip.tsx new file mode 100644 index 00000000000..25fe14c0528 --- /dev/null +++ b/web/src/components/ui/tooltip.tsx @@ -0,0 +1,30 @@ +'use client'; + +import * as TooltipPrimitive from '@radix-ui/react-tooltip'; +import * as React from 'react'; + +import { cn } from '@/lib/utils'; + +const TooltipProvider = TooltipPrimitive.Provider; + +const Tooltip = TooltipPrimitive.Root; + +const TooltipTrigger = TooltipPrimitive.Trigger; + +const TooltipContent = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, sideOffset = 4, ...props }, ref) => ( + +)); +TooltipContent.displayName = TooltipPrimitive.Content.displayName; + +export { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger }; diff --git a/web/src/constants/common.ts b/web/src/constants/common.ts index 3f9440788c0..803632b749f 100644 --- a/web/src/constants/common.ts +++ b/web/src/constants/common.ts @@ -66,27 +66,28 @@ export const LanguageTranslationMap = { Vietnamese: 'vi', }; -export const FileMimeTypeMap = { - bmp: 'image/bmp', - csv: 'text/csv', - odt: 'application/vnd.oasis.opendocument.text', - doc: 'application/msword', - docx: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', - gif: 'image/gif', - htm: 'text/htm', - html: 'text/html', - jpg: 'image/jpg', - jpeg: 'image/jpeg', - pdf: 'application/pdf', - png: 'image/png', - ppt: 'application/vnd.ms-powerpoint', - pptx: 'application/vnd.openxmlformats-officedocument.presentationml.presentation', - tiff: 'image/tiff', - txt: 'text/plain', - xls: 'application/vnd.ms-excel', - xlsx: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', - mp4: 'video/mp4', -}; +export enum FileMimeType { + Bmp = 'image/bmp', + Csv = 'text/csv', + Odt = 'application/vnd.oasis.opendocument.text', + Doc = 'application/msword', + Docx = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + Gif = 'image/gif', + Htm = 'text/htm', + Html = 'text/html', + Jpg = 'image/jpg', + Jpeg = 'image/jpeg', + Pdf = 'application/pdf', + Png = 'image/png', + Ppt = 'application/vnd.ms-powerpoint', + Pptx = 'application/vnd.openxmlformats-officedocument.presentationml.presentation', + Tiff = 'image/tiff', + Txt = 'text/plain', + Xls = 'application/vnd.ms-excel', + Xlsx = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', + Mp4 = 'video/mp4', + Json = 'application/json', +} export const Domain = 'demo.ragflow.io'; diff --git a/web/src/locales/en.ts b/web/src/locales/en.ts index 4e44cf98efd..682882e86a5 100644 --- a/web/src/locales/en.ts +++ b/web/src/locales/en.ts @@ -160,7 +160,7 @@ export default { delimiterTip: 'Supports multiple characters as separators, and the multiple character separators are wrapped with `. 
For example, if it is configured like this: \n`##`; then the text will be separated by line breaks, two #s and a semicolon, and then assembled according to the size of the "token number".', html4excel: 'Excel to HTML', - html4excelTip: `When enabled, the spreadsheet will be parsed into HTML tables; otherwise, it will be parsed into key-value pairs by row.`, + html4excelTip: `When enabled, the spreadsheet will be parsed into HTML tables, and at most 256 rows for one table. Otherwise, it will be parsed into key-value pairs by row.`, autoKeywords: 'Auto-keyword', autoKeywordsTip: `Extract N keywords for each chunk to increase their ranking for queries containing those keywords. You can check or update the added keywords for a chunk from the chunk list. Be aware that extra tokens will be consumed by the LLM specified in 'System model settings'.`, autoQuestions: 'Auto-question', @@ -1077,6 +1077,8 @@ The above is the content you need to summarize.`, ccEmailTip: 'cc_email: CC email (Optional)', subjectTip: 'subject: Email subject (Optional)', contentTip: 'content: Email content (Optional)', + jsonUploadTypeErrorMessage: 'Please upload json file', + jsonUploadContentErrorMessage: 'json file error', }, footer: { profile: 'All rights reserved @ React', diff --git a/web/src/locales/zh-traditional.ts b/web/src/locales/zh-traditional.ts index a015f0e9afc..694135d916f 100644 --- a/web/src/locales/zh-traditional.ts +++ b/web/src/locales/zh-traditional.ts @@ -156,7 +156,7 @@ export default { delimiterTip: '支援多字元作為分隔符,多字元分隔符用`包裹。如配置成這樣:\n`##`;那麼就會用換行,兩個#以及分號先對文字進行分割,然後按照「 token number」大小進行拼裝。', html4excel: '表格轉HTML', - html4excelTip: `Excel 是否會被解析為 HTML 表格。如果為 FALSE,Excel 中的每一行都會形成一個區塊。`, + html4excelTip: `啟用後,電子表格將解析為 HTML 表格,一張表格最多 256 行。否則,會按行解析成鍵值對。`, autoKeywords: '自動關鍵字', autoKeywordsTip: `在查詢此類關鍵字時,為每個區塊提取 N 個關鍵字以提高其排名分數。在「系統模型設定」中設定的 LLM 將消耗額外的 token。您可以在區塊清單中查看結果。 `, autoQuestions: '自動問題', @@ -1011,6 +1011,8 @@ export default { testRun: '試運行', template: '模板轉換', templateDescription: '此元件用於排版各種元件的輸出。 ', + jsonUploadTypeErrorMessage: '請上傳json檔', + jsonUploadContentErrorMessage: 'json 檔案錯誤', }, footer: { profile: '“保留所有權利 @ react”', diff --git a/web/src/locales/zh.ts b/web/src/locales/zh.ts index 2b8f8a0e464..b9df9d07c8b 100644 --- a/web/src/locales/zh.ts +++ b/web/src/locales/zh.ts @@ -157,7 +157,7 @@ export default { delimiterTip: '支持多字符作为分隔符,多字符分隔符用`包裹。如配置成这样:\n`##`;那么就会用换行,两个#以及分号先对文本进行分割,然后按照“ token number”大小进行拼装。', html4excel: '表格转HTML', - html4excelTip: `Excel 是否将被解析为 HTML 表。如果为 FALSE,Excel 中的每一行都将形成一个块。`, + html4excelTip: `开启后电子表格会被解析为 HTML 表格,每张表格最多 256 行,否则会按行解析为键值对。`, autoKeywords: '自动关键词', autoKeywordsTip: `在查询此类关键词时,为每个块提取 N 个关键词以提高其排名得分。在“系统模型设置”中设置的 LLM 将消耗额外的 token。您可以在块列表中查看结果。`, autoQuestions: '自动问题', @@ -1055,6 +1055,8 @@ export default { ccEmailTip: 'cc_email: 抄送邮箱(可选)', subjectTip: 'subject: 邮件主题(可选)', contentTip: 'content: 邮件内容(可选)', + jsonUploadTypeErrorMessage: '请上传json文件', + jsonUploadContentErrorMessage: 'json 文件错误', }, footer: { profile: 'All rights reserved @ React', diff --git a/web/src/pages/chat/index.tsx b/web/src/pages/chat/index.tsx index 3a8b1ad45db..03b132ae522 100644 --- a/web/src/pages/chat/index.tsx +++ b/web/src/pages/chat/index.tsx @@ -334,17 +334,19 @@ const Chat = () => { {x.name} - {conversationActivated === x.id && x.id !== '' && ( -
- - - -
- )} + {conversationActivated === x.id && + x.id !== '' && + !x.is_new && ( +
+ + + +
+ )} ))} diff --git a/web/src/pages/flow/canvas/index.tsx b/web/src/pages/flow/canvas/index.tsx index d372f119862..4e42c0e80ed 100644 --- a/web/src/pages/flow/canvas/index.tsx +++ b/web/src/pages/flow/canvas/index.tsx @@ -1,8 +1,16 @@ +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from '@/components/ui/tooltip'; import { useSetModalState } from '@/hooks/common-hooks'; +import { FolderInput, FolderOutput } from 'lucide-react'; import { useCallback, useEffect } from 'react'; import ReactFlow, { Background, ConnectionMode, + ControlButton, Controls, NodeMouseHandler, } from 'reactflow'; @@ -13,12 +21,14 @@ import FormDrawer from '../flow-drawer'; import { useGetBeginNodeDataQuery, useHandleDrop, + useHandleExportOrImportJsonFile, useSelectCanvasData, useShowFormDrawer, useValidateConnection, useWatchNodeFormDataChange, } from '../hooks'; import { BeginQuery } from '../interface'; +import JsonUploadModal from '../json-upload-modal'; import RunDrawer from '../run-drawer'; import { ButtonEdge } from './edge'; import styles from './index.less'; @@ -115,6 +125,14 @@ function FlowCanvas({ drawerVisible, hideDrawer }: IProps) { const getBeginNodeDataQuery = useGetBeginNodeDataQuery(); + const { + handleExportJson, + handleImportJson, + fileUploadVisible, + onFileUploadOk, + hideFileUploadModal, + } = useHandleExportOrImportJsonFile(); + useEffect(() => { if (drawerVisible) { const query: BeginQuery[] = getBeginNodeDataQuery(); @@ -192,7 +210,28 @@ function FlowCanvas({ drawerVisible, hideDrawer }: IProps) { deleteKeyCode={['Delete', 'Backspace']} > - + + + + + + + + Import + + + + + + + + + + Export + + + + {formDrawerVisible && ( )} + {fileUploadVisible && ( + + )} ); } diff --git a/web/src/pages/flow/hooks.tsx b/web/src/pages/flow/hooks.tsx index 66e4efc948f..a8f7df5c2f1 100644 --- a/web/src/pages/flow/hooks.tsx +++ b/web/src/pages/flow/hooks.tsx @@ -12,14 +12,16 @@ import React, { import { Connection, Edge, Node, Position, ReactFlowInstance } from 'reactflow'; // import { shallow } from 'zustand/shallow'; import { variableEnabledFieldMap } from '@/constants/chat'; +import { FileMimeType } from '@/constants/common'; import { ModelVariableType, settledModelVariableMap, } from '@/constants/knowledge'; import { useFetchModelId } from '@/hooks/logic-hooks'; import { Variable } from '@/interfaces/database/chat'; +import { downloadJsonFile } from '@/utils/file-util'; import { useDebounceEffect } from 'ahooks'; -import { FormInstance, message } from 'antd'; +import { FormInstance, UploadFile, message } from 'antd'; import { DefaultOptionType } from 'antd/es/select'; import dayjs from 'dayjs'; import { humanId } from 'human-id'; @@ -261,30 +263,45 @@ export const useShowFormDrawer = () => { }; }; -export const useSaveGraph = () => { +export const useBuildDslData = () => { const { data } = useFetchFlow(); - const { setFlow, loading } = useSetFlow(); - const { id } = useParams(); const { nodes, edges } = useGraphStore((state) => state); - useEffect(() => {}, [nodes]); - const saveGraph = useCallback( - async (currentNodes?: Node[]) => { + + const buildDslData = useCallback( + (currentNodes?: Node[]) => { const dslComponents = buildDslComponentsByGraph( currentNodes ?? nodes, edges, data.dsl.components, ); + + return { + ...data.dsl, + graph: { nodes: currentNodes ?? 
nodes, edges }, + components: dslComponents, + }; + }, + [data.dsl, edges, nodes], + ); + + return { buildDslData }; +}; + +export const useSaveGraph = () => { + const { data } = useFetchFlow(); + const { setFlow, loading } = useSetFlow(); + const { id } = useParams(); + const { buildDslData } = useBuildDslData(); + + const saveGraph = useCallback( + async (currentNodes?: Node[]) => { return setFlow({ id, title: data.title, - dsl: { - ...data.dsl, - graph: { nodes: currentNodes ?? nodes, edges }, - components: dslComponents, - }, + dsl: buildDslData(currentNodes), }); }, - [nodes, edges, setFlow, id, data], + [setFlow, id, data.title, buildDslData], ); return { saveGraph, loading }; @@ -774,3 +791,54 @@ export const useWatchAgentChange = (chatDrawerVisible: boolean) => { return time; }; + +export const useHandleExportOrImportJsonFile = () => { + const { buildDslData } = useBuildDslData(); + const { + visible: fileUploadVisible, + hideModal: hideFileUploadModal, + showModal: showFileUploadModal, + } = useSetModalState(); + const setGraphInfo = useSetGraphInfo(); + const { data } = useFetchFlow(); + const { t } = useTranslation(); + + const onFileUploadOk = useCallback( + async (fileList: UploadFile[]) => { + if (fileList.length > 0) { + const file: File = fileList[0] as unknown as File; + if (file.type !== FileMimeType.Json) { + message.error(t('flow.jsonUploadTypeErrorMessage')); + return; + } + + const graphStr = await file.text(); + const errorMessage = t('flow.jsonUploadContentErrorMessage'); + try { + const graph = JSON.parse(graphStr); + if (graphStr && !isEmpty(graph) && Array.isArray(graph?.nodes)) { + setGraphInfo(graph ?? ({} as IGraph)); + hideFileUploadModal(); + } else { + message.error(errorMessage); + } + } catch (error) { + message.error(errorMessage); + } + } + }, + [hideFileUploadModal, setGraphInfo, t], + ); + + const handleExportJson = useCallback(() => { + downloadJsonFile(buildDslData().graph, `${data.title}.json`); + }, [buildDslData, data.title]); + + return { + fileUploadVisible, + handleExportJson, + handleImportJson: showFileUploadModal, + hideFileUploadModal, + onFileUploadOk, + }; +}; diff --git a/web/src/pages/flow/json-upload-modal/index.less b/web/src/pages/flow/json-upload-modal/index.less new file mode 100644 index 00000000000..8472339fed6 --- /dev/null +++ b/web/src/pages/flow/json-upload-modal/index.less @@ -0,0 +1,13 @@ +.uploader { + :global { + .ant-upload-list { + max-height: 40vh; + overflow-y: auto; + } + } +} + +.uploadLimit { + color: red; + font-size: 12px; +} diff --git a/web/src/pages/flow/json-upload-modal/index.tsx b/web/src/pages/flow/json-upload-modal/index.tsx new file mode 100644 index 00000000000..085ecf349e9 --- /dev/null +++ b/web/src/pages/flow/json-upload-modal/index.tsx @@ -0,0 +1,97 @@ +import { useTranslate } from '@/hooks/common-hooks'; +import { IModalProps } from '@/interfaces/common'; +import { InboxOutlined } from '@ant-design/icons'; +import { Modal, Upload, UploadFile, UploadProps } from 'antd'; +import { Dispatch, SetStateAction, useState } from 'react'; + +import { FileMimeType } from '@/constants/common'; + +import styles from './index.less'; + +const { Dragger } = Upload; + +const FileUpload = ({ + directory, + fileList, + setFileList, +}: { + directory: boolean; + fileList: UploadFile[]; + setFileList: Dispatch>; +}) => { + const { t } = useTranslate('fileManager'); + const props: UploadProps = { + multiple: false, + accept: FileMimeType.Json, + onRemove: (file) => { + const index = fileList.indexOf(file); + const 
newFileList = fileList.slice(); + newFileList.splice(index, 1); + setFileList(newFileList); + }, + beforeUpload: (file) => { + setFileList(() => { + return [file]; + }); + + return false; + }, + directory, + fileList, + }; + + return ( + +
+    <Dragger {...props} className={styles.uploader}>
+      <p className="ant-upload-drag-icon">
+        <InboxOutlined />
+      </p>
+      <p className="ant-upload-text">{t('uploadTitle')}</p>
+      <p className="ant-upload-hint">{t('uploadDescription')}</p>
+      {false && (
+        <p className={styles.uploadLimit}>{t('uploadLimit')}</p>
+      )}
+    </Dragger>
+ ); +}; + +const JsonUploadModal = ({ + visible, + hideModal, + loading, + onOk: onFileUploadOk, +}: IModalProps) => { + const { t } = useTranslate('fileManager'); + const [fileList, setFileList] = useState([]); + const [directoryFileList, setDirectoryFileList] = useState([]); + + const clearFileList = () => { + setFileList([]); + setDirectoryFileList([]); + }; + + const onOk = async () => { + const ret = await onFileUploadOk?.([...fileList, ...directoryFileList]); + return ret; + }; + + const afterClose = () => { + clearFileList(); + }; + + return ( + + + + ); +}; + +export default JsonUploadModal; diff --git a/web/src/utils/file-util.ts b/web/src/utils/file-util.ts index 9645b560a35..cde7f0e6b9e 100644 --- a/web/src/utils/file-util.ts +++ b/web/src/utils/file-util.ts @@ -1,3 +1,4 @@ +import { FileMimeType } from '@/constants/common'; import fileManagerService from '@/services/file-manager-service'; import { UploadFile } from 'antd'; @@ -137,3 +138,11 @@ export const formatBytes = (x: string | number) => { return n.toFixed(n < 10 && l > 0 ? 1 : 0) + ' ' + Units[l]; }; + +export const downloadJsonFile = async ( + data: Record, + fileName: string, +) => { + const blob = new Blob([JSON.stringify(data)], { type: FileMimeType.Json }); + downloadFileFromBlob(blob, fileName); +};
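The new downloadJsonFile helper leans on two pieces that this diff references but does not define: FileMimeType.Json from '@/constants/common' and the pre-existing downloadFileFromBlob helper in file-util.ts. A minimal sketch of both, assuming the conventional MIME value and the usual object-URL/anchor download pattern; the repository's actual definitions may differ.

// Hedged sketch only — FileMimeType and downloadFileFromBlob already exist in the repo;
// the enum value and helper body below are assumptions for illustration, not the project's code.
export enum FileMimeType {
  Json = 'application/json',
}

export const downloadFileFromBlob = (blob: Blob, fileName: string) => {
  // Create a temporary object URL, click a hidden anchor to trigger the download,
  // then release the URL so the blob can be garbage-collected.
  const url = URL.createObjectURL(blob);
  const anchor = document.createElement('a');
  anchor.href = url;
  anchor.download = fileName;
  document.body.appendChild(anchor);
  anchor.click();
  document.body.removeChild(anchor);
  URL.revokeObjectURL(url);
};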
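The import path in useHandleExportOrImportJsonFile fails in two distinct ways, which is why two locale keys are added: jsonUploadTypeErrorMessage for a non-JSON file and jsonUploadContentErrorMessage for JSON that does not look like a graph. A standalone sketch of that check follows; validateAgentJson is a hypothetical helper name used only for illustration.

// Hedged sketch: mirrors the checks performed in onFileUploadOk.
// 'type'    -> surfaced via flow.jsonUploadTypeErrorMessage
// 'content' -> surfaced via flow.jsonUploadContentErrorMessage
type ImportJsonError = 'type' | 'content' | null;

export const validateAgentJson = async (file: File): Promise<ImportJsonError> => {
  if (file.type !== 'application/json') {
    return 'type';
  }
  try {
    const graph = JSON.parse(await file.text());
    // Accepted only when the parsed value is a graph-like object with a nodes array;
    // onFileUploadOk additionally rejects empty objects via isEmpty.
    return Array.isArray(graph?.nodes) ? null : 'content';
  } catch {
    return 'content';
  }
};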
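For context, JsonUploadModal renders the FileUpload dragger inside an antd Modal whose confirm button forwards the selected files to onFileUploadOk. A minimal sketch of that wiring; only visible, hideModal, loading, and onOk are confirmed by the diff above, so the remaining props and the title key are assumptions.

// Hedged sketch of JsonUploadModal's render, assuming antd v5's `open` prop and a
// fileManager title key; adjust to the project's actual Modal usage.
return (
  <Modal
    title={t('uploadFile')}
    open={visible}
    onOk={onOk}
    onCancel={hideModal}
    confirmLoading={loading}
    afterClose={afterClose}
  >
    {/* directoryFileList in state hints that a second, directory-based uploader may also be rendered */}
    <FileUpload
      directory={false}
      fileList={fileList}
      setFileList={setFileList}
    />
  </Modal>
);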