Merge branch 'langgenius:main' into lindorm-vdb
AlwaysBluer authored Dec 18, 2024
2 parents 179dfe9 + 558ab25 commit 7f3b8e0
Showing 246 changed files with 4,736 additions and 1,889 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/expose_service_ports.sh
@@ -9,5 +9,6 @@ yq eval '.services["pgvecto-rs"].ports += ["5431:5432"]' -i docker/docker-compose.yaml
yq eval '.services["elasticsearch"].ports += ["9200:9200"]' -i docker/docker-compose.yaml
yq eval '.services.couchbase-server.ports += ["8091-8096:8091-8096"]' -i docker/docker-compose.yaml
yq eval '.services.couchbase-server.ports += ["11210:11210"]' -i docker/docker-compose.yaml
yq eval '.services.tidb.ports += ["4000:4000"]' -i docker/docker-compose.yaml

echo "Ports exposed for sandbox, weaviate, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase"
echo "Ports exposed for sandbox, weaviate, tidb, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase"
3 changes: 2 additions & 1 deletion .github/workflows/vdb-tests.yml
@@ -51,7 +51,7 @@ jobs:
- name: Expose Service Ports
run: sh .github/workflows/expose_service_ports.sh

- name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase)
- name: Set up Vector Stores (TiDB, Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase)
uses: hoverkraft-tech/compose-action@v2.0.2
with:
compose-file: |
@@ -67,6 +67,7 @@ jobs:
pgvector
chroma
elasticsearch
tidb
- name: Test Vector Stores
run: poetry run -C api bash dev/pytest/pytest_vdb.sh
20 changes: 8 additions & 12 deletions api/.env.example
@@ -60,17 +60,8 @@ DB_DATABASE=dify
STORAGE_TYPE=opendal

# Apache OpenDAL storage configuration, refer to https://github.com/apache/opendal
STORAGE_OPENDAL_SCHEME=fs
# OpenDAL FS
OPENDAL_SCHEME=fs
OPENDAL_FS_ROOT=storage
# OpenDAL S3
OPENDAL_S3_ROOT=/
OPENDAL_S3_BUCKET=your-bucket-name
OPENDAL_S3_ENDPOINT=https://s3.amazonaws.com
OPENDAL_S3_ACCESS_KEY_ID=your-access-key
OPENDAL_S3_SECRET_ACCESS_KEY=your-secret-key
OPENDAL_S3_REGION=your-region
OPENDAL_S3_SERVER_SIDE_ENCRYPTION=

# S3 Storage configuration
S3_USE_AWS_MANAGED_IAM=false
@@ -313,8 +304,7 @@ UPLOAD_VIDEO_FILE_SIZE_LIMIT=100
UPLOAD_AUDIO_FILE_SIZE_LIMIT=50

# Model configuration
MULTIMODAL_SEND_IMAGE_FORMAT=base64
MULTIMODAL_SEND_VIDEO_FORMAT=base64
MULTIMODAL_SEND_FORMAT=base64
PROMPT_GENERATION_MAX_TOKENS=512
CODE_GENERATION_MAX_TOKENS=1024

@@ -399,6 +389,8 @@ LOG_FILE_BACKUP_COUNT=5
LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S
# Log Timezone
LOG_TZ=UTC
# Log format
LOG_FORMAT=%(asctime)s,%(msecs)d %(levelname)-2s [%(filename)s:%(lineno)d] %(req_id)s %(message)s

# Indexing configuration
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
@@ -431,3 +423,7 @@ RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5

CREATE_TIDB_SERVICE_JOB_ENABLED=false

# Maximum number of submitted thread count in a ThreadPool for parallel node execution
MAX_SUBMIT_COUNT=100
# Lockout duration in seconds
LOGIN_LOCKOUT_DURATION=86400
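
Note on the LOG_FORMAT example added above: %(req_id)s is not a standard logging.LogRecord attribute, so this format only renders if something injects that field on every record. A minimal sketch of one way to do that (the filter class and the empty default are illustrative assumptions, not code from this commit):

```python
import logging


class RequestIdFilter(logging.Filter):
    """Ensure every record carries a req_id attribute so %(req_id)s never raises."""

    def filter(self, record: logging.LogRecord) -> bool:
        if not hasattr(record, "req_id"):
            record.req_id = ""  # a real app would pull this from the request context
        return True


handler = logging.StreamHandler()
handler.setFormatter(
    logging.Formatter(
        fmt="%(asctime)s,%(msecs)d %(levelname)-2s [%(filename)s:%(lineno)d] %(req_id)s %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )
)
handler.addFilter(RequestIdFilter())

logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)
logger.info("hello")  # rendered with an empty req_id unless a filter supplies one
```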
27 changes: 22 additions & 5 deletions api/app.py
@@ -1,13 +1,30 @@
from app_factory import create_app
from libs import threadings_utils, version_utils
from libs import version_utils

# preparation before creating app
version_utils.check_supported_python_version()
threadings_utils.apply_gevent_threading_patch()


def is_db_command():
import sys

if len(sys.argv) > 1 and sys.argv[0].endswith("flask") and sys.argv[1] == "db":
return True
return False


# create app
app = create_app()
celery = app.extensions["celery"]
if is_db_command():
from app_factory import create_migrations_app

app = create_migrations_app()
else:
from app_factory import create_app
from libs import threadings_utils

threadings_utils.apply_gevent_threading_patch()

app = create_app()
celery = app.extensions["celery"]

if __name__ == "__main__":
app.run(host="0.0.0.0", port=5001)
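
For readability, the command-detection logic from the hunk above as a self-contained sketch (the argv parameter is added here for testability; the committed code reads sys.argv directly). As the diff suggests, `flask db ...` invocations get a lightweight migrations-only app and skip the gevent threading patch:

```python
import sys


def is_db_command(argv: list[str] | None = None) -> bool:
    """True for invocations such as `flask db upgrade` or `flask db migrate`."""
    argv = sys.argv if argv is None else argv
    return len(argv) > 1 and argv[0].endswith("flask") and argv[1] == "db"


if __name__ == "__main__":
    assert is_db_command(["/usr/local/bin/flask", "db", "upgrade"])
    assert not is_db_command(["/usr/local/bin/flask", "run"])
    assert not is_db_command(["gunicorn", "app:app"])
```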
21 changes: 11 additions & 10 deletions api/app_factory.py
@@ -1,5 +1,4 @@
import logging
import os
import time

from configs import dify_config
@@ -17,15 +16,6 @@ def create_flask_app_with_configs() -> DifyApp:
dify_app = DifyApp(__name__)
dify_app.config.from_mapping(dify_config.model_dump())

# populate configs into system environment variables
for key, value in dify_app.config.items():
if isinstance(value, str):
os.environ[key] = value
elif isinstance(value, int | float | bool):
os.environ[key] = str(value)
elif value is None:
os.environ[key] = ""

return dify_app


@@ -98,3 +88,14 @@ def initialize_extensions(app: DifyApp):
end_time = time.perf_counter()
if dify_config.DEBUG:
logging.info(f"Loaded {short_name} ({round((end_time - start_time) * 1000, 2)} ms)")


def create_migrations_app():
app = create_flask_app_with_configs()
from extensions import ext_database, ext_migrate

# Initialize only required extensions
ext_database.init_app(app)
ext_migrate.init_app(app)

return app
30 changes: 21 additions & 9 deletions api/configs/feature/__init__.py
@@ -439,6 +439,17 @@ class WorkflowConfig(BaseSettings):
)


class WorkflowNodeExecutionConfig(BaseSettings):
"""
Configuration for workflow node execution
"""

MAX_SUBMIT_COUNT: PositiveInt = Field(
description="Maximum number of submitted thread count in a ThreadPool for parallel node execution",
default=100,
)


class AuthConfig(BaseSettings):
"""
Configuration for authentication and OAuth
@@ -474,6 +485,11 @@ class AuthConfig(BaseSettings):
default=60,
)

LOGIN_LOCKOUT_DURATION: PositiveInt = Field(
description="Time (in seconds) a user must wait before retrying login after exceeding the rate limit.",
default=86400,
)


class ModerationConfig(BaseSettings):
"""
@@ -649,14 +665,9 @@ class IndexingConfig(BaseSettings):
)


class VisionFormatConfig(BaseSettings):
MULTIMODAL_SEND_IMAGE_FORMAT: Literal["base64", "url"] = Field(
description="Format for sending images in multimodal contexts ('base64' or 'url'), default is base64",
default="base64",
)

MULTIMODAL_SEND_VIDEO_FORMAT: Literal["base64", "url"] = Field(
description="Format for sending videos in multimodal contexts ('base64' or 'url'), default is base64",
class MultiModalTransferConfig(BaseSettings):
MULTIMODAL_SEND_FORMAT: Literal["base64", "url"] = Field(
description="Format for sending files in multimodal contexts ('base64' or 'url'), default is base64",
default="base64",
)

@@ -762,19 +773,20 @@ class FeatureConfig(
FileAccessConfig,
FileUploadConfig,
HttpConfig,
VisionFormatConfig,
InnerAPIConfig,
IndexingConfig,
LoggingConfig,
MailConfig,
ModelLoadBalanceConfig,
ModerationConfig,
MultiModalTransferConfig,
PositionConfig,
RagEtlConfig,
SecurityConfig,
ToolConfig,
UpdateConfig,
WorkflowConfig,
WorkflowNodeExecutionConfig,
WorkspaceConfig,
LoginConfig,
# hosted services config
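
Both new settings classes follow the pydantic-settings pattern already used in this file: field names double as environment variable names, so the MAX_SUBMIT_COUNT and LOGIN_LOCKOUT_DURATION entries added to api/.env.example above map straight onto these fields. A minimal standalone sketch of that mapping (trimmed to one field; the override value is chosen for illustration):

```python
import os

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class WorkflowNodeExecutionConfig(BaseSettings):
    MAX_SUBMIT_COUNT: PositiveInt = Field(
        description="Maximum number of threads submitted to the ThreadPool for parallel node execution",
        default=100,
    )


os.environ["MAX_SUBMIT_COUNT"] = "50"  # e.g. set via api/.env
print(WorkflowNodeExecutionConfig().MAX_SUBMIT_COUNT)  # -> 50

# PositiveInt rejects non-positive values, so "0" would raise a ValidationError
# when the settings object is constructed.
```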
46 changes: 2 additions & 44 deletions api/configs/middleware/storage/opendal_storage_config.py
@@ -1,51 +1,9 @@
from enum import StrEnum
from typing import Literal

from pydantic import Field
from pydantic_settings import BaseSettings


class OpenDALScheme(StrEnum):
FS = "fs"
S3 = "s3"


class OpenDALStorageConfig(BaseSettings):
STORAGE_OPENDAL_SCHEME: str = Field(
default=OpenDALScheme.FS.value,
OPENDAL_SCHEME: str = Field(
default="fs",
description="OpenDAL scheme.",
)
# FS
OPENDAL_FS_ROOT: str = Field(
default="storage",
description="Root path for local storage.",
)
# S3
OPENDAL_S3_ROOT: str = Field(
default="/",
description="Root path for S3 storage.",
)
OPENDAL_S3_BUCKET: str = Field(
default="",
description="S3 bucket name.",
)
OPENDAL_S3_ENDPOINT: str = Field(
default="https://s3.amazonaws.com",
description="S3 endpoint URL.",
)
OPENDAL_S3_ACCESS_KEY_ID: str = Field(
default="",
description="S3 access key ID.",
)
OPENDAL_S3_SECRET_ACCESS_KEY: str = Field(
default="",
description="S3 secret access key.",
)
OPENDAL_S3_REGION: str = Field(
default="",
description="S3 region.",
)
OPENDAL_S3_SERVER_SIDE_ENCRYPTION: Literal["aws:kms", ""] = Field(
default="",
description="S3 server-side encryption.",
)
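
With the scheme-specific fields removed, only OPENDAL_SCHEME stays in the typed config; the per-backend options presumably reach OpenDAL some other way, for example as extra environment-derived keyword arguments. A hedged sketch of building an operator for the default fs scheme with the opendal Python binding (the env-var wiring here is an assumption, not this commit's storage code):

```python
import os

import opendal

scheme = os.getenv("OPENDAL_SCHEME", "fs")
root = os.getenv("OPENDAL_FS_ROOT", "storage")

# Operator(scheme, **options) is the opendal binding's entry point;
# for the fs scheme the only required option is the root directory.
op = opendal.Operator(scheme, root=root)
op.write("hello.txt", b"hello from dify storage")
data = op.read("hello.txt")
print(bytes(data))
```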
2 changes: 1 addition & 1 deletion api/configs/packaging/__init__.py
@@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):

CURRENT_VERSION: str = Field(
description="Dify version",
default="0.13.2",
default="0.14.0",
)

COMMIT_SHA: str = Field(
4 changes: 2 additions & 2 deletions api/controllers/console/app/model_config.py
@@ -65,7 +65,7 @@ def post(self, app_model):
provider_type=agent_tool_entity.provider_type,
identity_id=f"AGENT.{app_model.id}",
)
except Exception as e:
except Exception:
continue

# get decrypted parameters
@@ -97,7 +97,7 @@ def post(self, app_model):
app_id=app_model.id,
agent_tool=agent_tool_entity,
)
except Exception as e:
except Exception:
continue

manager = ToolParameterConfigurationManager(
9 changes: 5 additions & 4 deletions api/controllers/console/app/ops_trace.py
@@ -1,4 +1,5 @@
from flask_restful import Resource, reqparse
from werkzeug.exceptions import BadRequest

from controllers.console import api
from controllers.console.app.error import TracingConfigCheckError, TracingConfigIsExist, TracingConfigNotExist
@@ -26,7 +27,7 @@ def get(self, app_id):
return {"has_not_configured": True}
return trace_config
except Exception as e:
raise e
raise BadRequest(str(e))

@setup_required
@login_required
@@ -48,7 +49,7 @@ def post(self, app_id):
raise TracingConfigCheckError()
return result
except Exception as e:
raise e
raise BadRequest(str(e))

@setup_required
@login_required
@@ -68,7 +69,7 @@ def patch(self, app_id):
raise TracingConfigNotExist()
return {"result": "success"}
except Exception as e:
raise e
raise BadRequest(str(e))

@setup_required
@login_required
@@ -85,7 +86,7 @@ def delete(self, app_id):
raise TracingConfigNotExist()
return {"result": "success"}
except Exception as e:
raise e
raise BadRequest(str(e))


api.add_resource(TraceAppConfigApi, "/apps/<uuid:app_id>/trace-config")
2 changes: 1 addition & 1 deletion api/controllers/console/datasets/datasets_document.py
@@ -948,7 +948,7 @@ def post(self, dataset_id):
if document.indexing_status == "completed":
raise DocumentAlreadyFinishedError()
retry_documents.append(document)
except Exception as e:
except Exception:
logging.exception(f"Failed to retry document, document id: {document_id}")
continue
# retry document
2 changes: 2 additions & 0 deletions api/controllers/console/explore/recommended_app.py
@@ -4,6 +4,7 @@
from constants.languages import languages
from controllers.console import api
from controllers.console.wraps import account_initialization_required
from libs.helper import AppIconUrlField
from libs.login import login_required
from services.recommended_app_service import RecommendedAppService

@@ -12,6 +13,7 @@
"name": fields.String,
"mode": fields.String,
"icon": fields.String,
"icon_url": AppIconUrlField,
"icon_background": fields.String,
}

4 changes: 4 additions & 0 deletions api/controllers/console/files.py
@@ -1,6 +1,7 @@
from flask import request
from flask_login import current_user
from flask_restful import Resource, marshal_with
from werkzeug.exceptions import Forbidden

import services
from configs import dify_config
@@ -58,6 +59,9 @@ def post(self):
if not file.filename:
raise FilenameNotExistsError

if source == "datasets" and not current_user.is_dataset_editor:
raise Forbidden()

if source not in ("datasets", None):
source = None
