diff --git a/.github/workflows/build_release_and_notify.yml b/.github/workflows/build_release_and_notify.yml
index 9bc43ac..7870623 100644
--- a/.github/workflows/build_release_and_notify.yml
+++ b/.github/workflows/build_release_and_notify.yml
@@ -1,16 +1,17 @@
# .github/workflows/build_release_and_notify.yml
-
name: Build and Deploy Executables
on:
release:
types: [published] # Release가 'published' 상태가 될 때 트리거
+ workflow_dispatch:
jobs:
# ==================================
# 파이프라인 시작 알림
# ==================================
start:
+ if: github.event_name == 'release'
runs-on: ubuntu-latest
steps:
- name: Send Pipeline Start Notification
@@ -28,7 +29,6 @@ jobs:
# 실행 파일 빌드
# ==================================
build:
- needs: start
strategy:
matrix:
os: [macos-latest, windows-latest]
@@ -75,6 +75,8 @@ jobs:
# 6. PyInstaller를 사용해 파이썬 코드를 실행 파일로 만듭니다.
- name: Build executable with PyInstaller
shell: bash
+ env:
+ ENV_AES256_KEY: ${{ secrets.ENV_AES256_KEY }}
run: poetry run pyinstaller --clean --additional-hooks-dir ./hooks --add-data "app/assets:assets" --onefile --name ${{ env.EXE_NAME }} app/main.py
# 7. 빌드된 실행 파일을 다음 단계(deploy)에서 사용할 수 있도록 아티팩트로 업로드합니다.
@@ -90,6 +92,7 @@ jobs:
# ==================================
deploy:
needs: build
+ if: github.event_name == 'release'
runs-on: ubuntu-latest
steps:
@@ -119,7 +122,7 @@ jobs:
if git diff-index --quiet HEAD; then
echo "No changes to commit to QGenie APP repository."
else
- git commit -m "feat: API 실행 파일 업데이트 (${{ github.ref_name }})"
+ git commit -m "feat: Update API executable (${{ github.ref_name }})"
git push
fi
@@ -129,7 +132,7 @@ jobs:
finish:
needs: deploy
runs-on: ubuntu-latest
- if: always()
+ if: always() && github.event_name == 'release'
steps:
- name: Send Success Notification
diff --git a/.github/workflows/pr_bot.yml b/.github/workflows/pr_bot.yml
index 22daee4..f9f5838 100644
--- a/.github/workflows/pr_bot.yml
+++ b/.github/workflows/pr_bot.yml
@@ -7,6 +7,10 @@ on:
types: [opened, closed, reopened, synchronize]
issue_comment:
types: [created]
+ pull_request_review:
+ types: [submitted]
+ pull_request_review_comment:
+ types: [created]
jobs:
notify:
@@ -23,7 +27,7 @@ jobs:
"username": "GitHub PR 봇",
"embeds": [{
"title": "Pull Request #${{ github.event.pull_request.number }}: ${{ github.event.pull_request.title }}",
- "description": "**${{ github.actor }}**님이 Pull Request를 생성하거나 업데이트했습니다.",
+ "description": "**${{ github.actor }}**님이 Pull Request를 생성 또는 업데이트했습니다.",
"url": "${{ github.event.pull_request.html_url }}",
"color": 2243312
}]
@@ -84,3 +88,41 @@ jobs:
}]
}' \
${{ secrets.DISCORD_WEBHOOK_URL }}
+
+ # ----------------------------
+ # 리뷰 제출(Submit Review) 알림
+ # ----------------------------
+ - name: Send Review Submitted Notification
+ if: github.event_name == 'pull_request_review' && github.event.action == 'submitted'
+ run: |
+ REVIEW_BODY=$(jq -r '.review.body // ""' "$GITHUB_EVENT_PATH" | sed 's/"/\\"/g' | sed ':a;N;$!ba;s/\n/\\n/g')
+ curl -X POST -H "Content-Type: application/json" \
+ -d "{
+ \"username\": \"GitHub 리뷰 봇\",
+ \"embeds\": [{
+ \"title\": \"New Review on PR #${{ github.event.pull_request.number }}\",
+ \"description\": \"**${{ github.actor }}**님이 리뷰를 남겼습니다: \\n${REVIEW_BODY}\",
+ \"url\": \"${{ github.event.review.html_url }}\",
+ \"color\": 16776960
+ }]
+ }" \
+ ${{ secrets.DISCORD_WEBHOOK_URL }}
+
+ # -------------------------
+ # 리뷰 댓글에 대한 답글 알림
+ # -------------------------
+ - name: Send Review Comment Notification
+ if: github.event_name == 'pull_request_review_comment' && github.event.action == 'created'
+ run: |
+ COMMENT_BODY=$(jq -r '.comment.body // ""' "$GITHUB_EVENT_PATH" | sed 's/"/\\"/g' | sed ':a;N;$!ba;s/\n/\\n/g')
+ curl -X POST -H "Content-Type: application/json" \
+ -d "{
+ \"username\": \"GitHub 댓글 봇\",
+ \"embeds\": [{
+ \"title\": \"New Reply on PR #${{ github.event.pull_request.number }}\",
+ \"description\": \"**${{ github.actor }}**님의 새 답글: \\n${COMMENT_BODY}\",
+ \"url\": \"${{ github.event.comment.html_url }}\",
+ \"color\": 15105570
+ }]
+ }" \
+ ${{ secrets.DISCORD_WEBHOOK_URL }}
\ No newline at end of file
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 5977fee..99ba938 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,7 +2,7 @@ repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: "v0.12.2" # ruff 버전에 맞게 수정
hooks:
- - id: ruff
+ - id: ruff-check
args: ["--fix"] # 자동 수정 적용
- repo: https://github.com/psf/black
diff --git a/README.md b/README.md
index 2f093e9..2295f16 100644
--- a/README.md
+++ b/README.md
@@ -32,7 +32,7 @@
```bash
git clone https://github.com/Queryus/QGenie_api.git
- cd app # 복제된 저장소 디렉토리로 이동
+ cd QGenie_api
```
@@ -88,13 +88,13 @@
```bash
poetry shell
- uvicorn main:app --reload
+ uvicorn app.main:app --host 0.0.0.0 --port 39722 --reload
```
또는 Poetry Run을 사용하여 직접 실행할 수 있습니다.
```bash
- poetry run uvicorn main:app --reload
+ poetry run uvicorn app.main:app --host 0.0.0.0 --port 39722 --reload
```
### **코드 컨벤션 (PEP 8, Ruff, Black)**
@@ -141,9 +141,9 @@
1. **브라우저 확인**
- - 기본 루트 엔드포인트:
- - 헬스 체크 엔드포인트:
- - API 문서:
+ - 기본 루트 엔드포인트:
+ - 헬스 체크 엔드포인트:
+ - API 문서:
2. **CLI로 접속 확인하기**
@@ -151,14 +151,13 @@
- 기본 루트 엔드포인트:
```bash
- curl http://localhost:8000/
+ curl http://localhost:39722/
```
- 헬스 체크 엔드포인트:
```bash
- curl http://localhost:8000/health
+ curl http://localhost:39722/health
```
- API 문서:
```bash
- curl http://localhost:8000/openapi.json
+ curl http://localhost:39722/openapi.json
```
-
diff --git a/app/api/annotation_api.py b/app/api/annotation_api.py
new file mode 100644
index 0000000..f974189
--- /dev/null
+++ b/app/api/annotation_api.py
@@ -0,0 +1,75 @@
+from fastapi import APIRouter, Depends
+
+from app.core.response import ResponseMessage
+from app.core.status import CommonCode
+from app.schemas.annotation.request_model import AnnotationCreateRequest
+from app.schemas.annotation.response_model import AnnotationDeleteResponse, FullAnnotationResponse
+from app.services.annotation_service import AnnotationService, annotation_service
+
+annotation_service_dependency = Depends(lambda: annotation_service)
+
+router = APIRouter()
+
+
+@router.post(
+ "/create",
+ response_model=ResponseMessage[FullAnnotationResponse],
+ summary="새로운 어노테이션 생성",
+)
+async def create_annotation(
+ request: AnnotationCreateRequest,
+ service: AnnotationService = annotation_service_dependency,
+) -> ResponseMessage[FullAnnotationResponse]:
+ """
+ `db_profile_id`를 받아 AI를 통해 DB 스키마를 분석하고 어노테이션을 생성하여 반환합니다.
+ """
+ new_annotation = await service.create_annotation(request)
+ return ResponseMessage.success(value=new_annotation, code=CommonCode.SUCCESS_CREATE_ANNOTATION)
+
+
+@router.get(
+ "/find/{annotation_id}",
+ response_model=ResponseMessage[FullAnnotationResponse],
+ summary="특정 어노테이션 상세 정보 조회",
+)
+def get_annotation(
+ annotation_id: str,
+ service: AnnotationService = annotation_service_dependency,
+) -> ResponseMessage[FullAnnotationResponse]:
+ """
+ `annotation_id`에 해당하는 어노테이션의 전체 상세 정보를 조회합니다.
+ """
+ annotation = service.get_full_annotation(annotation_id)
+ return ResponseMessage.success(value=annotation, code=CommonCode.SUCCESS_FIND_ANNOTATION)
+
+
+@router.get(
+ "/find/db/{db_profile_id}",
+ response_model=ResponseMessage[FullAnnotationResponse],
+ summary="DB 프로필 ID로 어노테이션 조회",
+)
+def get_annotation_by_db_profile_id(
+ db_profile_id: str,
+ service: AnnotationService = annotation_service_dependency,
+) -> ResponseMessage[FullAnnotationResponse]:
+ """
+ `db_profile_id`에 연결된 어노테이션의 전체 상세 정보를 조회합니다.
+ """
+ annotation = service.get_annotation_by_db_profile_id(db_profile_id)
+ return ResponseMessage.success(value=annotation, code=CommonCode.SUCCESS_FIND_ANNOTATION)
+
+
+@router.delete(
+ "/remove/{annotation_id}",
+ response_model=ResponseMessage[AnnotationDeleteResponse],
+ summary="특정 어노테이션 삭제",
+)
+def delete_annotation(
+ annotation_id: str,
+ service: AnnotationService = annotation_service_dependency,
+) -> ResponseMessage[AnnotationDeleteResponse]:
+ """
+ `annotation_id`에 해당하는 어노테이션 및 하위 데이터를 모두 삭제합니다.
+ """
+ result = service.delete_annotation(annotation_id)
+ return ResponseMessage.success(value=result, code=CommonCode.SUCCESS_DELETE_ANNOTATION)
diff --git a/app/api/api_key_api.py b/app/api/api_key_api.py
new file mode 100644
index 0000000..0617237
--- /dev/null
+++ b/app/api/api_key_api.py
@@ -0,0 +1,144 @@
+from fastapi import APIRouter, Depends
+
+from app.core.enum.llm_service_info import LLMServiceEnum
+from app.core.response import ResponseMessage
+from app.core.status import CommonCode
+from app.schemas.api_key.create_model import APIKeyCreate
+from app.schemas.api_key.decrypted_response_model import DecryptedAPIKeyResponse
+from app.schemas.api_key.response_model import APIKeyResponse
+from app.schemas.api_key.update_model import APIKeyUpdate
+from app.services.api_key_service import APIKeyService, api_key_service
+
+api_key_service_dependency = Depends(lambda: api_key_service)
+
+router = APIRouter()
+
+
+@router.post(
+ "/create",
+ response_model=ResponseMessage[APIKeyResponse],
+ summary="API KEY 저장 (처음 한 번)",
+ description="외부 AI 서비스의 API Key를 암호화하여 로컬 데이터베이스에 저장합니다.",
+)
+def store_api_key(
+ api_key_data: APIKeyCreate, service: APIKeyService = api_key_service_dependency
+) -> ResponseMessage[APIKeyResponse]:
+ """
+ - **service_name**: API Key가 사용될 외부 서비스 이름 (예: "OpenAI")
+ - **api_key**: 암호화하여 저장할 실제 API Key (예: "sk-***..")
+ """
+ created_api_key = service.store_api_key(api_key_data)
+
+ response_data = APIKeyResponse(
+ id=created_api_key.id,
+ service_name=created_api_key.service_name.value,
+ created_at=created_api_key.created_at,
+ updated_at=created_api_key.updated_at,
+ )
+
+ return ResponseMessage.success(value=response_data, code=CommonCode.CREATED)
+
+
+@router.get(
+ "/result",
+ response_model=ResponseMessage[list[APIKeyResponse]],
+ summary="저장된 모든 API KEY 정보 조회",
+ description="""
+ ai_credential 테이블에 저장된 모든 서비스 이름을 확인합니다.
+ 이를 통해 프론트엔드에서는 비워둘 필드, 임의의 마스킹된 값을 채워둘 필드를 구분합니다.
+ """,
+)
+def get_all_api_keys(
+ service: APIKeyService = api_key_service_dependency,
+) -> ResponseMessage[list[APIKeyResponse]]:
+ """저장된 모든 API Key의 메타데이터를 조회하여 등록 여부를 확인합니다."""
+ api_keys_in_db = service.get_all_api_keys()
+
+ response_data = [
+ APIKeyResponse(
+ id=api_key.id,
+ service_name=api_key.service_name,
+ created_at=api_key.created_at,
+ updated_at=api_key.updated_at,
+ )
+ for api_key in api_keys_in_db
+ ]
+ return ResponseMessage.success(value=response_data, code=CommonCode.SUCCESS_GET_API_KEY)
+
+
+@router.get(
+ "/result/{serviceName}",
+ response_model=ResponseMessage[APIKeyResponse],
+ summary="특정 서비스의 API KEY 정보 조회",
+)
+def get_api_key_by_service_name(
+ serviceName: LLMServiceEnum, service: APIKeyService = api_key_service_dependency
+) -> ResponseMessage[APIKeyResponse]:
+ """서비스 이름을 기준으로 특정 API Key의 메타데이터를 조회합니다."""
+ api_key_in_db = service.get_api_key_by_service_name(serviceName)
+
+ response_data = APIKeyResponse(
+ id=api_key_in_db.id,
+ service_name=api_key_in_db.service_name,
+ created_at=api_key_in_db.created_at,
+ updated_at=api_key_in_db.updated_at,
+ )
+ return ResponseMessage.success(value=response_data, code=CommonCode.SUCCESS_GET_API_KEY)
+
+
+@router.get(
+ "/find/decrypted/{serviceName}",
+ response_model=ResponseMessage[DecryptedAPIKeyResponse],
+ summary="[내부용] 복호화된 API KEY 조회",
+ description="내부 AI 서버와 같이, 신뢰된 서비스가 복호화된 API 키를 요청할 때 사용합니다. (외부 노출 금지)",
+ # include_in_schema=False, # Swagger 문서에 포함하지 않음
+)
+def get_decrypted_api_key(
+ serviceName: LLMServiceEnum, service: APIKeyService = api_key_service_dependency
+) -> ResponseMessage[DecryptedAPIKeyResponse]:
+ """서비스 이름을 기준으로 API Key를 복호화하여 반환합니다."""
+ decrypted_key = service.get_decrypted_api_key(serviceName.value)
+ return ResponseMessage.success(
+ value=DecryptedAPIKeyResponse(api_key=decrypted_key), code=CommonCode.SUCCESS_GET_API_KEY
+ )
+
+
+@router.put(
+ "/modify/{serviceName}",
+ response_model=ResponseMessage[APIKeyResponse],
+ summary="특정 서비스의 API KEY 수정",
+)
+def update_api_key(
+ serviceName: LLMServiceEnum,
+ key_data: APIKeyUpdate,
+ service: APIKeyService = api_key_service_dependency,
+) -> ResponseMessage[APIKeyResponse]:
+ """
+ 서비스 이름을 기준으로 특정 API Key를 새로운 값으로 수정합니다.
+ - **service_name**: 수정할 서비스의 이름
+ - **api_key**: 새로운 API Key
+ """
+ updated_api_key = service.update_api_key(serviceName.value, key_data)
+
+ response_data = APIKeyResponse(
+ id=updated_api_key.id,
+ service_name=updated_api_key.service_name,
+ created_at=updated_api_key.created_at,
+ updated_at=updated_api_key.updated_at,
+ )
+
+ return ResponseMessage.success(value=response_data, code=CommonCode.SUCCESS_UPDATE_API_KEY)
+
+
+@router.delete(
+ "/remove/{serviceName}",
+ response_model=ResponseMessage,
+ summary="특정 서비스의 API KEY 삭제",
+)
+def delete_api_key(serviceName: LLMServiceEnum, service: APIKeyService = api_key_service_dependency) -> ResponseMessage:
+ """
+ 서비스 이름을 기준으로 특정 API Key를 삭제합니다.
+ - **service_name**: 삭제할 서비스의 이름
+ """
+ service.delete_api_key(serviceName.value)
+ return ResponseMessage.success(code=CommonCode.SUCCESS_DELETE_API_KEY)
diff --git a/app/api/api_router.py b/app/api/api_router.py
new file mode 100644
index 0000000..45e22f4
--- /dev/null
+++ b/app/api/api_router.py
@@ -0,0 +1,28 @@
+# app/api/api_router.py
+
+from fastapi import APIRouter
+
+from app.api import (
+ annotation_api,
+ api_key_api,
+ chat_tab_api,
+ driver_api,
+ query_api,
+ test_api,
+ user_db_api,
+ chat_messages_api,
+)
+
+api_router = APIRouter()
+
+# 테스트 라우터
+api_router.include_router(test_api.router, prefix="/test", tags=["Test"])
+
+# 라우터
+api_router.include_router(driver_api.router, prefix="/driver", tags=["Driver"])
+api_router.include_router(user_db_api.router, prefix="/user/db", tags=["UserDb"])
+api_router.include_router(api_key_api.router, prefix="/keys", tags=["API Key"])
+api_router.include_router(chat_tab_api.router, prefix="/chatTabs", tags=["Chat Tab"])
+api_router.include_router(annotation_api.router, prefix="/annotations", tags=["Annotation"])
+api_router.include_router(query_api.router, prefix="/query", tags=["query"])
+api_router.include_router(chat_messages_api.router, prefix="/chatMessages", tags=["Chat Message"])
diff --git a/app/api/chat_messages_api.py b/app/api/chat_messages_api.py
new file mode 100644
index 0000000..d63ca92
--- /dev/null
+++ b/app/api/chat_messages_api.py
@@ -0,0 +1,39 @@
+from fastapi import APIRouter, Depends
+
+from app.core.enum.sender import SenderEnum
+from app.core.response import ResponseMessage
+from app.core.status import CommonCode
+from app.schemas.chat_message.request_model import ChatMessagesReqeust
+from app.schemas.chat_message.response_model import ChatMessagesResponse
+from app.services.chat_message_service import ChatMessageService, chat_message_service
+
+chat_message_service_dependency = Depends(lambda: chat_message_service)
+
+router = APIRouter()
+
+
+@router.post(
+ "/create",
+ response_model=ResponseMessage[ChatMessagesResponse],
+ summary="새로운 사용자 질의 생성",
+)
+async def create_chat_message(
+ request: ChatMessagesReqeust, service: ChatMessageService = chat_message_service_dependency
+) -> ResponseMessage[ChatMessagesResponse]:
+ """
+ `tabId`, `message`를 받아 DB에 저장하고 AI를 통해 사용자 질의를 분석하고 답변을 생성하여 반환합니다.
+ """
+ new_messages = await service.create_chat_message(request)
+
+ print(ChatMessagesResponse.model_json_schema())
+
+ response_data = ChatMessagesResponse(
+ id=new_messages.id,
+ chat_tab_id=new_messages.chat_tab_id,
+ sender=SenderEnum(new_messages.sender),
+ message=new_messages.message,
+ created_at=new_messages.created_at,
+ updated_at=new_messages.updated_at,
+ )
+
+ return ResponseMessage.success(value=response_data, code=CommonCode.SUCCESS_CREATE_CHAT_MESSAGES)
diff --git a/app/api/chat_tab_api.py b/app/api/chat_tab_api.py
new file mode 100644
index 0000000..de08f37
--- /dev/null
+++ b/app/api/chat_tab_api.py
@@ -0,0 +1,129 @@
+from fastapi import APIRouter, Depends, Path
+
+from app.core.response import ResponseMessage
+from app.core.status import CommonCode
+from app.schemas.chat_tab.base_model import ChatTabBase
+from app.schemas.chat_tab.response_model import ChatMessagesResponse, ChatTabResponse
+from app.schemas.chat_tab.update_model import ChatTabUpdate
+from app.services.chat_tab_service import ChatTabService, chat_tab_service
+
+chat_tab_service_dependency = Depends(lambda: chat_tab_service)
+
+router = APIRouter()
+
+
+@router.post(
+ "/create",
+ response_model=ResponseMessage[ChatTabResponse],
+ summary="새로운 Chat Tab 생성",
+ description="새로운 Chat Tab을 생성하여 로컬 데이터베이스에 저장합니다.",
+)
+def create_chat_tab(
+ chatName: ChatTabBase, service: ChatTabService = chat_tab_service_dependency
+) -> ResponseMessage[ChatTabResponse]:
+ """
+ - **name**: 새로운 Chat_tab 이름 (예: "채팅 타이틀")
+ """
+ created_chat_tab = service.create_chat_tab(chatName)
+
+ response_data = ChatTabResponse(
+ id=created_chat_tab.id,
+ name=created_chat_tab.name,
+ created_at=created_chat_tab.created_at,
+ updated_at=created_chat_tab.updated_at,
+ )
+ return ResponseMessage.success(value=response_data, code=CommonCode.SUCCESS_CHAT_TAB_CREATE)
+
+
+@router.get(
+ "/find",
+ response_model=ResponseMessage[list[ChatTabResponse]],
+ summary="저장된 모든 Chat_tab 정보 조회",
+ description="""
+ chat_tab 테이블에 저장된 모든 chat tab들을 확인합니다.
+ """,
+)
+def get_all_chat_tab(
+ service: ChatTabService = chat_tab_service_dependency,
+) -> ResponseMessage[list[ChatTabResponse]]:
+ """저장된 모든 chat_tab의 메타데이터를 조회하여 등록 여부를 확인합니다."""
+ chat_tabs_in_db = service.get_all_chat_tab()
+
+ response_data = [
+ ChatTabResponse(
+ id=chat_tab.id,
+ name=chat_tab.name,
+ created_at=chat_tab.created_at,
+ updated_at=chat_tab.updated_at,
+ )
+ for chat_tab in chat_tabs_in_db
+ ]
+ return ResponseMessage.success(value=response_data, code=CommonCode.SUCCESS_GET_CHAT_TAB)
+
+
+@router.get(
+ "/find/{tabId}",
+ response_model=ResponseMessage[ChatMessagesResponse],
+ summary="특정 탭의 메시지 전체 조회",
+)
+def get_chat_messages_by_tabId(
+ tabId: str = Path(..., description="채팅 탭 고유 ID"), service: ChatTabService = chat_tab_service_dependency
+) -> ResponseMessage[list[ChatMessagesResponse]]:
+ """tabId를 기준으로 해당 chat_tab의 전체 메시지를 가져옵니다."""
+ chat_tab = service.get_chat_tab_by_tabId(tabId)
+
+ chat_messages = service.get_chat_messages_by_tabId(tabId)
+
+ response_data = ChatMessagesResponse(
+ id=chat_tab.id,
+ name=chat_tab.name,
+ created_at=chat_tab.created_at,
+ updated_at=chat_tab.updated_at,
+ messages=chat_messages,
+ )
+
+ return ResponseMessage.success(value=response_data, code=CommonCode.SUCCESS_GET_CHAT_MESSAGES)
+
+
+@router.put(
+ "/modify/{tabId}",
+ response_model=ResponseMessage[ChatTabResponse],
+ summary="특정 Chat Tab Name 수정",
+)
+def updated_chat_tab(
+ chatName: ChatTabUpdate,
+ tabId: str = Path(..., description="수정할 채팅 탭의 고유 ID"),
+ service: ChatTabService = chat_tab_service_dependency,
+) -> ResponseMessage[ChatTabResponse]:
+ """
+ 채팅 탭 ID를 기준으로 채팅 탭의 이름을 새로운 값으로 수정합니다.
+ - **id**: 수정할 채팅 탭 ID
+ - **name**: 새로운 채팅 탭의 이름
+ """
+ updated_chat_tab = service.updated_chat_tab(tabId, chatName)
+
+ response_data = ChatTabResponse(
+ id=updated_chat_tab.id,
+ name=updated_chat_tab.name,
+ created_at=updated_chat_tab.created_at,
+ updated_at=updated_chat_tab.updated_at,
+ )
+
+ return ResponseMessage.success(value=response_data, code=CommonCode.SUCCESS_CHAT_TAB_UPDATE)
+
+
+@router.delete(
+ "/remove/{tabId}",
+ response_model=ResponseMessage,
+ summary="특정 Chat Tab 삭제",
+)
+def delete_chat_tab(
+ tabId: str = Path(..., description="삭제할 채팅 탭의 고유 ID"),
+ service: ChatTabService = chat_tab_service_dependency,
+) -> ResponseMessage:
+ """
+ 채팅 탭 ID를 기준으로 채팅 탭을 삭제합니다.
+ - **id**: 삭제할 채팅 탭 ID
+ """
+ service.delete_chat_tab(tabId)
+ return ResponseMessage.success(code=CommonCode.SUCCESS_CHAT_TAB_DELETE)
diff --git a/app/api/driver_api.py b/app/api/driver_api.py
new file mode 100644
index 0000000..00f8822
--- /dev/null
+++ b/app/api/driver_api.py
@@ -0,0 +1,34 @@
+# app/api/driver_api.py
+
+from fastapi import APIRouter, Depends
+
+from app.core.enum.db_driver import DBTypesEnum
+from app.core.exceptions import APIException
+from app.core.response import ResponseMessage
+from app.core.status import CommonCode
+from app.schemas.driver.driver_info_model import DriverInfo
+from app.services.driver_service import DriverService, driver_service
+
+driver_service_dependency = Depends(lambda: driver_service)
+
+router = APIRouter()
+
+
+@router.get(
+ "/info/{driver_id}",
+ response_model=ResponseMessage[DriverInfo],
+ summary="DB 드라이버 정보 조회",
+)
+def read_driver_info(
+ driver_id: str,
+ service: DriverService = driver_service_dependency,
+) -> ResponseMessage[DriverInfo]:
+ """경로 파라미터로 받은 driver_id에 해당하는 DB 드라이버의 지원 정보를 조회합니다."""
+ try:
+ db_type_enum = DBTypesEnum[driver_id.lower()]
+ except KeyError as e:
+ raise APIException(CommonCode.INVALID_DB_DRIVER, *e.args) from e
+ driver_info_data = DriverInfo.from_enum(db_type_enum)
+ return ResponseMessage.success(
+ value=service.read_driver_info(driver_info_data), code=CommonCode.SUCCESS_DRIVER_INFO
+ )
diff --git a/app/api/health_api.py b/app/api/health_api.py
new file mode 100644
index 0000000..22d2dfc
--- /dev/null
+++ b/app/api/health_api.py
@@ -0,0 +1,9 @@
+# app/api/health_api.py
+from fastapi import APIRouter
+
+router = APIRouter(tags=["Health"])
+
+
+@router.get("/health")
+async def health_check():
+ return {"status": "ok", "message": "Service is healthy"}
diff --git a/app/api/query_api.py b/app/api/query_api.py
new file mode 100644
index 0000000..f924591
--- /dev/null
+++ b/app/api/query_api.py
@@ -0,0 +1,67 @@
+# app/api/query_api.py
+
+
+from typing import Any
+
+from fastapi import APIRouter, Depends
+
+from app.core.exceptions import APIException
+from app.core.response import ResponseMessage
+from app.schemas.query.query_model import QueryInfo, RequestExecutionQuery
+from app.services.query_service import QueryService, query_service
+from app.services.user_db_service import UserDbService, user_db_service
+
+query_service_dependency = Depends(lambda: query_service)
+user_db_service_dependency = Depends(lambda: user_db_service)
+
+router = APIRouter()
+
+
+@router.post(
+ "/execute",
+ response_model=ResponseMessage[dict | str | None],
+ summary="쿼리 실행",
+)
+def execution(
+ query_info: RequestExecutionQuery,
+ service: QueryService = query_service_dependency,
+ userDbservice: UserDbService = user_db_service_dependency,
+) -> ResponseMessage[dict | str | None]:
+ db_info = userDbservice.find_profile(query_info.user_db_id)
+ result = service.execution(query_info, db_info)
+
+ if not result.is_successful:
+ raise APIException(result.code)
+ return ResponseMessage.success(value=result.data, code=result.code)
+
+
+@router.post(
+ "/execute/test",
+ response_model=ResponseMessage[Any],
+ summary="쿼리 실행",
+)
+def execution_test(
+ query_info: QueryInfo,
+ service: QueryService = query_service_dependency,
+ userDbservice: UserDbService = user_db_service_dependency,
+) -> ResponseMessage[Any]:
+ db_info = userDbservice.find_profile(query_info.user_db_id)
+ result = service.execution_test(query_info, db_info)
+
+ return ResponseMessage.success(value=result.data, code=result.code)
+
+
+@router.get(
+ "/find/{chat_tab_id}",
+ response_model=ResponseMessage[dict],
+ summary="쿼리 실행 내역 조회",
+)
+def find_query_history(
+ chat_tab_id: str,
+ service: QueryService = query_service_dependency,
+) -> ResponseMessage[dict]:
+ result = service.find_query_history(chat_tab_id)
+
+ if not result.is_successful:
+ raise APIException(result.code)
+ return ResponseMessage.success(value=result.data, code=result.code)
diff --git a/app/api/test_api.py b/app/api/test_api.py
new file mode 100644
index 0000000..49672b9
--- /dev/null
+++ b/app/api/test_api.py
@@ -0,0 +1,45 @@
+from fastapi import APIRouter
+
+from app.core.exceptions import APIException
+from app.core.response import ResponseMessage
+from app.core.status import CommonCode
+
+router = APIRouter()
+
+
+@router.get("", response_model=ResponseMessage, summary="타입 변환을 이용한 성공/실패/버그 테스트")
+def simple_test(mode: str):
+ """
+ curl 테스트 시 아래 명령어 사용
+ curl -i -X GET "http://localhost:39722/api/test?mode=1"
+ curl -i -X GET "http://localhost:8000/api/test?mode=1"
+
+ 쿼리 파라미터 'mode' 값에 따라 다른 응답을 반환합니다.
+
+ - **mode=1**: 성공 응답 (200 OK)
+ - **mode=2**: 커스텀 성공 응답 (200 OK)
+ - **mode=기타 숫자**: 예상된 실패 (404 Not Found)
+ - **mode=문자열**: 예상치 못한 서버 버그 (500 Internal Server Error)
+ """
+ try:
+ # 1. 입력받은 mode를 정수(int)로 변환 시도
+ mode_int = int(mode)
+
+ # 2. 정수로 변환 성공 시, 값에 따라 분기
+ if mode_int == 1:
+ # 기본 성공 코드(SUCCESS)로 응답
+ return ResponseMessage.success(value={"detail": "기본 성공 테스트입니다."})
+ elif mode_int == 2:
+ # 커스텀 성공 코드(CREATED)로 응답
+ return ResponseMessage.success(
+ value={"detail": "커스텀 성공 코드(CREATED) 테스트입니다."}, code=CommonCode.CREATED
+ )
+ else:
+ # 그 외 숫자는 '데이터 없음' 오류로 처리
+ raise APIException(CommonCode.NO_SEARCH_DATA)
+
+ except ValueError:
+ # 3. 정수로 변환 실패 시 (문자열이 들어온 경우)
+ # 예상치 못한 버그를 강제로 발생시킵니다.
+ # 이 에러는 generic_exception_handler가 처리하게 됩니다.
+ raise TypeError("의도적으로 발생시킨 타입 에러입니다.") from ValueError
diff --git a/app/api/user_db_api.py b/app/api/user_db_api.py
new file mode 100644
index 0000000..f837dc3
--- /dev/null
+++ b/app/api/user_db_api.py
@@ -0,0 +1,158 @@
+# app/api/user_db_api.py
+
+
+from fastapi import APIRouter, Depends
+
+from app.core.exceptions import APIException
+from app.core.response import ResponseMessage
+from app.core.status import CommonCode
+from app.schemas.user_db.db_profile_model import DBProfileInfo, UpdateOrCreateDBProfile
+from app.schemas.user_db.result_model import ColumnInfo, DBProfile, TableInfo
+from app.services.user_db_service import UserDbService, user_db_service
+
+user_db_service_dependency = Depends(lambda: user_db_service)
+
+router = APIRouter()
+
+
+@router.post(
+ "/connect/test",
+ response_model=ResponseMessage[bool],
+ summary="DB 연결 테스트",
+)
+def connection_test(
+ db_info: DBProfileInfo,
+ service: UserDbService = user_db_service_dependency,
+) -> ResponseMessage[bool]:
+ db_info.validate_required_fields()
+ result = service.connection_test(db_info)
+
+ if not result.is_successful:
+ raise APIException(result.code)
+ return ResponseMessage.success(value=result.is_successful, code=result.code)
+
+
+@router.post(
+ "/create/profile",
+ response_model=ResponseMessage[str],
+ summary="DB 프로필 저장",
+)
+def create_profile(
+ create_db_info: UpdateOrCreateDBProfile,
+ service: UserDbService = user_db_service_dependency,
+) -> ResponseMessage[str]:
+ create_db_info.validate_required_fields()
+ result = service.create_profile(create_db_info)
+
+ if not result.is_successful:
+ raise APIException(result.code)
+ return ResponseMessage.success(value=result.view_name, code=result.code)
+
+
+@router.put(
+ "/modify/profile",
+ response_model=ResponseMessage[str],
+ summary="DB 프로필 업데이트",
+)
+def update_profile(
+ update_db_info: UpdateOrCreateDBProfile,
+ service: UserDbService = user_db_service_dependency,
+) -> ResponseMessage[str]:
+ update_db_info.validate_required_fields()
+ result = service.update_profile(update_db_info)
+
+ if not result.is_successful:
+ raise APIException(result.code)
+ return ResponseMessage.success(value=result.view_name, code=result.code)
+
+
+@router.delete(
+ "/remove/{profile_id}",
+ response_model=ResponseMessage[str],
+ summary="DB 프로필 삭제",
+)
+def delete_profile(
+ profile_id: str,
+ service: UserDbService = user_db_service_dependency,
+) -> ResponseMessage[str]:
+ result = service.delete_profile(profile_id)
+
+ if not result.is_successful:
+ raise APIException(result.code)
+ return ResponseMessage.success(value=result.view_name, code=result.code)
+
+
+@router.get(
+ "/find/all",
+ response_model=ResponseMessage[list[DBProfile]],
+ summary="DB 프로필 전체 조회",
+)
+def find_all_profile(
+ service: UserDbService = user_db_service_dependency,
+) -> ResponseMessage[list[DBProfile]]:
+ result = service.find_all_profile()
+
+ if not result.is_successful:
+ raise APIException(result.code)
+ return ResponseMessage.success(value=result.profiles, code=result.code)
+
+
+@router.get(
+ "/find/schemas/{profile_id}",
+ response_model=ResponseMessage[list[str]],
+ summary="특정 DB의 전체 스키마 조회",
+)
+def find_schemas(profile_id: str, service: UserDbService = user_db_service_dependency) -> ResponseMessage[list[str]]:
+ db_info = service.find_profile(profile_id)
+ result = service.find_schemas(db_info)
+
+ if not result.is_successful:
+ raise APIException(result.code)
+ return ResponseMessage.success(value=result.schemas, code=result.code)
+
+
+@router.get(
+ "/find/tables/{profile_id}/{schema_name}",
+ response_model=ResponseMessage[list[str]],
+ summary="특정 스키마의 전체 테이블 조회",
+)
+def find_tables(
+ profile_id: str, schema_name: str, service: UserDbService = user_db_service_dependency
+) -> ResponseMessage[list[str]]:
+ db_info = service.find_profile(profile_id)
+ result = service.find_tables(db_info, schema_name)
+
+ if not result.is_successful:
+ raise APIException(result.code)
+ return ResponseMessage.success(value=result.tables, code=result.code)
+
+
+@router.get(
+ "/find/columns/{profile_id}/{schema_name}/{table_name}",
+ response_model=ResponseMessage[list[ColumnInfo]],
+ summary="특정 테이블의 전체 컬럼 조회",
+)
+def find_columns(
+ profile_id: str, schema_name: str, table_name: str, service: UserDbService = user_db_service_dependency
+) -> ResponseMessage[list[ColumnInfo]]:
+ db_info = service.find_profile(profile_id)
+ result = service.find_columns(db_info, schema_name, table_name)
+
+ if not result.is_successful:
+ raise APIException(result.code)
+ return ResponseMessage.success(value=result.columns, code=result.code)
+
+
+@router.get(
+ "/find/all-schemas/{profile_id}",
+ response_model=ResponseMessage[list[TableInfo]],
+ summary="특정 DB의 전체 스키마의 상세 정보 조회",
+ description="테이블, 컬럼, 제약조건, 인덱스를 포함한 모든 스키마 정보를 반환합니다.",
+)
+def find_all_schema_info(
+ profile_id: str, service: UserDbService = user_db_service_dependency
+) -> ResponseMessage[list[TableInfo]]:
+ db_info = service.find_profile(profile_id)
+ full_schema_info = service.get_full_schema_info(db_info)
+
+ return ResponseMessage.success(value=full_schema_info, code=CommonCode.SUCCESS)
diff --git a/app/core/all_logging.py b/app/core/all_logging.py
new file mode 100644
index 0000000..796a01f
--- /dev/null
+++ b/app/core/all_logging.py
@@ -0,0 +1,33 @@
+# app/core/all_logging.py
+
+import logging
+
+from fastapi import Request
+
+# 로깅 기본 설정 (애플리케이션 시작 시 한 번만 구성)
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s - %(levelname)s - %(message)s", # [수정] 로그 레벨(INFO, ERROR)을 포함
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+
+
+async def log_requests_middleware(request: Request, call_next):
+ """
+ 모든 API 요청과 에러에 대한 로그를 남기는 미들웨어입니다.
+ """
+ endpoint = f"{request.method} {request.url.path}"
+
+ # 일반 요청 로그를 남깁니다.
+ logging.info(f"엔드포인트: {endpoint}")
+
+ try:
+ # 다음 미들웨어 또는 실제 엔드포인트를 호출합니다.
+ response = await call_next(request)
+ return response
+ except Exception as e:
+ # [수정] 에러 발생 시, exc_info=True를 추가하여 전체 트레이스백을 함께 기록합니다.
+ # 메시지 형식도 "ERROR 엔드포인트:"로 변경합니다.
+ logging.error(f"ERROR 엔드포인트: {endpoint}", exc_info=True)
+ # 예외를 다시 발생시켜 FastAPI의 전역 예외 처리기가 최종 응답을 만들도록 합니다.
+ raise e
diff --git a/app/core/enum/constraint_type.py b/app/core/enum/constraint_type.py
new file mode 100644
index 0000000..d7a3bcb
--- /dev/null
+++ b/app/core/enum/constraint_type.py
@@ -0,0 +1,16 @@
+from enum import Enum
+
+
+class ConstraintTypeEnum(str, Enum):
+ """
+ 데이터베이스 제약 조건의 유형을 정의하는 Enum 클래스입니다.
+ - str을 상속하여 Enum 멤버를 문자열 값처럼 사용할 수 있습니다.
+ """
+
+ PRIMARY_KEY = "PRIMARY KEY"
+ FOREIGN_KEY = "FOREIGN KEY"
+ UNIQUE = "UNIQUE"
+ CHECK = "CHECK"
+ NOT_NULL = "NOT NULL" # 일부 DB에서는 제약조건으로 취급
+ DEFAULT = "DEFAULT" # 일부 DB에서는 제약조건으로 취급
+ INDEX = "INDEX" # 제약조건은 아니지만, 관련 정보로 포함
diff --git a/app/core/enum/db_driver.py b/app/core/enum/db_driver.py
new file mode 100644
index 0000000..e578d0a
--- /dev/null
+++ b/app/core/enum/db_driver.py
@@ -0,0 +1,13 @@
+# app/core/enum/db_driver.py
+from enum import Enum
+
+
+class DBTypesEnum(Enum):
+ """지원되는 데이터베이스 드라이버 타입"""
+
+ postgresql = "psycopg2"
+ mysql = "mysql.connector"
+ sqlite = "sqlite3"
+ oracle = "oracledb"
+ sqlserver = "pyodbc"
+ mariadb = "pymysql"
diff --git a/app/core/enum/db_key_prefix_name.py b/app/core/enum/db_key_prefix_name.py
new file mode 100644
index 0000000..252a801
--- /dev/null
+++ b/app/core/enum/db_key_prefix_name.py
@@ -0,0 +1,22 @@
+# app/core/enum/db_key_prefix_name.py
+from enum import Enum
+
+
+class DBSaveIdEnum(Enum):
+ """저장할 디비 ID 앞에 들어갈 이름"""
+
+ user_db = "USER-DB"
+ driver = "DRIVER"
+ api_key = "API-KEY"
+ chat_tab = "CHAT-TAB"
+ query = "QUERY"
+ chat_message = "CHAT-MESSAGE"
+
+ database_annotation = "DB-ANNO"
+ table_annotation = "TBL-ANNO"
+ column_annotation = "COL-ANNO"
+ table_constraint = "TC-ANNO"
+ constraint_column = "CC-ANNO"
+ index_annotation = "IDX-ANNO"
+ index_column = "IC-ANNO"
+ table_relationship = "TR-ANNO"
diff --git a/app/core/enum/llm_service_info.py b/app/core/enum/llm_service_info.py
new file mode 100644
index 0000000..c568e8d
--- /dev/null
+++ b/app/core/enum/llm_service_info.py
@@ -0,0 +1,10 @@
+from enum import Enum
+
+
+class LLMServiceEnum(str, Enum):
+ """지원하는 외부 LLM 서비스 목록"""
+
+ OPENAI = "OpenAI"
+ ANTHROPIC = "Anthropic"
+ GEMINI = "Gemini"
+ # TODO: 다른 지원 서비스를 여기에 추가
diff --git a/app/core/enum/sender.py b/app/core/enum/sender.py
new file mode 100644
index 0000000..a6394b2
--- /dev/null
+++ b/app/core/enum/sender.py
@@ -0,0 +1,8 @@
+from enum import Enum
+
+
+class SenderEnum(str, Enum):
+ """채팅 메시지 발신자 구분"""
+
+ user = "U"
+ ai = "A"
diff --git a/app/core/exceptions.py b/app/core/exceptions.py
new file mode 100644
index 0000000..9322f76
--- /dev/null
+++ b/app/core/exceptions.py
@@ -0,0 +1,68 @@
+import traceback
+from typing import Any
+
+from fastapi import Request
+from fastapi.exceptions import RequestValidationError
+from fastapi.responses import JSONResponse
+
+from app.core.status import CommonCode
+
+
+def _create_error_response(code: CommonCode, data: Any | None = None) -> JSONResponse:
+ """
+ 모든 에러 응답에 사용될 표준 JSONResponse 객체를 생성하는 헬퍼 함수.
+ """
+ error_content = {
+ "code": code.code,
+ "message": code.message,
+ "data": data,
+ }
+ return JSONResponse(
+ status_code=code.http_status,
+ content=error_content,
+ )
+
+
+class APIException(Exception):
+ """
+ API 로직 내에서 발생하는 모든 예상된 오류에 사용할 기본 예외 클래스입니다.
+ """
+
+ def __init__(self, code: CommonCode, *args):
+ self.code_enum = code
+ self.message = code.message
+        super().__init__(self.message)
+        self.args = args
+
+
+async def validation_exception_handler(request: Request, exc: RequestValidationError):
+ """
+ Pydantic 모델의 유효성 검사 실패(RequestValidationError)를 감지하여
+ 표준화된 JSON 오류 응답을 반환합니다.
+ """
+ error_details = []
+ for error in exc.errors():
+ field_name = ".".join(map(str, error["loc"][1:]))
+ error_details.append({"field": field_name, "message": error["msg"]})
+
+ return _create_error_response(code=CommonCode.INVALID_PARAMETER, data={"details": error_details})
+
+
+async def api_exception_handler(request: Request, exc: APIException):
+ """
+ APIException이 발생했을 때, 이를 감지하여 표준화된 JSON 오류 응답을 반환합니다.
+ """
+ return _create_error_response(code=exc.code_enum, data=exc.args)
+
+
+async def generic_exception_handler(request: Request, exc: Exception):
+ """
+ 처리되지 않은 모든 예외를 잡아, 일관된 500 서버 오류를 반환합니다.
+ """
+ error_traceback = traceback.format_exc()
+
+ print("=" * 20, "UNEXPECTED ERROR", "=" * 20)
+ print(error_traceback)
+ print("=" * 50)
+
+ return _create_error_response(code=CommonCode.FAIL, data={"traceback": error_traceback})
diff --git a/app/core/response.py b/app/core/response.py
new file mode 100644
index 0000000..ba7bf61
--- /dev/null
+++ b/app/core/response.py
@@ -0,0 +1,24 @@
+from typing import Generic, TypeVar
+
+from pydantic import BaseModel, Field
+
+from app.core.status import CommonCode
+
+T = TypeVar("T")
+
+
+class ResponseMessage(BaseModel, Generic[T]):
+ """
+ 모든 API 응답에 사용될 공용 스키마입니다.
+ """
+
+ code: str = Field(..., description="응답을 나타내는 고유 상태 코드")
+ message: str = Field(..., description="응답 메시지")
+ data: T | None = Field(None, description="반환될 실제 데이터")
+
+ @classmethod
+ def success(cls, value: T | None = None, code: CommonCode = CommonCode.SUCCESS, *args) -> "ResponseMessage[T]":
+ """
+ 성공 응답을 생성하는 팩토리 메서드입니다.
+ """
+ return cls(code=code.code, message=code.get_message(*args), data=value)
diff --git a/app/core/security.py b/app/core/security.py
new file mode 100644
index 0000000..d598030
--- /dev/null
+++ b/app/core/security.py
@@ -0,0 +1,46 @@
+import base64
+import os
+
+from Crypto.Cipher import AES
+from Crypto.Random import get_random_bytes
+from Crypto.Util.Padding import pad, unpad
+
+"""
+보안 원칙을 적용한 AES-256 암호화 및 복호화 클래스입니다.
+- 암호화 시 매번 새로운 랜덤 IV를 생성합니다.
+"""
+
+
+class AES256:
+    _key = base64.b64decode(os.environ["ENV_AES256_KEY"])  # fail fast with a clear KeyError if the env var is unset
+
+ @staticmethod
+ def encrypt(text: str) -> str:
+ iv = get_random_bytes(AES.block_size)
+
+ cipher = AES.new(AES256._key, AES.MODE_CBC, iv)
+
+ data_bytes = text.encode("utf-8")
+ padded_bytes = pad(data_bytes, AES.block_size)
+
+ encrypted_bytes = cipher.encrypt(padded_bytes)
+
+ combined_bytes = iv + encrypted_bytes
+ return base64.b64encode(combined_bytes).decode("utf-8")
+
+ @staticmethod
+ def decrypt(cipher_text: str) -> str:
+ """
+ AES-256으로 암호화된 텍스트를 복호화합니다.
+ """
+ combined_bytes = base64.b64decode(cipher_text)
+
+ iv = combined_bytes[: AES.block_size]
+ encrypted_bytes = combined_bytes[AES.block_size :]
+
+ cipher = AES.new(AES256._key, AES.MODE_CBC, iv)
+
+ decrypted_padded_bytes = cipher.decrypt(encrypted_bytes)
+ decrypted_bytes = unpad(decrypted_padded_bytes, AES.block_size)
+
+ return decrypted_bytes.decode("utf-8")
diff --git a/app/core/status.py b/app/core/status.py
new file mode 100644
index 0000000..130614f
--- /dev/null
+++ b/app/core/status.py
@@ -0,0 +1,171 @@
+from enum import Enum
+
+from fastapi import status
+
+
+class CommonCode(Enum):
+ """
+ 애플리케이션의 모든 상태 코드를 중앙에서 관리합니다.
+ 각 멤버는 (HTTP 상태 코드, 고유 비즈니스 코드, 기본 메시지) 튜플을 값으로 가집니다.
+ 상태 코드 참고: https://developer.mozilla.org/ko/docs/Web/HTTP/Status
+ """
+
+ # =======================================
+ # 성공 (Success) - 2xxx
+ # =======================================
+ """ 기본 성공 코드 - 20xx """
+ SUCCESS = (status.HTTP_200_OK, "2000", "성공적으로 처리되었습니다.")
+ CREATED = (status.HTTP_201_CREATED, "2001", "성공적으로 생성되었습니다.")
+
+ """ DRIVER, DB 성공 코드 - 21xx """
+ SUCCESS_DRIVER_INFO = (status.HTTP_200_OK, "2100", "드라이버 정보 조회를 성공하였습니다.")
+ SUCCESS_USER_DB_CONNECT_TEST = (status.HTTP_200_OK, "2101", "테스트 연결을 성공하였습니다.")
+ SUCCESS_FIND_PROFILE = (status.HTTP_200_OK, "2102", "디비 정보 조회를 성공하였습니다.")
+ SUCCESS_FIND_SCHEMAS = (status.HTTP_200_OK, "2103", "디비 스키마 정보 조회를 성공하였습니다.")
+ SUCCESS_FIND_TABLES = (status.HTTP_200_OK, "2104", "디비 테이블 정보 조회를 성공하였습니다.")
+ SUCCESS_FIND_COLUMNS = (status.HTTP_200_OK, "2105", "디비 컬럼 정보 조회를 성공하였습니다.")
+ SUCCESS_SAVE_PROFILE = (status.HTTP_200_OK, "2130", "디비 연결 정보를 저장하였습니다.")
+ SUCCESS_UPDATE_PROFILE = (status.HTTP_200_OK, "2150", "디비 연결 정보를 업데이트 하였습니다.")
+ SUCCESS_DELETE_PROFILE = (status.HTTP_200_OK, "2170", "디비 연결 정보를 삭제 하였습니다.")
+
+ """ KEY 성공 코드 - 22xx """
+    SUCCESS_DELETE_API_KEY = (status.HTTP_200_OK, "2200", "API KEY가 성공적으로 삭제되었습니다.")  # 204 must not carry a response body
+ SUCCESS_UPDATE_API_KEY = (status.HTTP_200_OK, "2201", "API KEY가 성공적으로 수정되었습니다.")
+ SUCCESS_GET_API_KEY = (status.HTTP_200_OK, "2202", "API KEY 정보를 성공적으로 조회했습니다.")
+
+ """ CHAT TAB 성공 코드 - 23xx """
+ SUCCESS_CHAT_TAB_CREATE = (status.HTTP_200_OK, "2300", "새로운 채팅 탭이 성공적으로 생성하였습니다.")
+ SUCCESS_CHAT_TAB_UPDATE = (status.HTTP_200_OK, "2301", "채팅 탭 이름이 성공적으로 수정되었습니다.")
+ SUCCESS_CHAT_TAB_DELETE = (status.HTTP_200_OK, "2302", "채팅 탭을 성공적으로 삭제되었습니다.")
+ SUCCESS_GET_CHAT_TAB = (status.HTTP_200_OK, "2303", "모든 채팅 탭을 성공적으로 조회하였습니다.")
+ SUCCESS_GET_CHAT_MESSAGES = (status.HTTP_200_OK, "2304", "채팅 탭의 모든 메시지를 성공적으로 불러왔습니다.")
+
+ """ ANNOTATION 성공 코드 - 24xx """
+ SUCCESS_CREATE_ANNOTATION = (status.HTTP_201_CREATED, "2400", "어노테이션을 성공적으로 생성하였습니다.")
+ SUCCESS_FIND_ANNOTATION = (status.HTTP_200_OK, "2401", "어노테이션 정보를 성공적으로 조회하였습니다.")
+ SUCCESS_DELETE_ANNOTATION = (status.HTTP_200_OK, "2402", "어노테이션을 성공적으로 삭제하였습니다.")
+
+ """ SQL 성공 코드 - 25xx """
+    SUCCESS_EXECUTION = (status.HTTP_201_CREATED, "2500", "쿼리를 성공적으로 수행하였습니다.")
+    SUCCESS_FIND_QUERY_HISTORY = (status.HTTP_200_OK, "2501", "쿼리 이력 조회를 성공하였습니다.")
+    SUCCESS_EXECUTION_TEST = (status.HTTP_201_CREATED, "2502", "쿼리 TEST를 성공적으로 수행하였습니다.")
+
+    """ CHAT MESSAGE 성공 코드 - 26xx """
+ SUCCESS_CREATE_CHAT_MESSAGES = (status.HTTP_201_CREATED, "2600", "메시지를 성공적으로 요청하였습니다.")
+
+ # =======================================
+ # 클라이언트 에러 (Client Error) - 4xxx
+ # =======================================
+ """ 기본 클라이언트 에러 코드 - 40xx """
+ NO_VALUE = (status.HTTP_400_BAD_REQUEST, "4000", "필수 값이 존재하지 않습니다.")
+ DUPLICATION = (status.HTTP_409_CONFLICT, "4001", "이미 존재하는 데이터입니다.")
+ NO_SEARCH_DATA = (status.HTTP_404_NOT_FOUND, "4002", "요청한 데이터를 찾을 수 없습니다.")
+ INVALID_PARAMETER = (status.HTTP_422_UNPROCESSABLE_ENTITY, "4003", "필수 값이 누락되었습니다.")
+
+ """ DRIVER, DB 클라이언트 에러 코드 - 41xx """
+ INVALID_DB_DRIVER = (status.HTTP_409_CONFLICT, "4100", "지원하지 않는 데이터베이스입니다.")
+ NO_DB_DRIVER = (status.HTTP_400_BAD_REQUEST, "4101", "데이터베이스는 필수 값입니다.")
+ NO_DB_PROFILE_FOUND = (status.HTTP_404_NOT_FOUND, "4102", "해당 ID의 DB 프로필을 찾을 수 없습니다.")
+
+ """ KEY 클라이언트 에러 코드 - 42xx """
+ INVALID_API_KEY_FORMAT = (status.HTTP_400_BAD_REQUEST, "4200", "API 키의 형식이 올바르지 않습니다.")
+ INVALID_API_KEY_PREFIX = (
+ status.HTTP_400_BAD_REQUEST,
+ "4201",
+ "API 키가 선택한 서비스의 올바른 형식이 아닙니다. (예: OpenAI는 sk-로 시작)",
+ )
+
+ """ AI CHAT TAB 클라이언트 오류 코드 - 43xx """
+ INVALID_CHAT_TAB_NAME_FORMAT = (status.HTTP_400_BAD_REQUEST, "4300", "채팅 탭 이름의 형식이 올바르지 않습니다.")
+ INVALID_CHAT_TAB_NAME_LENGTH = (
+ status.HTTP_400_BAD_REQUEST,
+ "4301",
+ "채팅 탭 이름의 길이는 128자를 초과할 수 없습니다.",
+ )
+ INVALID_CHAT_TAB_NAME_CONTENT = (
+ status.HTTP_400_BAD_REQUEST,
+ "4302",
+ "채팅 탭 이름에 SQL 예약어나 허용되지 않는 특수문자가 포함되어 있습니다. "
+ "허용되지 않는 특수 문자: 큰따옴표(\"), 작은따옴표('), 세미콜론(;), 꺾쇠괄호(<, >)",
+ )
+ INVALID_CHAT_TAB_ID_FORMAT = (status.HTTP_400_BAD_REQUEST, "4303", "채팅 탭 ID의 형식이 올바르지 않습니다.")
+ NO_CHAT_TAB_DATA = (status.HTTP_404_NOT_FOUND, "4304", "해당 ID를 가진 채팅 탭을 찾을 수 없습니다.")
+
+ """ ANNOTATION 클라이언트 에러 코드 - 44xx """
+ INVALID_ANNOTATION_REQUEST = (status.HTTP_400_BAD_REQUEST, "4400", "어노테이션 요청 데이터가 유효하지 않습니다.")
+ NO_ANNOTATION_FOR_PROFILE = (status.HTTP_404_NOT_FOUND, "4401", "해당 DB 프로필에 연결된 어노테이션이 없습니다.")
+
+ """ SQL 클라이언트 에러 코드 - 45xx """
+ NO_CHAT_KEY = (status.HTTP_400_BAD_REQUEST, "4501", "CHAT 키는 필수 값입니다.")
+ NO_QUERY = (status.HTTP_400_BAD_REQUEST, "4500", "쿼리는 필수 값입니다.")
+
+ """ CHAT MESSAGE 에러 코드 - 46xx """
+ INVALID_CHAT_MESSAGE_REQUEST = (status.HTTP_400_BAD_REQUEST, "4600", "AI 채팅 요청 데이터가 유효하지 않습니다.")
+
+ # ==================================
+ # 서버 에러 (Server Error) - 5xx
+ # ==================================
+ """ 기본 서버 에러 코드 - 50xx """
+ FAIL = (status.HTTP_500_INTERNAL_SERVER_ERROR, "5000", "서버 처리 중 에러가 발생했습니다.")
+ DB_BUSY = (
+ status.HTTP_503_SERVICE_UNAVAILABLE,
+ "5001",
+ "데이터베이스가 현재 사용 중입니다. 잠시 후 다시 시도해주세요.",
+ )
+ FAIL_TO_VERIFY_CREATION = (
+ status.HTTP_500_INTERNAL_SERVER_ERROR,
+ "5002",
+ "데이터 생성 후 검증 과정에서 에러가 발생했습니다.",
+ )
+
+ """ DRIVER, DB 서버 에러 코드 - 51xx """
+ FAIL_CONNECT_DB = (status.HTTP_500_INTERNAL_SERVER_ERROR, "5100", "디비 연결 중 에러가 발생했습니다.")
+ FAIL_FIND_PROFILE = (status.HTTP_500_INTERNAL_SERVER_ERROR, "5101", "디비 정보 조회 중 에러가 발생했습니다.")
+ FAIL_FIND_SCHEMAS = (status.HTTP_500_INTERNAL_SERVER_ERROR, "5102", "디비 스키마 정보 조회 중 에러가 발생했습니다.")
+ FAIL_FIND_TABLES = (status.HTTP_500_INTERNAL_SERVER_ERROR, "5103", "디비 테이블 정보 조회 중 에러가 발생했습니다.")
+ FAIL_FIND_COLUMNS = (status.HTTP_500_INTERNAL_SERVER_ERROR, "5104", "디비 컬럼 정보 조회 중 에러가 발생했습니다.")
+ FAIL_FIND_CONSTRAINTS_OR_INDEXES = (
+ status.HTTP_500_INTERNAL_SERVER_ERROR,
+ "5105",
+ "디비 제약조건 또는 인덱스 정보 조회 중 에러가 발생했습니다.",
+ )
+ FAIL_FIND_SAMPLE_ROWS = (status.HTTP_500_INTERNAL_SERVER_ERROR, "5106", "샘플 데이터 조회 중 에러가 발생했습니다.")
+ FAIL_SAVE_PROFILE = (status.HTTP_500_INTERNAL_SERVER_ERROR, "5130", "디비 정보 저장 중 에러가 발생했습니다.")
+ FAIL_UPDATE_PROFILE = (status.HTTP_500_INTERNAL_SERVER_ERROR, "5150", "디비 정보 업데이트 중 에러가 발생했습니다.")
+ FAIL_DELETE_PROFILE = (status.HTTP_500_INTERNAL_SERVER_ERROR, "5170", "디비 정보 삭제 중 에러가 발생했습니다.")
+
+ """ KEY 서버 에러 코드 - 52xx """
+
+ """ AI CHAT, DB 서버 에러 코드 - 53xx """
+
+ """ ANNOTATION 서버 에러 코드 - 54xx """
+ FAIL_CREATE_ANNOTATION = (status.HTTP_500_INTERNAL_SERVER_ERROR, "5400", "어노테이션 생성 중 에러가 발생했습니다.")
+ FAIL_FIND_ANNOTATION = (status.HTTP_500_INTERNAL_SERVER_ERROR, "5401", "어노테이션 조회 중 에러가 발생했습니다.")
+ FAIL_DELETE_ANNOTATION = (status.HTTP_500_INTERNAL_SERVER_ERROR, "5402", "어노테이션 삭제 중 에러가 발생했습니다.")
+ FAIL_AI_SERVER_CONNECTION = (status.HTTP_503_SERVICE_UNAVAILABLE, "5403", "AI 서버 연결에 실패했습니다.")
+ FAIL_AI_SERVER_PROCESSING = (
+ status.HTTP_500_INTERNAL_SERVER_ERROR,
+ "5404",
+ "AI 서버가 요청을 처리하는 데 실패했습니다.",
+ )
+
+ """ SQL 서버 에러 코드 - 55xx """
+
+    FAIL_CREATE_QUERY = (status.HTTP_500_INTERNAL_SERVER_ERROR, "5500", "쿼리 실행 정보 저장 중 에러가 발생했습니다.")
+
+ """ CHAT MESSAGE 에러 코드 - 56xx """
+
+ def __init__(self, http_status: int, code: str, message: str):
+ """Enum 멤버가 생성될 때 각 값을 속성으로 할당합니다."""
+ self.http_status = http_status
+ self.code = code
+ self.message = message
+
+ def get_message(self, *args) -> str:
+ """
+ 메시지 포맷팅이 필요한 경우, 인자를 받아 완성된 메시지를 반환합니다.
+ """
+ try:
+ return self.message % args if args else self.message
+ except Exception:
+ return self.message
diff --git a/app/core/utils.py b/app/core/utils.py
new file mode 100644
index 0000000..571817a
--- /dev/null
+++ b/app/core/utils.py
@@ -0,0 +1,25 @@
+import uuid
+from pathlib import Path
+
+# 앱 데이터를 저장할 폴더 이름
+APP_DATA_DIR_NAME = ".qgenie"
+
+
+def get_db_path() -> Path:
+ """
+ 사용자 홈 디렉터리 내에 앱 데이터 폴더를 만들고,
+ SQLite DB 파일의 전체 경로를 반환합니다.
+ """
+ home_dir = Path.home()
+ app_data_dir = home_dir / APP_DATA_DIR_NAME
+ app_data_dir.mkdir(exist_ok=True)
+ db_path = app_data_dir / "local_storage.sqlite"
+ return db_path
+
+
+def generate_uuid() -> str:
+ return uuid.uuid4().hex.upper()
+
+
+def generate_prefixed_uuid(prefix: str) -> str:
+ return f"{prefix.upper()}-{uuid.uuid4().hex.upper()}"
diff --git a/app/db/init_db.py b/app/db/init_db.py
new file mode 100644
index 0000000..c514815
--- /dev/null
+++ b/app/db/init_db.py
@@ -0,0 +1,424 @@
+# db/init_db.py
+import logging
+import sqlite3
+
+from app.core.utils import get_db_path
+
+
+def _synchronize_table(cursor, table_name: str, target_columns: dict):
+ """
+ 테이블 스키마를 확인하고, 코드와 다를 경우 테이블을 재생성하여 동기화합니다.
+ """
+ try:
+ # 외래 키 제약 조건 비활성화
+ cursor.execute("PRAGMA foreign_keys=off;")
+ cursor.execute(f"PRAGMA table_info({table_name})")
+ current_schema_rows = cursor.fetchall()
+ current_columns = {row[1]: row[2].upper() for row in current_schema_rows}
+
+ target_schema_simple = {
+ name: definition.split()[0].upper()
+ for name, definition in target_columns.items()
+ if not name.startswith("FOREIGN KEY")
+ }
+
+ if current_columns == target_schema_simple:
+ # 외래 키 제약 조건 다시 활성화
+ cursor.execute("PRAGMA foreign_keys=on;")
+ return
+
+ logging.warning(
+ f"'{table_name}' 테이블의 스키마 변경을 감지했습니다. 마이그레이션을 시작합니다. (데이터 손실 위험)"
+ )
+
+ temp_table_name = f"{table_name}_temp_old"
+ cursor.execute(f"DROP TABLE IF EXISTS {temp_table_name}") # DROP 먼저 실행
+ cursor.execute(f"ALTER TABLE {table_name} RENAME TO {temp_table_name}")
+
+ columns_with_definitions = ", ".join([f"{name} {definition}" for name, definition in target_columns.items()])
+ cursor.execute(f"CREATE TABLE {table_name} ({columns_with_definitions})")
+
+ cursor.execute(f"PRAGMA table_info({temp_table_name})")
+ temp_columns = {row[1] for row in cursor.fetchall()}
+ common_columns = ", ".join(target_columns.keys() & temp_columns)
+
+ if common_columns:
+ cursor.execute(
+ f"INSERT INTO {table_name} ({common_columns}) SELECT {common_columns} FROM {temp_table_name}"
+ )
+ logging.info(f"'{temp_table_name}'에서 '{table_name}'으로 데이터를 복사했습니다.")
+
+ cursor.execute(f"DROP TABLE {temp_table_name}")
+ logging.info(f"임시 테이블 '{temp_table_name}'을(를) 삭제했습니다.")
+
+ except sqlite3.Error as e:
+ logging.error(f"'{table_name}' 테이블 마이그레이션 중 오류 발생: {e}")
+ raise e
+ finally:
+ # 외래 키 제약 조건 다시 활성화
+ cursor.execute("PRAGMA foreign_keys=on;")
+
+
+def initialize_database():
+ """
+ 데이터베이스에 연결하고, 테이블 스키마를 최신 상태로 동기화합니다.
+ """
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(db_path)
+ conn.execute("BEGIN")
+ cursor = conn.cursor()
+
+ # --- db_profile 테이블 처리 ---
+ db_profile_cols = {
+ "id": "VARCHAR(64) PRIMARY KEY NOT NULL",
+ "type": "VARCHAR(32) NOT NULL",
+ "host": "VARCHAR(255)",
+ "port": "INTEGER",
+ "name": "VARCHAR(64)",
+ "username": "VARCHAR(128)",
+ "password": "VARCHAR(128)",
+ "view_name": "VARCHAR(64)",
+ "annotation_id": "VARCHAR(64)",
+ "created_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "updated_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "FOREIGN KEY (annotation_id)": "REFERENCES database_annotation(id) ON DELETE SET NULL",
+ }
+ create_sql = (
+ f"CREATE TABLE IF NOT EXISTS db_profile ({', '.join([f'{k} {v}' for k, v in db_profile_cols.items()])})"
+ )
+ cursor.execute(create_sql)
+ _synchronize_table(
+ cursor, "db_profile", {k: v for k, v in db_profile_cols.items() if not k.startswith("FOREIGN KEY")}
+ )
+
+ cursor.execute(
+ """
+ CREATE TRIGGER IF NOT EXISTS update_db_profile_updated_at
+            AFTER UPDATE ON db_profile FOR EACH ROW
+ BEGIN UPDATE db_profile SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; END;
+ """
+ )
+
+ # --- ai_credential 테이블 처리 ---
+ ai_credential_cols = {
+ "id": "VARCHAR(64) PRIMARY KEY NOT NULL",
+ "service_name": "VARCHAR(32) NOT NULL UNIQUE",
+ "api_key": "VARCHAR(256) NOT NULL",
+ "created_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "updated_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ }
+ create_sql = f"CREATE TABLE IF NOT EXISTS ai_credential ({', '.join([f'{k} {v}' for k, v in ai_credential_cols.items()])})"
+ cursor.execute(create_sql)
+ _synchronize_table(cursor, "ai_credential", ai_credential_cols)
+
+ cursor.execute(
+ """
+ CREATE TRIGGER IF NOT EXISTS update_ai_credential_updated_at
+            AFTER UPDATE ON ai_credential FOR EACH ROW
+ BEGIN UPDATE ai_credential SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; END;
+ """
+ )
+
+ # --- chat_tab 테이블 처리 ---
+ chat_tab_cols = {
+ "id": "VARCHAR(64) PRIMARY KEY NOT NULL",
+ "name": "VARCHAR(128)",
+ "created_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "updated_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ }
+ create_sql = (
+ f"CREATE TABLE IF NOT EXISTS chat_tab ({', '.join([f'{k} {v}' for k, v in chat_tab_cols.items()])})"
+ )
+ cursor.execute(create_sql)
+ _synchronize_table(cursor, "chat_tab", chat_tab_cols)
+ cursor.execute(
+ """
+ CREATE TRIGGER IF NOT EXISTS update_chat_tab_updated_at
+            AFTER UPDATE ON chat_tab FOR EACH ROW
+ BEGIN UPDATE chat_tab SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; END;
+ """
+ )
+
+ # --- chat_message 테이블 처리 ---
+ chat_message_cols = {
+ "id": "VARCHAR(64) PRIMARY KEY NOT NULL",
+ "chat_tab_id": "VARCHAR(64) NOT NULL",
+ "sender": "VARCHAR(1) NOT NULL",
+ "message": "TEXT NOT NULL",
+ "created_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "updated_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "FOREIGN KEY (chat_tab_id)": "REFERENCES chat_tab(id) ON DELETE CASCADE",
+ }
+ create_sql = (
+ f"CREATE TABLE IF NOT EXISTS chat_message ({', '.join([f'{k} {v}' for k, v in chat_message_cols.items()])})"
+ )
+ cursor.execute(create_sql)
+ _synchronize_table(
+ cursor, "chat_message", {k: v for k, v in chat_message_cols.items() if not k.startswith("FOREIGN KEY")}
+ )
+
+ cursor.execute(
+ """
+ CREATE TRIGGER IF NOT EXISTS update_chat_message_updated_at
+            AFTER UPDATE ON chat_message FOR EACH ROW
+ BEGIN UPDATE chat_message SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; END;
+ """
+ )
+
+ # --- query_history 테이블 처리 ---
+ query_history_cols = {
+ "id": "VARCHAR(64) PRIMARY KEY NOT NULL",
+ "user_db_id": "VARCHAR(64) NOT NULL",
+ "chat_message_id": "VARCHAR(64) NOT NULL",
+ "database": "VARCHAR(256) NOT NULL",
+ "query_text": "TEXT NOT NULL",
+ "type": "VARCHAR(32)",
+ "is_success": "VARCHAR(1)",
+ "error_message": "TEXT",
+ "created_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "updated_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "FOREIGN KEY (chat_message_id)": "REFERENCES chat_message(id) ON DELETE CASCADE",
+ "FOREIGN KEY (user_db_id)": "REFERENCES db_profile(id) ON DELETE CASCADE",
+ }
+ create_sql = f"CREATE TABLE IF NOT EXISTS query_history ({', '.join([f'{k} {v}' for k, v in query_history_cols.items()])})"
+ cursor.execute(create_sql)
+ _synchronize_table(
+ cursor, "query_history", {k: v for k, v in query_history_cols.items() if not k.startswith("FOREIGN KEY")}
+ )
+
+ cursor.execute(
+ """
+ CREATE TRIGGER IF NOT EXISTS update_query_history_updated_at
+            AFTER UPDATE ON query_history FOR EACH ROW
+ BEGIN UPDATE query_history SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; END;
+ """
+ )
+
+ # --- database_annotation 테이블 처리 ---
+ database_annotation_cols = {
+ "id": "VARCHAR(64) PRIMARY KEY NOT NULL",
+ "db_profile_id": "VARCHAR(64) NOT NULL",
+ "database_name": "VARCHAR(255) NOT NULL",
+ "description": "TEXT",
+ "created_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "updated_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "FOREIGN KEY (db_profile_id)": "REFERENCES db_profile(id) ON DELETE CASCADE",
+ }
+ create_sql = f"CREATE TABLE IF NOT EXISTS database_annotation ({', '.join([f'{k} {v}' for k, v in database_annotation_cols.items()])})"
+ cursor.execute(create_sql)
+ _synchronize_table(
+ cursor,
+ "database_annotation",
+ {k: v for k, v in database_annotation_cols.items() if not k.startswith("FOREIGN KEY")},
+ )
+ cursor.execute(
+ """
+ CREATE TRIGGER IF NOT EXISTS update_database_annotation_updated_at
+            AFTER UPDATE ON database_annotation FOR EACH ROW
+ BEGIN UPDATE database_annotation SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; END;
+ """
+ )
+
+ # --- table_annotation 테이블 처리 ---
+ table_annotation_cols = {
+ "id": "VARCHAR(64) PRIMARY KEY NOT NULL",
+ "database_annotation_id": "VARCHAR(64) NOT NULL",
+ "table_name": "VARCHAR(255) NOT NULL",
+ "description": "TEXT",
+ "created_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "updated_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "FOREIGN KEY (database_annotation_id)": "REFERENCES database_annotation(id) ON DELETE CASCADE",
+ }
+ create_sql = f"CREATE TABLE IF NOT EXISTS table_annotation ({', '.join([f'{k} {v}' for k, v in table_annotation_cols.items()])})"
+ cursor.execute(create_sql)
+ _synchronize_table(
+ cursor,
+ "table_annotation",
+ {k: v for k, v in table_annotation_cols.items() if not k.startswith("FOREIGN KEY")},
+ )
+ cursor.execute(
+ """
+ CREATE TRIGGER IF NOT EXISTS update_table_annotation_updated_at
+            AFTER UPDATE ON table_annotation FOR EACH ROW
+ BEGIN UPDATE table_annotation SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; END;
+ """
+ )
+
+ # --- column_annotation 테이블 처리 ---
+ column_annotation_cols = {
+ "id": "VARCHAR(64) PRIMARY KEY NOT NULL",
+ "table_annotation_id": "VARCHAR(64) NOT NULL",
+ "column_name": "VARCHAR(255) NOT NULL",
+ "data_type": "VARCHAR(64)",
+ "is_nullable": "INTEGER NOT NULL DEFAULT 1",
+ "default_value": "TEXT",
+ "check_expression": "TEXT",
+ "ordinal_position": "INTEGER",
+ "description": "TEXT",
+ "created_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "updated_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "FOREIGN KEY (table_annotation_id)": "REFERENCES table_annotation(id) ON DELETE CASCADE",
+ }
+ create_sql = f"CREATE TABLE IF NOT EXISTS column_annotation ({', '.join([f'{k} {v}' for k, v in column_annotation_cols.items()])})"
+ cursor.execute(create_sql)
+ _synchronize_table(
+ cursor,
+ "column_annotation",
+ {k: v for k, v in column_annotation_cols.items() if not k.startswith("FOREIGN KEY")},
+ )
+ cursor.execute(
+ """
+ CREATE TRIGGER IF NOT EXISTS update_column_annotation_updated_at
+            AFTER UPDATE ON column_annotation FOR EACH ROW
+ BEGIN UPDATE column_annotation SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; END;
+ """
+ )
+
+ # --- table_relationship 테이블 처리 ---
+ table_relationship_cols = {
+ "id": "VARCHAR(64) PRIMARY KEY NOT NULL",
+ "database_annotation_id": "VARCHAR(64) NOT NULL",
+ "from_table_id": "VARCHAR(64) NOT NULL",
+ "to_table_id": "VARCHAR(64) NOT NULL",
+ "relationship_type": "VARCHAR(32) NOT NULL",
+ "description": "TEXT",
+ "created_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "updated_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "FOREIGN KEY (database_annotation_id)": "REFERENCES database_annotation(id) ON DELETE CASCADE",
+ "FOREIGN KEY (from_table_id)": "REFERENCES table_annotation(id) ON DELETE CASCADE",
+ "FOREIGN KEY (to_table_id)": "REFERENCES table_annotation(id) ON DELETE CASCADE",
+ }
+ create_sql = f"CREATE TABLE IF NOT EXISTS table_relationship ({', '.join([f'{k} {v}' for k, v in table_relationship_cols.items()])})"
+ cursor.execute(create_sql)
+ _synchronize_table(
+ cursor,
+ "table_relationship",
+ {k: v for k, v in table_relationship_cols.items() if not k.startswith("FOREIGN KEY")},
+ )
+ cursor.execute(
+ """
+ CREATE TRIGGER IF NOT EXISTS update_table_relationship_updated_at
+            AFTER UPDATE ON table_relationship FOR EACH ROW
+ BEGIN UPDATE table_relationship SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; END;
+ """
+ )
+
+ # --- table_constraint 테이블 처리 ---
+ table_constraint_cols = {
+ "id": "VARCHAR(64) PRIMARY KEY NOT NULL",
+ "table_annotation_id": "VARCHAR(64) NOT NULL",
+ "constraint_type": "VARCHAR(16) NOT NULL",
+ "name": "VARCHAR(255)",
+ "description": "TEXT",
+ "expression": "TEXT",
+ "ref_table": "VARCHAR(255)",
+ "on_update_action": "VARCHAR(16)",
+ "on_delete_action": "VARCHAR(16)",
+ "created_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "updated_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "FOREIGN KEY (table_annotation_id)": "REFERENCES table_annotation(id) ON DELETE CASCADE",
+ }
+ create_sql = f"CREATE TABLE IF NOT EXISTS table_constraint ({', '.join([f'{k} {v}' for k, v in table_constraint_cols.items()])})"
+ cursor.execute(create_sql)
+ _synchronize_table(
+ cursor,
+ "table_constraint",
+ {k: v for k, v in table_constraint_cols.items() if not k.startswith("FOREIGN KEY")},
+ )
+ cursor.execute(
+ """
+ CREATE TRIGGER IF NOT EXISTS update_table_constraint_updated_at
+            AFTER UPDATE ON table_constraint FOR EACH ROW
+ BEGIN UPDATE table_constraint SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; END;
+ """
+ )
+
+ # --- constraint_column 테이블 처리 ---
+ constraint_column_cols = {
+ "id": "VARCHAR(64) PRIMARY KEY NOT NULL",
+ "constraint_id": "VARCHAR(64) NOT NULL",
+ "column_annotation_id": "VARCHAR(64) NOT NULL",
+ "position": "INTEGER",
+ "referenced_column_name": "VARCHAR(255)",
+ "created_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "updated_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "FOREIGN KEY (constraint_id)": "REFERENCES table_constraint(id) ON DELETE CASCADE",
+ "FOREIGN KEY (column_annotation_id)": "REFERENCES column_annotation(id) ON DELETE CASCADE",
+ }
+ create_sql = f"CREATE TABLE IF NOT EXISTS constraint_column ({', '.join([f'{k} {v}' for k, v in constraint_column_cols.items()])})"
+ cursor.execute(create_sql)
+ _synchronize_table(
+ cursor,
+ "constraint_column",
+ {k: v for k, v in constraint_column_cols.items() if not k.startswith("FOREIGN KEY")},
+ )
+ cursor.execute(
+ """
+ CREATE TRIGGER IF NOT EXISTS update_constraint_column_updated_at
+            AFTER UPDATE ON constraint_column FOR EACH ROW
+ BEGIN UPDATE constraint_column SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; END;
+ """
+ )
+
+ # --- index_annotation 테이블 처리 ---
+ index_annotation_cols = {
+ "id": "VARCHAR(64) PRIMARY KEY NOT NULL",
+ "table_annotation_id": "VARCHAR(64) NOT NULL",
+ "name": "VARCHAR(255)",
+ "is_unique": "INTEGER NOT NULL DEFAULT 0",
+ "created_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "updated_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "FOREIGN KEY (table_annotation_id)": "REFERENCES table_annotation(id) ON DELETE CASCADE",
+ }
+ create_sql = f"CREATE TABLE IF NOT EXISTS index_annotation ({', '.join([f'{k} {v}' for k, v in index_annotation_cols.items()])})"
+ cursor.execute(create_sql)
+ _synchronize_table(
+ cursor,
+ "index_annotation",
+ {k: v for k, v in index_annotation_cols.items() if not k.startswith("FOREIGN KEY")},
+ )
+ cursor.execute(
+ """
+ CREATE TRIGGER IF NOT EXISTS update_index_annotation_updated_at
+            AFTER UPDATE ON index_annotation FOR EACH ROW
+ BEGIN UPDATE index_annotation SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; END;
+ """
+ )
+
+ # --- index_column 테이블 처리 ---
+ index_column_cols = {
+ "id": "VARCHAR(64) PRIMARY KEY NOT NULL",
+ "index_id": "VARCHAR(64) NOT NULL",
+ "column_annotation_id": "VARCHAR(64) NOT NULL",
+ "position": "INTEGER",
+ "created_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "updated_at": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
+ "FOREIGN KEY (index_id)": "REFERENCES index_annotation(id) ON DELETE CASCADE",
+ "FOREIGN KEY (column_annotation_id)": "REFERENCES column_annotation(id) ON DELETE CASCADE",
+ }
+ create_sql = (
+ f"CREATE TABLE IF NOT EXISTS index_column ({', '.join([f'{k} {v}' for k, v in index_column_cols.items()])})"
+ )
+ cursor.execute(create_sql)
+ _synchronize_table(
+ cursor, "index_column", {k: v for k, v in index_column_cols.items() if not k.startswith("FOREIGN KEY")}
+ )
+ cursor.execute(
+ """
+ CREATE TRIGGER IF NOT EXISTS update_index_column_updated_at
+            AFTER UPDATE ON index_column FOR EACH ROW
+ BEGIN UPDATE index_column SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; END;
+ """
+ )
+
+ conn.commit()
+
+ except sqlite3.Error as e:
+ logging.error(f"데이터베이스 초기화 중 오류 발생: {e}. 변경 사항을 롤백합니다.")
+ if conn:
+ conn.rollback()
+ finally:
+ if conn:
+ conn.close()
diff --git a/app/main.py b/app/main.py
index ea2ecbe..01e9da2 100644
--- a/app/main.py
+++ b/app/main.py
@@ -1,39 +1,39 @@
# main.py
-import os
-import socket # 소켓 모듈 임포트
import uvicorn
from fastapi import FastAPI
+from fastapi.exceptions import RequestValidationError
-app = FastAPI()
+from app.api import health_api
+from app.api.api_router import api_router
+from app.core.exceptions import (
+ APIException,
+ api_exception_handler,
+ generic_exception_handler,
+ validation_exception_handler,
+)
+from app.db.init_db import initialize_database
+
+from starlette.middleware.base import BaseHTTPMiddleware
+from app.core.all_logging import log_requests_middleware
+app = FastAPI()
-@app.get("/")
-async def read_root():
- return {"message": "Hello, FastAPI Backend!"}
+# 전체 로그 찍는 부분
+app.add_middleware(BaseHTTPMiddleware, dispatch=log_requests_middleware)
+# 전역 예외 처리기 등록
+app.add_exception_handler(Exception, generic_exception_handler)
+app.add_exception_handler(APIException, api_exception_handler)
+app.add_exception_handler(RequestValidationError, validation_exception_handler)
-@app.get("/health")
-async def health_check():
- return {"status": "ok", "message": "Service is healthy"}
+# 라우터
+app.include_router(health_api.router)
+app.include_router(api_router, prefix="/api")
+# initialize_database 함수가 호출되어 테이블이 생성되거나 이미 존재함을 확인합니다.
+initialize_database()
-# 이 부분이 추가된 동적 포트 할당 로직입니다.
if __name__ == "__main__":
- # 1. 환경 변수 'PORT'가 있으면 해당 포트를 사용합니다.
- # 2. 없으면 사용 가능한 임시 포트를 찾습니다.
- port_from_env = os.getenv("PORT")
-
- if port_from_env:
- port = int(port_from_env)
- print(f"Using port from environment variable: {port}")
- else:
- # 시스템에서 사용 가능한 임시 포트를 찾습니다.
- # 포트 0을 바인딩하면 운영체제가 사용 가능한 포트를 할당해 줍니다.
- with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
- s.bind(("0.0.0.0", 0)) # 0.0.0.0에 포트 0을 바인딩
- port = s.getsockname()[1] # 할당된 포트 번호 가져오기
- print(f"Dynamically assigned port: {port}")
-
# Uvicorn 서버를 시작합니다.
- uvicorn.run(app, host="0.0.0.0", port=port)
+ uvicorn.run(app, host="0.0.0.0", port=39722)
diff --git a/app/repository/annotation_repository.py b/app/repository/annotation_repository.py
new file mode 100644
index 0000000..8ad55c3
--- /dev/null
+++ b/app/repository/annotation_repository.py
@@ -0,0 +1,302 @@
+import sqlite3
+
+from app.core.utils import get_db_path
+from app.schemas.annotation.db_model import (
+ ColumnAnnotationInDB,
+ ConstraintColumnInDB,
+ DatabaseAnnotationInDB,
+ IndexAnnotationInDB,
+ IndexColumnInDB,
+ TableAnnotationInDB,
+ TableConstraintInDB,
+)
+from app.schemas.annotation.response_model import (
+ ColumnAnnotationDetail,
+ ConstraintDetail,
+ FullAnnotationResponse,
+ IndexDetail,
+ TableAnnotationDetail,
+)
+
+
+class AnnotationRepository:
+ """
+ 어노테이션 데이터에 대한 데이터베이스 CRUD 작업을 처리합니다.
+ 모든 메서드는 내부적으로 `sqlite3`를 사용하여 로컬 DB와 상호작용합니다.
+ """
+
+ def create_full_annotation(
+ self,
+ db_conn: sqlite3.Connection,
+ db_annotation: DatabaseAnnotationInDB,
+ table_annotations: list[TableAnnotationInDB],
+ column_annotations: list[ColumnAnnotationInDB],
+ constraint_annotations: list[TableConstraintInDB],
+ constraint_column_annotations: list[ConstraintColumnInDB],
+ index_annotations: list[IndexAnnotationInDB],
+ index_column_annotations: list[IndexColumnInDB],
+ ) -> None:
+ """
+ 하나의 트랜잭션 내에서 전체 어노테이션 데이터를 저장합니다.
+ - 서비스 계층에서 트랜잭션을 관리하므로 connection을 인자로 받습니다.
+ - 실패 시 sqlite3.Error를 발생시킵니다.
+ """
+ cursor = db_conn.cursor()
+
+ # Database, Table, Column Annotations 저장
+ db_data = (
+ db_annotation.id,
+ db_annotation.db_profile_id,
+ db_annotation.database_name,
+ db_annotation.description,
+ db_annotation.created_at,
+ db_annotation.updated_at,
+ )
+ cursor.execute(
+ """
+ INSERT INTO database_annotation (id, db_profile_id, database_name, description, created_at, updated_at)
+ VALUES (?, ?, ?, ?, ?, ?)
+ """,
+ db_data,
+ )
+ table_data = [
+ (t.id, t.database_annotation_id, t.table_name, t.description, t.created_at, t.updated_at)
+ for t in table_annotations
+ ]
+ cursor.executemany(
+ """
+ INSERT INTO table_annotation (id, database_annotation_id, table_name, description, created_at, updated_at)
+ VALUES (?, ?, ?, ?, ?, ?)
+ """,
+ table_data,
+ )
+ column_data = [
+ (
+ c.id,
+ c.table_annotation_id,
+ c.column_name,
+ c.data_type,
+ c.is_nullable,
+ c.default_value,
+ c.check_expression,
+ c.ordinal_position,
+ c.description,
+ c.created_at,
+ c.updated_at,
+ )
+ for c in column_annotations
+ ]
+ cursor.executemany(
+ """
+ INSERT INTO column_annotation (id, table_annotation_id, column_name, data_type, is_nullable, default_value, check_expression, ordinal_position, description, created_at, updated_at)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ """,
+ column_data,
+ )
+
+ # Constraint Annotations 저장
+ constraint_data = [
+ (
+ c.id,
+ c.table_annotation_id,
+ c.constraint_type,
+ c.name,
+ c.description,
+ c.expression,
+ c.ref_table,
+ c.on_update_action,
+ c.on_delete_action,
+ c.created_at,
+ c.updated_at,
+ )
+ for c in constraint_annotations
+ ]
+ cursor.executemany(
+ """
+ INSERT INTO table_constraint (id, table_annotation_id, constraint_type, name, description, expression, ref_table, on_update_action, on_delete_action, created_at, updated_at)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ """,
+ constraint_data,
+ )
+ constraint_column_data = [
+ (
+ cc.id,
+ cc.constraint_id,
+ cc.column_annotation_id,
+ cc.position,
+ cc.referenced_column_name,
+ cc.created_at,
+ cc.updated_at,
+ )
+ for cc in constraint_column_annotations
+ ]
+ cursor.executemany(
+ """
+ INSERT INTO constraint_column (id, constraint_id, column_annotation_id, position, referenced_column_name, created_at, updated_at)
+ VALUES (?, ?, ?, ?, ?, ?, ?)
+ """,
+ constraint_column_data,
+ )
+
+ # Index Annotations 저장
+ index_data = [
+ (i.id, i.table_annotation_id, i.name, i.is_unique, i.created_at, i.updated_at) for i in index_annotations
+ ]
+ cursor.executemany(
+ """
+ INSERT INTO index_annotation (id, table_annotation_id, name, is_unique, created_at, updated_at)
+ VALUES (?, ?, ?, ?, ?, ?)
+ """,
+ index_data,
+ )
+ index_column_data = [
+ (ic.id, ic.index_id, ic.column_annotation_id, ic.position, ic.created_at, ic.updated_at)
+ for ic in index_column_annotations
+ ]
+ cursor.executemany(
+ """
+ INSERT INTO index_column (id, index_id, column_annotation_id, position, created_at, updated_at)
+ VALUES (?, ?, ?, ?, ?, ?)
+ """,
+ index_column_data,
+ )
+
+ def update_db_profile_annotation_id(
+ self, db_conn: sqlite3.Connection, db_profile_id: str, annotation_id: str
+ ) -> None:
+ """
+ 주어진 db_profile_id에 해당하는 레코드의 annotation_id를 업데이트합니다.
+ - 서비스 계층에서 트랜잭션을 관리하므로 connection을 인자로 받습니다.
+ - 실패 시 sqlite3.Error를 발생시킵니다.
+ """
+ cursor = db_conn.cursor()
+ cursor.execute(
+ "UPDATE db_profile SET annotation_id = ? WHERE id = ?",
+ (annotation_id, db_profile_id),
+ )
+
+ def find_full_annotation_by_id(self, annotation_id: str) -> FullAnnotationResponse | None:
+ """
+ annotationId로 전체 어노테이션 상세 정보를 조회합니다.
+ - 여러 테이블을 JOIN하여 구조화된 데이터를 반환합니다.
+ - 실패 시 sqlite3.Error를 발생시킵니다.
+ """
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ conn.row_factory = sqlite3.Row
+ cursor = conn.cursor()
+
+ cursor.execute("SELECT * FROM database_annotation WHERE id = ?", (annotation_id,))
+ db_row = cursor.fetchone()
+ if not db_row:
+ return None
+
+ cursor.execute("SELECT * FROM table_annotation WHERE database_annotation_id = ?", (annotation_id,))
+ table_rows = cursor.fetchall()
+
+ tables_details = []
+ for table_row in table_rows:
+ table_id = table_row["id"]
+
+ # 컬럼 정보
+ cursor.execute(
+ "SELECT id, column_name, description, data_type, is_nullable, default_value FROM column_annotation WHERE table_annotation_id = ?",
+ (table_id,),
+ )
+ columns = []
+ for c in cursor.fetchall():
+ c_dict = dict(c)
+ c_dict["is_nullable"] = (
+ bool(c_dict["is_nullable"]) if c_dict.get("is_nullable") is not None else None
+ )
+ columns.append(ColumnAnnotationDetail.model_validate(c_dict))
+
+ # 제약조건 정보
+ cursor.execute(
+ """
+ SELECT tc.name, tc.constraint_type, tc.description, ca.column_name
+ FROM table_constraint tc
+ LEFT JOIN constraint_column cc ON tc.id = cc.constraint_id
+ LEFT JOIN column_annotation ca ON cc.column_annotation_id = ca.id
+ WHERE tc.table_annotation_id = ?
+ """,
+ (table_id,),
+ )
+ constraint_map = {}
+ for row in cursor.fetchall():
+ if row["name"] not in constraint_map:
+ constraint_map[row["name"]] = {
+ "type": row["constraint_type"],
+ "columns": [],
+ "description": row["description"],
+ }
+ if row["column_name"]:
+ constraint_map[row["name"]]["columns"].append(row["column_name"])
+ constraints = [
+ ConstraintDetail(name=k, type=v["type"], columns=v["columns"], description=v["description"])
+ for k, v in constraint_map.items()
+ ]
+
+ # 인덱스 정보
+ cursor.execute(
+ """
+ SELECT ia.name, ia.is_unique, ca.column_name
+ FROM index_annotation ia
+ JOIN index_column ic ON ia.id = ic.index_id
+ JOIN column_annotation ca ON ic.column_annotation_id = ca.id
+ WHERE ia.table_annotation_id = ?
+ """,
+ (table_id,),
+ )
+ index_map = {}
+ for row in cursor.fetchall():
+ if row["name"] not in index_map:
+ index_map[row["name"]] = {"is_unique": bool(row["is_unique"]), "columns": []}
+ index_map[row["name"]]["columns"].append(row["column_name"])
+ indexes = [
+ IndexDetail(name=k, is_unique=v["is_unique"], columns=v["columns"]) for k, v in index_map.items()
+ ]
+
+ tables_details.append(
+ TableAnnotationDetail(
+ id=table_id,
+ table_name=table_row["table_name"],
+ description=table_row["description"],
+ created_at=table_row["created_at"],
+ updated_at=table_row["updated_at"],
+ columns=columns,
+ constraints=constraints,
+ indexes=indexes,
+ )
+ )
+
+ db_row_dict = dict(db_row)
+ db_row_dict["tables"] = tables_details
+ return FullAnnotationResponse.model_validate(db_row_dict)
+ finally:
+ if conn:
+ conn.close()
+
+ def delete_annotation_by_id(self, annotation_id: str) -> bool:
+ """
+ annotationId로 특정 어노테이션을 삭제합니다.
+ ON DELETE CASCADE에 의해 하위 데이터도 모두 삭제됩니다.
+ 성공 시 True, 대상이 없으면 False를 반환합니다.
+ 실패 시 sqlite3.Error를 발생시킵니다.
+ """
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ cursor = conn.cursor()
+ cursor.execute("DELETE FROM database_annotation WHERE id = ?", (annotation_id,))
+ conn.commit()
+ return cursor.rowcount > 0
+ finally:
+ if conn:
+ conn.close()
+
+
+annotation_repository = AnnotationRepository()
diff --git a/app/repository/api_key_repository.py b/app/repository/api_key_repository.py
new file mode 100644
index 0000000..83d3854
--- /dev/null
+++ b/app/repository/api_key_repository.py
@@ -0,0 +1,138 @@
+import sqlite3
+
+from app.core.utils import get_db_path
+from app.schemas.api_key.db_model import APIKeyInDB
+
+
+class APIKeyRepository:
+ def create_api_key(self, new_id: str, service_name: str, encrypted_key: str) -> APIKeyInDB:
+ """
+ 암호화된 API Key 정보를 받아 데이터베이스에 저장하고,
+ 저장된 객체를 반환합니다.
+ """
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ conn.row_factory = sqlite3.Row
+ cursor = conn.cursor()
+
+ cursor.execute(
+ """
+ INSERT INTO ai_credential (id, service_name, api_key)
+ VALUES (?, ?, ?)
+ """,
+ (new_id, service_name, encrypted_key),
+ )
+ conn.commit()
+
+ cursor.execute("SELECT * FROM ai_credential WHERE id = ?", (new_id,))
+ created_row = cursor.fetchone()
+
+ if not created_row:
+ return None
+
+ return APIKeyInDB.model_validate(dict(created_row))
+
+ finally:
+ if conn:
+ conn.close()
+
+ def get_all_api_keys(self) -> list[APIKeyInDB]:
+ """데이터베이스에 저장된 모든 API Key를 조회합니다."""
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ conn.row_factory = sqlite3.Row
+ cursor = conn.cursor()
+
+ cursor.execute("SELECT * FROM ai_credential")
+ rows = cursor.fetchall()
+
+ return [APIKeyInDB.model_validate(dict(row)) for row in rows]
+ finally:
+ if conn:
+ conn.close()
+
+ def get_api_key_by_service_name(self, service_name: str) -> APIKeyInDB | None:
+ """서비스 이름으로 특정 API Key를 조회합니다."""
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ conn.row_factory = sqlite3.Row
+ cursor = conn.cursor()
+
+ cursor.execute("SELECT * FROM ai_credential WHERE service_name = ?", (service_name,))
+ row = cursor.fetchone()
+
+ if not row:
+ return None
+
+ return APIKeyInDB.model_validate(dict(row))
+ finally:
+ if conn:
+ conn.close()
+
+ def update_api_key(self, service_name: str, encrypted_key: str) -> APIKeyInDB | None:
+ """서비스 이름에 해당하는 API Key를 수정하고, 수정된 객체를 반환합니다."""
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ conn.row_factory = sqlite3.Row
+ cursor = conn.cursor()
+
+ # 먼저 해당 서비스의 데이터가 존재하는지 확인
+ cursor.execute("SELECT id FROM ai_credential WHERE service_name = ?", (service_name,))
+ if not cursor.fetchone():
+ return None
+
+ # 데이터 업데이트
+ cursor.execute(
+ "UPDATE ai_credential SET api_key = ?, updated_at = datetime('now', 'localtime') WHERE service_name = ?",
+ (encrypted_key, service_name),
+ )
+ conn.commit()
+
+ # rowcount가 0이면 업데이트된 행이 없음 (정상적인 경우 발생하기 어려움)
+ if cursor.rowcount == 0:
+ return None
+
+ cursor.execute("SELECT * FROM ai_credential WHERE service_name = ?", (service_name,))
+ updated_row = cursor.fetchone()
+
+ return APIKeyInDB.model_validate(dict(updated_row))
+ finally:
+ if conn:
+ conn.close()
+
+ def delete_api_key(self, service_name: str) -> bool:
+ """서비스 이름에 해당하는 API Key를 삭제하고, 성공 여부를 반환합니다."""
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ cursor = conn.cursor()
+
+ # 먼저 해당 서비스의 데이터가 존재하는지 확인
+ cursor.execute("SELECT id FROM ai_credential WHERE service_name = ?", (service_name,))
+ if not cursor.fetchone():
+ return False
+
+ # 데이터 삭제
+ cursor.execute("DELETE FROM ai_credential WHERE service_name = ?", (service_name,))
+ conn.commit()
+
+ # rowcount가 0이면 삭제된 행이 없음 (정상적인 경우 발생하기 어려움)
+ if cursor.rowcount == 0:
+ return False
+
+ return cursor.rowcount > 0
+ finally:
+ if conn:
+ conn.close()
+
+
+api_key_repository = APIKeyRepository()
diff --git a/app/repository/chat_message_repository.py b/app/repository/chat_message_repository.py
new file mode 100644
index 0000000..4539b0c
--- /dev/null
+++ b/app/repository/chat_message_repository.py
@@ -0,0 +1,71 @@
+import sqlite3
+
+from app.core.utils import get_db_path
+from app.schemas.chat_message.db_model import ChatMessageInDB
+
+
+class ChatMessageRepository:
+ def create_chat_message(self, new_id: str, sender: str, chat_tab_id: str, message: str) -> ChatMessageInDB:
+ """
+ 새로운 채팅을 데이터베이스에 저장하고, 저장된 객체를 반환합니다.
+ """
+
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ conn.row_factory = sqlite3.Row
+ cursor = conn.cursor()
+
+ cursor.execute(
+ """
+ INSERT INTO chat_message (id, chat_tab_id, sender, message)
+ VALUES (?, ?, ?, ?)
+ """,
+ (
+ new_id,
+ chat_tab_id,
+ sender,
+ message,
+ ),
+ )
+ conn.commit()
+
+ cursor.execute("SELECT * FROM chat_message WHERE id = ?", (new_id,))
+ created_row = cursor.fetchone()
+
+ if not created_row:
+ return None
+
+ return ChatMessageInDB.model_validate(dict(created_row))
+
+ finally:
+ if conn:
+ conn.close()
+
+ def get_chat_messages_by_tabId(self, id: str) -> list[ChatMessageInDB]:
+ """주어진 chat_tab_id에 해당하는 모든 메시지를 가져옵니다."""
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ conn.row_factory = sqlite3.Row
+ cursor = conn.cursor()
+
+            # chat_message 테이블에서 chat_tab_id에 해당하는 모든 메시지를 조회합니다.
+ # 메시지가 없을 경우, 빈 리스트를 반환합니다.
+ cursor.execute(
+ "SELECT * FROM chat_message WHERE chat_tab_id = ? ORDER BY created_at ASC",
+ (id,),
+ )
+ rows = cursor.fetchall()
+
+ # 조회된 모든 행을 ChatMessageInDB 객체 리스트로 변환
+ return [ChatMessageInDB.model_validate(dict(row)) for row in rows]
+
+ finally:
+ if conn:
+ conn.close()
+
+
+chat_message_repository = ChatMessageRepository()
diff --git a/app/repository/chat_tab_repository.py b/app/repository/chat_tab_repository.py
new file mode 100644
index 0000000..eb732d6
--- /dev/null
+++ b/app/repository/chat_tab_repository.py
@@ -0,0 +1,141 @@
+import sqlite3
+
+from app.core.utils import get_db_path
+from app.schemas.chat_tab.db_model import ChatTabInDB
+
+
+class ChatTabRepository:
+ def create_chat_tab(self, new_id: str, name: str) -> ChatTabInDB:
+ """
+ 새로운 채팅 탭 이름을 데이터베이스에 저장하고, 저장된 객체를 반환합니다.
+ """
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ conn.row_factory = sqlite3.Row
+ cursor = conn.cursor()
+
+ cursor.execute(
+ """
+ INSERT INTO chat_tab (id, name)
+ VALUES (?, ?)
+ """,
+ (
+ new_id,
+ name,
+ ),
+ )
+ conn.commit()
+
+ cursor.execute("SELECT * FROM chat_tab WHERE id = ?", (new_id,))
+ created_row = cursor.fetchone()
+
+ if not created_row:
+ return None
+
+ return ChatTabInDB.model_validate(dict(created_row))
+
+ finally:
+ if conn:
+ conn.close()
+
+ def updated_chat_tab(self, id: str, new_name: str | None) -> ChatTabInDB | None:
+ """채팅 탭ID에 해당하는 ChatName를 수정하고, 수정된 객체를 반환합니다."""
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ conn.row_factory = sqlite3.Row
+ cursor = conn.cursor()
+
+ # 먼저 해당 서비스의 데이터가 존재하는지 확인
+ cursor.execute("SELECT id FROM chat_tab WHERE id = ?", (id,))
+ if not cursor.fetchone():
+ return None
+
+ # 데이터 업데이트
+ cursor.execute(
+ "UPDATE chat_tab SET name = ?, updated_at = datetime('now', 'localtime') WHERE id = ?",
+ (new_name, id),
+ )
+ conn.commit()
+
+ # rowcount가 0이면 업데이트된 행이 없음 (정상적인 경우 발생하기 어려움)
+ if cursor.rowcount == 0:
+ return None
+
+ cursor.execute("SELECT * FROM chat_tab WHERE id = ?", (id,))
+ updated_row = cursor.fetchone()
+
+ return ChatTabInDB.model_validate(dict(updated_row))
+ finally:
+ if conn:
+ conn.close()
+
+ def delete_chat_tab(self, id: str) -> bool:
+ """채팅 탭ID에 해당하는 ChatTab을 삭제하고, 성공 여부를 반환합니다."""
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ cursor = conn.cursor()
+
+ # 먼저 해당 서비스의 데이터가 존재하는지 확인
+ cursor.execute("SELECT id FROM chat_tab WHERE id = ?", (id,))
+ if not cursor.fetchone():
+ return False
+
+ # 데이터 삭제
+ cursor.execute("DELETE FROM chat_tab WHERE id = ?", (id,))
+ conn.commit()
+
+ # rowcount가 0이면 삭제된 행이 없음 (정상적인 경우 발생하기 어려움)
+ if cursor.rowcount == 0:
+ return False
+
+ return cursor.rowcount > 0
+ finally:
+ if conn:
+ conn.close()
+
+ def get_all_chat_tab(self) -> list[ChatTabInDB]:
+        """데이터베이스에 저장된 모든 채팅 탭을 조회합니다."""
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ conn.row_factory = sqlite3.Row
+ cursor = conn.cursor()
+
+ cursor.execute("SELECT * FROM chat_tab")
+ rows = cursor.fetchall()
+
+ return [ChatTabInDB.model_validate(dict(row)) for row in rows]
+ finally:
+ if conn:
+ conn.close()
+
+ def get_chat_tab_by_id(self, id: str | None) -> ChatTabInDB | None:
+ """ID에 해당하는 채팅 탭 정보를 가져옵니다."""
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ conn.row_factory = sqlite3.Row
+ cursor = conn.cursor()
+
+ cursor.execute("SELECT * FROM chat_tab WHERE id = ?", (id,))
+ row = cursor.fetchone()
+
+ if not row:
+ return None
+
+ return ChatTabInDB.model_validate(dict(row))
+
+ finally:
+ if conn:
+ conn.close()
+
+
+chat_tab_repository = ChatTabRepository()
diff --git a/app/repository/query_repository.py b/app/repository/query_repository.py
new file mode 100644
index 0000000..e309e8b
--- /dev/null
+++ b/app/repository/query_repository.py
@@ -0,0 +1,183 @@
+import sqlite3
+from typing import Any
+
+import oracledb
+
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+from app.core.utils import get_db_path
+from app.schemas.query.result_model import (
+ BasicResult,
+ ExecutionResult,
+ ExecutionSelectResult,
+ InsertLocalDBResult,
+ QueryTestResult,
+ SelectQueryHistoryResult,
+)
+
+
+class QueryRepository:
+ def execution(
+ self,
+ query: str,
+ driver_module: Any,
+ **kwargs: Any,
+ ) -> ExecutionSelectResult | ExecutionResult | BasicResult:
+ """
+        쿼리를 수행합니다.
+ """
+ connection = None
+ try:
+ connection = self._connect(driver_module, **kwargs)
+ cursor = connection.cursor()
+
+ cursor.execute(query)
+
+ if self._is_select_query(query):
+ rows = cursor.fetchall()
+
+ if cursor.description:
+ columns = [desc[0] for desc in cursor.description]
+ data = [dict(zip(columns, row, strict=False)) for row in rows]
+ else:
+ columns = []
+ data = []
+ result = {"columns": columns, "data": data}
+
+ return ExecutionSelectResult(is_successful=True, code=CommonCode.SUCCESS_EXECUTION, data=result)
+
+ connection.commit()
+ return ExecutionResult(is_successful=True, code=CommonCode.SUCCESS_EXECUTION, data=cursor.rowcount)
+ except (AttributeError, driver_module.OperationalError, driver_module.DatabaseError):
+ return BasicResult(is_successful=False, code=CommonCode.FAIL_CONNECT_DB)
+ except Exception:
+ return BasicResult(is_successful=False, code=CommonCode.FAIL)
+ finally:
+ if connection:
+ connection.close()
+
+ def execution_test(
+ self,
+ query: str,
+ driver_module: Any,
+ **kwargs: Any,
+ ) -> QueryTestResult:
+ """
+ 쿼리가 문법적으로 유효한지 테스트합니다.
+ 실제 데이터는 변경되지 않습니다. (모든 작업은 롤백됩니다).
+ """
+ connection = None
+ try:
+ connection = self._connect(driver_module, **kwargs)
+ cursor = connection.cursor()
+ cursor.execute(query)
+
+ if not self._is_select_query(query):
+ return QueryTestResult(is_successful=True, code=CommonCode.SUCCESS_EXECUTION_TEST, data=True)
+
+ rows = cursor.fetchall()
+ if cursor.description:
+ columns = [desc[0] for desc in cursor.description]
+ data = [dict(zip(columns, row, strict=False)) for row in rows]
+ else:
+ columns = []
+ data = []
+
+ result = {"columns": columns, "data": data}
+ return QueryTestResult(is_successful=True, code=CommonCode.SUCCESS_EXECUTION, data=result)
+ except (AttributeError, driver_module.OperationalError, driver_module.DatabaseError) as e:
+ return QueryTestResult(is_successful=False, code=CommonCode.FAIL_CONNECT_DB, data=str(e))
+ except Exception as e:
+ return QueryTestResult(is_successful=False, code=CommonCode.FAIL, data=str(e))
+ finally:
+ if connection:
+ connection.rollback()
+ connection.close()
+
+ def create_query_history(
+ self,
+ sql: str,
+ data: tuple,
+ query: str,
+ ) -> InsertLocalDBResult:
+ """
+ 쿼리 실행 결과를 저장합니다.
+ """
+ db_path = get_db_path()
+ connection = None
+ try:
+ connection = sqlite3.connect(db_path)
+ cursor = connection.cursor()
+ cursor.execute(sql, data)
+ connection.commit()
+
+ return ExecutionResult(is_successful=True, code=CommonCode.SUCCESS_EXECUTION, data=query)
+ except sqlite3.Error as e:
+ raise APIException(CommonCode.FAIL_CONNECT_DB) from e
+ except Exception as e:
+ raise APIException(CommonCode.FAIL_CREATE_QUERY) from e
+ finally:
+ if connection:
+ connection.close()
+
+ def find_query_history(self, chat_tab_id: int) -> SelectQueryHistoryResult:
+ """
+        chat_tab_id에 해당하는 최근 쿼리 히스토리를 최대 5건 조회합니다.
+ """
+ db_path = get_db_path()
+ connection = None
+ try:
+ connection = sqlite3.connect(db_path)
+ connection.row_factory = sqlite3.Row
+ cursor = connection.cursor()
+
+ sql = """
+ SELECT qh.*
+ FROM query_history AS qh
+ LEFT JOIN chat_message AS cm ON qh.chat_message_id = cm.id
+ WHERE cm.chat_tab_id = ?
+ ORDER BY qh.created_at DESC
+ LIMIT 5;
+ """
+ data = (chat_tab_id,)
+
+ cursor.execute(sql, data)
+ rows = cursor.fetchall()
+
+ columns = [desc[0] for desc in cursor.description]
+ data = [dict(zip(columns, row, strict=False)) for row in rows]
+ result = {"columns": columns, "data": data}
+
+ return SelectQueryHistoryResult(is_successful=True, code=CommonCode.SUCCESS_FIND_QUERY_HISTORY, data=result)
+ except sqlite3.Error:
+ return SelectQueryHistoryResult(is_successful=False, code=CommonCode.FAIL_CONNECT_DB)
+ except Exception:
+ return SelectQueryHistoryResult(is_successful=False, code=CommonCode.FAIL)
+ finally:
+ if connection:
+ connection.close()
+
+ # ─────────────────────────────
+ # DB 연결 메서드
+ # ─────────────────────────────
+ def _connect(self, driver_module: Any, **kwargs):
+ if driver_module is oracledb:
+ if kwargs.get("user", "").lower() == "sys":
+ kwargs["mode"] = oracledb.AUTH_MODE_SYSDBA
+ return driver_module.connect(**kwargs)
+ elif "connection_string" in kwargs:
+ return driver_module.connect(kwargs["connection_string"])
+ elif "db_name" in kwargs:
+ return driver_module.connect(kwargs["db_name"])
+ else:
+ return driver_module.connect(**kwargs)
+
+ def _is_select_query(self, query_text: str) -> bool:
+ for stmt in query_text.split(";"):
+ cleaned_stmt = stmt.strip().lower()
+ if cleaned_stmt and not cleaned_stmt.startswith("--") and cleaned_stmt.startswith("select"):
+ return True
+ return False
+
+
+query_repository = QueryRepository()
diff --git a/app/repository/user_db_repository.py b/app/repository/user_db_repository.py
new file mode 100644
index 0000000..d30d7c7
--- /dev/null
+++ b/app/repository/user_db_repository.py
@@ -0,0 +1,602 @@
+import sqlite3
+from typing import Any
+
+import oracledb
+
+from app.core.enum.db_driver import DBTypesEnum
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+from app.core.utils import get_db_path
+from app.schemas.user_db.db_profile_model import AllDBProfileInfo, UpdateOrCreateDBProfile
+from app.schemas.user_db.result_model import (
+ AllDBProfileResult,
+ BasicResult,
+ ChangeProfileResult,
+ ColumnInfo,
+ ColumnListResult,
+ ConstraintInfo,
+ DBProfile,
+ IndexInfo,
+ SchemaListResult,
+ TableListResult,
+)
+
+
+class UserDbRepository:
+ def connection_test(self, driver_module: Any, **kwargs: Any) -> BasicResult:
+ """
+ DB 드라이버와 연결에 필요한 매개변수들을 받아 연결을 테스트합니다.
+ """
+ connection = None
+ try:
+ connection = self._connect(driver_module, **kwargs)
+ return BasicResult(is_successful=True, code=CommonCode.SUCCESS_USER_DB_CONNECT_TEST)
+ except (AttributeError, driver_module.OperationalError, driver_module.DatabaseError):
+ return BasicResult(is_successful=False, code=CommonCode.FAIL_CONNECT_DB)
+ except Exception:
+ return BasicResult(is_successful=False, code=CommonCode.FAIL)
+ finally:
+ if connection:
+ connection.close()
+
+ def create_profile(self, sql: str, data: tuple, create_db_info: UpdateOrCreateDBProfile) -> ChangeProfileResult:
+ """
+ DB 드라이버와 연결에 필요한 매개변수들을 받아 저장합니다.
+ """
+ db_path = get_db_path()
+ connection = None
+ try:
+ connection = sqlite3.connect(db_path)
+ cursor = connection.cursor()
+ cursor.execute(sql, data)
+ connection.commit()
+ name = create_db_info.view_name if create_db_info.view_name else create_db_info.type
+ return ChangeProfileResult(is_successful=True, code=CommonCode.SUCCESS_SAVE_PROFILE, view_name=name)
+ except sqlite3.Error:
+ return ChangeProfileResult(is_successful=False, code=CommonCode.FAIL_SAVE_PROFILE)
+ except Exception:
+ return ChangeProfileResult(is_successful=False, code=CommonCode.FAIL_SAVE_PROFILE)
+ finally:
+ if connection:
+ connection.close()
+
+ def update_profile(self, sql: str, data: tuple, update_db_info: UpdateOrCreateDBProfile) -> ChangeProfileResult:
+ """
+ DB 드라이버와 연결에 필요한 매개변수들을 받아 업데이트합니다.
+ """
+ db_path = get_db_path()
+ connection = None
+ try:
+ connection = sqlite3.connect(db_path)
+ cursor = connection.cursor()
+ cursor.execute(sql, data)
+ connection.commit()
+ name = update_db_info.view_name if update_db_info.view_name else update_db_info.type
+ return ChangeProfileResult(is_successful=True, code=CommonCode.SUCCESS_UPDATE_PROFILE, view_name=name)
+ except sqlite3.Error:
+ return ChangeProfileResult(is_successful=False, code=CommonCode.FAIL_UPDATE_PROFILE)
+ except Exception:
+ return ChangeProfileResult(is_successful=False, code=CommonCode.FAIL_UPDATE_PROFILE)
+ finally:
+ if connection:
+ connection.close()
+
+ def delete_profile(
+ self,
+ sql: str,
+ data: tuple,
+ profile_id: str,
+ ) -> ChangeProfileResult:
+ """
+ DB 드라이버와 연결에 필요한 매개변수들을 받아 삭제합니다.
+ """
+ db_path = get_db_path()
+ connection = None
+ try:
+ connection = sqlite3.connect(db_path)
+ cursor = connection.cursor()
+ cursor.execute(sql, data)
+ connection.commit()
+ return ChangeProfileResult(is_successful=True, code=CommonCode.SUCCESS_DELETE_PROFILE, view_name=profile_id)
+ except sqlite3.Error:
+ return ChangeProfileResult(is_successful=False, code=CommonCode.FAIL_DELETE_PROFILE)
+ except Exception:
+ return ChangeProfileResult(is_successful=False, code=CommonCode.FAIL_DELETE_PROFILE)
+ finally:
+ if connection:
+ connection.close()
+
+ def find_all_profile(self, sql: str) -> AllDBProfileResult:
+ """
+ 전달받은 쿼리를 실행하여 모든 DB 연결 정보를 조회합니다.
+ """
+ db_path = get_db_path()
+ connection = None
+ try:
+ connection = sqlite3.connect(db_path)
+ connection.row_factory = sqlite3.Row
+ cursor = connection.cursor()
+ cursor.execute(sql)
+ rows = cursor.fetchall()
+ profiles = [DBProfile(**row) for row in rows]
+ return AllDBProfileResult(is_successful=True, code=CommonCode.SUCCESS_FIND_PROFILE, profiles=profiles)
+ except sqlite3.Error:
+ return AllDBProfileResult(is_successful=False, code=CommonCode.FAIL_FIND_PROFILE)
+ except Exception:
+ return AllDBProfileResult(is_successful=False, code=CommonCode.FAIL_FIND_PROFILE)
+ finally:
+ if connection:
+ connection.close()
+
+ def find_profile(self, sql: str, data: tuple) -> AllDBProfileInfo:
+ """
+ 전달받은 쿼리를 실행하여 특정 DB 연결 정보를 조회합니다.
+ """
+ db_path = get_db_path()
+ connection = None
+ try:
+ connection = sqlite3.connect(db_path)
+ connection.row_factory = sqlite3.Row
+ cursor = connection.cursor()
+ cursor.execute(sql, data)
+ row = cursor.fetchone()
+
+ if not row:
+ raise APIException(CommonCode.NO_DB_PROFILE_FOUND)
+ return AllDBProfileInfo(**dict(row))
+ except APIException:
+ raise
+ except sqlite3.Error as e:
+ raise APIException(CommonCode.FAIL_FIND_PROFILE) from e
+ except Exception as e:
+ raise APIException(CommonCode.FAIL) from e
+ finally:
+ if connection:
+ connection.close()
+
+ # ─────────────────────────────
+ # 스키마 조회
+ # ─────────────────────────────
+ def find_schemas(self, driver_module: Any, schema_query: str, **kwargs: Any) -> SchemaListResult:
+ connection = None
+ try:
+ connection = self._connect(driver_module, **kwargs)
+ cursor = connection.cursor()
+
+ if not schema_query:
+ return SchemaListResult(is_successful=True, code=CommonCode.SUCCESS_FIND_SCHEMAS, schemas=["main"])
+
+ cursor.execute(schema_query)
+ schemas = [row[0] for row in cursor.fetchall()]
+
+ return SchemaListResult(is_successful=True, code=CommonCode.SUCCESS_FIND_SCHEMAS, schemas=schemas)
+ except Exception:
+ return SchemaListResult(is_successful=False, code=CommonCode.FAIL_FIND_SCHEMAS, schemas=[])
+ finally:
+ if connection:
+ connection.close()
+
+ # ─────────────────────────────
+ # 테이블 조회
+ # ─────────────────────────────
+ def find_tables(self, driver_module: Any, table_query: str, schema_name: str, **kwargs: Any) -> TableListResult:
+ connection = None
+ try:
+ connection = self._connect(driver_module, **kwargs)
+ cursor = connection.cursor()
+
+ if "%s" in table_query or "?" in table_query:
+ cursor.execute(table_query, (schema_name,))
+ elif ":owner" in table_query:
+ cursor.execute(table_query, {"owner": schema_name})
+ else:
+ cursor.execute(table_query)
+
+ tables = [row[0] for row in cursor.fetchall()]
+
+ return TableListResult(is_successful=True, code=CommonCode.SUCCESS_FIND_TABLES, tables=tables)
+ except Exception:
+ return TableListResult(is_successful=False, code=CommonCode.FAIL_FIND_TABLES, tables=[])
+ finally:
+ if connection:
+ connection.close()
+
+ # ─────────────────────────────
+ # 컬럼 조회
+ # ─────────────────────────────
+ def find_columns(
+ self, driver_module: Any, column_query: str, schema_name: str, db_type, table_name: str, **kwargs: Any
+ ) -> ColumnListResult:
+ connection = None
+ try:
+ connection = self._connect(driver_module, **kwargs)
+ cursor = connection.cursor()
+
+ if db_type == DBTypesEnum.sqlite.name:
+ columns = self._find_columns_for_sqlite(cursor, table_name)
+ elif db_type == DBTypesEnum.postgresql.name:
+ columns = self._find_columns_for_postgresql(cursor, schema_name, table_name)
+ else:
+ columns = self._find_columns_for_general(cursor, column_query, schema_name, table_name)
+
+ return ColumnListResult(is_successful=True, code=CommonCode.SUCCESS_FIND_COLUMNS, columns=columns)
+ except Exception:
+ return ColumnListResult(is_successful=False, code=CommonCode.FAIL_FIND_COLUMNS, columns=[])
+ finally:
+ if connection:
+ connection.close()
+
+ def _find_columns_for_sqlite(self, cursor: Any, table_name: str) -> list[ColumnInfo]:
+ pragma_sql = f"PRAGMA table_info('{table_name}')"
+ cursor.execute(pragma_sql)
+ columns_raw = cursor.fetchall()
+ # SQLite는 pragma에서 순서(cid)를 반환하지만, ordinal_position은 1부터 시작하는 표준이므로 +1
+ return [
+ ColumnInfo(
+ name=c[1],
+ type=c[2],
+ nullable=(c[3] == 0),
+ default=c[4],
+ comment=None,
+ is_pk=(c[5] == 1),
+ ordinal_position=c[0] + 1,
+ )
+ for c in columns_raw
+ ]
+
+ def _find_columns_for_postgresql(self, cursor: Any, schema_name: str, table_name: str) -> list[ColumnInfo]:
+ sql = """
+ SELECT
+ c.column_name,
+ c.udt_name,
+ c.is_nullable,
+ c.column_default,
+ c.ordinal_position,
+ (SELECT pg_catalog.col_description(cls.oid, c.dtd_identifier::int)
+ FROM pg_catalog.pg_class cls
+ JOIN pg_catalog.pg_namespace n ON n.oid = cls.relnamespace
+ WHERE cls.relname = c.table_name AND n.nspname = c.table_schema) as comment,
+ CASE WHEN kcu.column_name IS NOT NULL THEN TRUE ELSE FALSE END as is_pk
+ FROM
+ information_schema.columns c
+ LEFT JOIN information_schema.key_column_usage kcu
+ ON c.table_schema = kcu.table_schema
+ AND c.table_name = kcu.table_name
+ AND c.column_name = kcu.column_name
+ AND kcu.constraint_name IN (
+ SELECT constraint_name
+ FROM information_schema.table_constraints
+ WHERE table_schema = %s
+ AND table_name = %s
+ AND constraint_type = 'PRIMARY KEY'
+ )
+ WHERE
+ c.table_schema = %s AND c.table_name = %s
+ ORDER BY
+ c.ordinal_position;
+ """
+ cursor.execute(sql, (schema_name, table_name, schema_name, table_name))
+ columns_raw = cursor.fetchall()
+ return [
+ ColumnInfo(
+ name=c[0],
+ type=c[1],
+ nullable=(c[2] == "YES"),
+ default=c[3],
+ ordinal_position=c[4],
+ comment=c[5],
+ is_pk=c[6],
+ )
+ for c in columns_raw
+ ]
+
+ def _find_columns_for_general(
+ self, cursor: Any, column_query: str, schema_name: str, table_name: str
+ ) -> list[ColumnInfo]:
+ if "%s" in column_query or "?" in column_query:
+ cursor.execute(column_query, (schema_name, table_name))
+ elif ":owner" in column_query and ":table" in column_query:
+ owner_bind = schema_name.upper() if schema_name else schema_name
+ table_bind = table_name.upper() if table_name else table_name
+ try:
+ cursor.execute(column_query, {"owner": owner_bind, "table": table_bind})
+ except Exception:
+ try:
+ pos_query = column_query.replace(":owner", ":1").replace(":table", ":2")
+ cursor.execute(pos_query, [owner_bind, table_bind])
+ except Exception as e:
+ raise APIException(CommonCode.FAIL) from e
+ else:
+ cursor.execute(column_query)
+
+ columns = []
+ for c in cursor.fetchall():
+ data_type = c[1]
+ if c[6] is not None:
+ data_type = f"{data_type}({c[6]})"
+ elif c[7] is not None and c[8] is not None:
+ data_type = f"{data_type}({c[7]}, {c[8]})"
+
+ columns.append(
+ ColumnInfo(
+ name=c[0],
+ type=data_type,
+ nullable=(c[2] in ["YES", "Y", True]),
+ default=c[3],
+ comment=c[4] if len(c) > 4 else None,
+ is_pk=(c[5] in ["PRI", True] if len(c) > 5 else False),
+ )
+ )
+ return columns
+
+ def find_constraints(
+ self, driver_module: Any, db_type: str, schema_name: str, table_name: str, **kwargs: Any
+ ) -> list[ConstraintInfo]:
+ """
+ 테이블의 제약 조건 정보를 조회합니다.
+ - 현재는 SQLite, PostgreSQL만 지원합니다.
+ - 실패 시 DB 드라이버의 예외를 직접 발생시킵니다.
+ """
+ connection = None
+ try:
+ connection = self._connect(driver_module, **kwargs)
+ cursor = connection.cursor()
+
+ if db_type == DBTypesEnum.sqlite.name:
+ return self._find_constraints_for_sqlite(cursor, table_name)
+ elif db_type == DBTypesEnum.postgresql.name:
+ return self._find_constraints_for_postgresql(cursor, schema_name, table_name)
+ # elif db_type == ...:
+ return []
+ finally:
+ if connection:
+ connection.close()
+
+ def _find_constraints_for_sqlite(self, cursor: Any, table_name: str) -> list[ConstraintInfo]:
+ constraints = []
+ fk_list_sql = f"PRAGMA foreign_key_list('{table_name}')"
+ cursor.execute(fk_list_sql)
+ fks = cursor.fetchall()
+
+ # Foreign Key 정보를 그룹화
+ fk_groups = {}
+ for fk in fks:
+ fk_id = fk[0]
+ if fk_id not in fk_groups:
+ fk_groups[fk_id] = {"referenced_table": fk[2], "columns": [], "referenced_columns": []}
+ fk_groups[fk_id]["columns"].append(fk[3])
+ fk_groups[fk_id]["referenced_columns"].append(fk[4])
+
+ for _, group in fk_groups.items():
+ constraints.append(
+ ConstraintInfo(
+ name=f"fk_{table_name}_{'_'.join(group['columns'])}",
+ type="FOREIGN KEY",
+ columns=group["columns"],
+ referenced_table=group["referenced_table"],
+ referenced_columns=group["referenced_columns"],
+ )
+ )
+ return constraints
+
+    def _find_constraints_for_postgresql(self, cursor: Any, schema_name: str, table_name: str) -> list[ConstraintInfo]:
+        """Return PK/UNIQUE/FK/CHECK constraints for a PostgreSQL table.
+
+        Joins information_schema views so each result row carries one
+        (constraint, column) pair; rows are then grouped by constraint name.
+        Implicit NOT NULL CHECK constraints are filtered out because
+        nullability is already reported per-column.
+        """
+        sql = """
+            SELECT
+                tc.constraint_name,
+                tc.constraint_type,
+                kcu.column_name,
+                rc.update_rule,
+                rc.delete_rule,
+                ccu.table_name AS foreign_table_name,
+                ccu.column_name AS foreign_column_name,
+                chk.check_clause
+            FROM
+                information_schema.table_constraints tc
+            LEFT JOIN information_schema.key_column_usage kcu
+                ON tc.constraint_name = kcu.constraint_name AND tc.table_schema = kcu.table_schema AND tc.table_name = kcu.table_name
+            LEFT JOIN information_schema.referential_constraints rc
+                ON tc.constraint_name = rc.constraint_name AND tc.table_schema = rc.constraint_schema
+            LEFT JOIN information_schema.constraint_column_usage ccu
+                ON rc.unique_constraint_name = ccu.constraint_name AND rc.unique_constraint_schema = ccu.table_schema
+            LEFT JOIN information_schema.check_constraints chk
+                ON tc.constraint_name = chk.constraint_name AND tc.table_schema = chk.constraint_schema
+            WHERE
+                tc.table_schema = %s AND tc.table_name = %s;
+        """
+        cursor.execute(sql, (schema_name, table_name))
+        raw_constraints = cursor.fetchall()
+
+        constraint_map = {}
+        for row in raw_constraints:
+            # Filter out 'NOT NULL' constraints which are handled by `is_nullable` in column info
+            const_type = row[1]
+            check_clause = row[7]
+            if const_type == "CHECK" and check_clause and "IS NOT NULL" in check_clause.upper():
+                continue
+
+            (name, _, column, on_update, on_delete, ref_table, ref_column, check_expr) = row
+            # First row for a constraint establishes its scalar attributes;
+            # subsequent rows only contribute additional column names.
+            if name not in constraint_map:
+                constraint_map[name] = {
+                    "type": const_type,
+                    "columns": [],
+                    "referenced_table": ref_table,
+                    "referenced_columns": [],
+                    "check_expression": check_expr,
+                    "on_update": on_update,
+                    "on_delete": on_delete,
+                }
+            # Dedupe: the joins can repeat (constraint, column) pairs.
+            if column and column not in constraint_map[name]["columns"]:
+                constraint_map[name]["columns"].append(column)
+            if ref_column and ref_column not in constraint_map[name]["referenced_columns"]:
+                constraint_map[name]["referenced_columns"].append(ref_column)
+
+        return [
+            ConstraintInfo(
+                name=name,
+                type=data["type"],
+                columns=data["columns"],
+                referenced_table=data["referenced_table"],
+                referenced_columns=data["referenced_columns"] if data["referenced_columns"] else None,
+                check_expression=data["check_expression"],
+                on_update=data["on_update"],
+                on_delete=data["on_delete"],
+            )
+            for name, data in constraint_map.items()
+        ]
+
+ def find_indexes(
+ self, driver_module: Any, db_type: str, schema_name: str, table_name: str, **kwargs: Any
+ ) -> list[IndexInfo]:
+ """
+ 테이블의 인덱스 정보를 조회합니다.
+ - 실패 시 DB 드라이버의 예외를 직접 발생시킵니다.
+ """
+ connection = None
+ try:
+ connection = self._connect(driver_module, **kwargs)
+ cursor = connection.cursor()
+
+ if db_type == DBTypesEnum.sqlite.name:
+ return self._find_indexes_for_sqlite(cursor, table_name)
+ elif db_type == DBTypesEnum.postgresql.name:
+ return self._find_indexes_for_postgresql(cursor, schema_name, table_name)
+ # elif db_type == ...:
+ return []
+ finally:
+ if connection:
+ connection.close()
+
+    def _find_indexes_for_sqlite(self, cursor: Any, table_name: str) -> list[IndexInfo]:
+        """Return user-defined indexes for a SQLite table.
+
+        Uses PRAGMA index_list / index_info. PRAGMA cannot take bound
+        parameters, so the names are interpolated; they are assumed to come
+        from the catalog, not raw user input.
+        """
+        indexes = []
+        index_list_sql = f"PRAGMA index_list('{table_name}')"
+        cursor.execute(index_list_sql)
+        raw_indexes = cursor.fetchall()
+
+        # index_list row layout: (seq, name, unique, origin, partial).
+        for idx in raw_indexes:
+            index_name = idx[1]
+            is_unique = idx[2] == 1
+
+            # Indexes named "sqlite_autoindex_*" are auto-created for
+            # PK/UNIQUE definitions and are excluded from the result.
+            if index_name.startswith("sqlite_autoindex_"):
+                continue
+
+            index_info_sql = f"PRAGMA index_info('{index_name}')"
+            cursor.execute(index_info_sql)
+            # index_info row layout: (seqno, cid, name) — take the column name.
+            index_columns = [row[2] for row in cursor.fetchall()]
+
+            # Skip indexes with no resolvable columns (e.g. expression indexes).
+            if index_columns:
+                indexes.append(IndexInfo(name=index_name, columns=index_columns, is_unique=is_unique))
+        return indexes
+
+    def _find_indexes_for_postgresql(self, cursor: Any, schema_name: str, table_name: str) -> list[IndexInfo]:
+        """Return non-PK indexes for a PostgreSQL table from pg_catalog.
+
+        NOTE(review): `a.attnum = ANY(ix.indkey)` with ORDER BY a.attnum
+        yields columns in table order, not necessarily index key order, and
+        expression-index columns (indkey entry 0) are not resolved — confirm
+        whether key order matters to consumers.
+        """
+        sql = """
+            SELECT
+                i.relname as index_name,
+                a.attname as column_name,
+                ix.indisunique as is_unique,
+                ix.indisprimary as is_primary
+            FROM
+                pg_class t,
+                pg_class i,
+                pg_index ix,
+                pg_attribute a,
+                pg_namespace n
+            WHERE
+                t.oid = ix.indrelid
+                and i.oid = ix.indexrelid
+                and a.attrelid = t.oid
+                and a.attnum = ANY(ix.indkey)
+                and t.relkind = 'r'
+                and n.oid = t.relnamespace
+                and n.nspname = %s
+                and t.relname = %s
+            ORDER BY
+                i.relname, a.attnum;
+        """
+        cursor.execute(sql, (schema_name, table_name))
+        raw_indexes = cursor.fetchall()
+
+        # Group one row per (index, column) into one entry per index.
+        index_map = {}
+        for row in raw_indexes:
+            index_name, column_name, is_unique, is_primary = row
+            if is_primary:  # Exclude indexes created for PRIMARY KEY constraints
+                continue
+            if index_name not in index_map:
+                index_map[index_name] = {"columns": [], "is_unique": is_unique}
+            index_map[index_name]["columns"].append(column_name)
+
+        return [
+            IndexInfo(name=name, columns=data["columns"], is_unique=data["is_unique"])
+            for name, data in index_map.items()
+        ]
+
+ def find_sample_rows(
+ self, driver_module: Any, db_type: str, schema_name: str, table_names: list[str], **kwargs: Any
+ ) -> dict[str, list[dict[str, Any]]]:
+ """
+ 주어진 테이블 목록에 대해 상위 3개의 샘플 행을 조회합니다.
+ - 실패 시 DB 드라이버의 예외를 직접 발생시킵니다.
+ """
+ connection = None
+ try:
+ connection = self._connect(driver_module, **kwargs)
+ cursor = connection.cursor()
+
+ if db_type == DBTypesEnum.sqlite.name:
+ return self._find_sample_rows_for_sqlite(cursor, table_names)
+ elif db_type == DBTypesEnum.postgresql.name:
+ return self._find_sample_rows_for_postgresql(cursor, schema_name, table_names)
+ # elif db_type == ...:
+ return {table_name: [] for table_name in table_names}
+ finally:
+ if connection:
+ connection.close()
+
+ def _find_sample_rows_for_sqlite(self, cursor: Any, table_names: list[str]) -> dict[str, list[dict[str, Any]]]:
+ sample_rows_map = {}
+ for table_name in table_names:
+ try:
+ # 컬럼명 조회를 위해 PRAGMA 사용
+ cursor.execute(f"PRAGMA table_info('{table_name}')")
+ columns = [row[1] for row in cursor.fetchall()]
+
+ # 데이터 조회
+ cursor.execute(f'SELECT * FROM "{table_name}" LIMIT 3')
+ rows = cursor.fetchall()
+ sample_rows_map[table_name] = [dict(zip(columns, row, strict=False)) for row in rows]
+ except Exception:
+ sample_rows_map[table_name] = [] # 오류 발생 시 빈 리스트 할당
+ return sample_rows_map
+
+ def _find_sample_rows_for_postgresql(
+ self, cursor: Any, schema_name: str, table_names: list[str]
+ ) -> dict[str, list[dict[str, Any]]]:
+ sample_rows_map = {}
+ for table_name in table_names:
+ try:
+ # PostgreSQL은 cursor.description을 통해 컬럼명을 바로 얻을 수 있음
+ cursor.execute(f'SELECT * FROM "{schema_name}"."{table_name}" LIMIT 3')
+ columns = [desc[0] for desc in cursor.description]
+ rows = cursor.fetchall()
+ sample_rows_map[table_name] = [dict(zip(columns, row, strict=False)) for row in rows]
+ except Exception:
+ sample_rows_map[table_name] = []
+ return sample_rows_map
+
+ # ─────────────────────────────
+ # DB 연결 메서드
+ # ─────────────────────────────
+ def _connect(self, driver_module: Any, **kwargs):
+ if driver_module is oracledb:
+ if kwargs.get("user", "").lower() == "sys":
+ kwargs["mode"] = oracledb.AUTH_MODE_SYSDBA
+ return driver_module.connect(**kwargs)
+ elif "connection_string" in kwargs:
+ return driver_module.connect(kwargs["connection_string"])
+ elif "db_name" in kwargs:
+ return driver_module.connect(kwargs["db_name"])
+ else:
+ return driver_module.connect(**kwargs)
+
+
+# Module-level singleton shared by the service layer.
+user_db_repository = UserDbRepository()
diff --git a/app/schemas/annotation/ai_model.py b/app/schemas/annotation/ai_model.py
new file mode 100644
index 0000000..f902699
--- /dev/null
+++ b/app/schemas/annotation/ai_model.py
@@ -0,0 +1,64 @@
+from typing import Any
+
+from pydantic import BaseModel, Field
+
+
+class AIColumnInfo(BaseModel):
+    """Column metadata sent to the AI annotation service."""
+
+    column_name: str = Field(..., description="컬럼 이름")
+    data_type: str = Field(..., description="데이터 타입")
+    is_pk: bool = Field(False, description="기본 키(Primary Key) 여부")
+    is_nullable: bool = Field(..., description="NULL 허용 여부")
+    default_value: Any | None = Field(None, description="기본값")
+
+
+class AIConstraintInfo(BaseModel):
+    """Constraint metadata sent to the AI annotation service (FKs excluded)."""
+
+    name: str | None = Field(None, description="제약 조건 이름")
+    type: str = Field(..., description="제약 조건 타입 (PRIMARY KEY, UNIQUE, CHECK)")
+    columns: list[str] = Field(..., description="제약 조건에 포함된 컬럼 목록")
+    check_expression: str | None = Field(None, description="CHECK 제약 조건 표현식")
+
+
+class AIIndexInfo(BaseModel):
+    """Index metadata sent to the AI annotation service."""
+
+    name: str | None = Field(None, description="인덱스 이름")
+    columns: list[str] = Field(..., description="인덱스에 포함된 컬럼 목록")
+    is_unique: bool = Field(False, description="고유 인덱스 여부")
+
+
+class AITableInfo(BaseModel):
+ """AI 요청을 위한 테이블 정보 모델"""
+
+ table_name: str = Field(..., description="테이블 이름")
+ columns: list[AIColumnInfo] = Field(..., description="컬럼 목록")
+ constraints: list[AIConstraintInfo] = Field([], description="제약 조건 목록 (FK 제외)")
+ indexes: list[AIIndexInfo] = Field([], description="인덱스 목록")
+ sample_rows: list[dict[str, Any]] = Field([], description="테이블 샘플 데이터")
+
+
+class AIRelationship(BaseModel):
+    """Foreign-key relationship metadata sent to the AI annotation service."""
+
+    from_table: str = Field(..., description="관계를 시작하는 테이블")
+    from_columns: list[str] = Field(..., description="관계를 시작하는 컬럼")
+    to_table: str = Field(..., description="관계를 맺는 대상 테이블")
+    to_columns: list[str] = Field(..., description="관계를 맺는 대상 컬럼")
+
+
+class AIDatabaseInfo(BaseModel):
+ """AI 요청을 위한 데이터베이스 정보 모델"""
+
+ database_name: str = Field(..., description="데이터베이스 이름")
+ tables: list[AITableInfo] = Field(..., description="테이블 목록")
+ relationships: list[AIRelationship] = Field([], description="관계(FK) 목록")
+
+
+class AIAnnotationRequest(BaseModel):
+    """Top-level request model for AI annotation generation."""
+
+    dbms_type: str = Field(..., description="DBMS 종류")
+    databases: list[AIDatabaseInfo] = Field(..., description="데이터베이스 목록")
diff --git a/app/schemas/annotation/base_model.py b/app/schemas/annotation/base_model.py
new file mode 100644
index 0000000..970ea5e
--- /dev/null
+++ b/app/schemas/annotation/base_model.py
@@ -0,0 +1,25 @@
+from datetime import datetime
+
+from pydantic import BaseModel, Field
+
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+
+
+class AnnotationBase(BaseModel):
+    """Base model for annotation schemas: id plus audit timestamps."""
+
+    id: str = Field(..., description="고유 ID")
+    created_at: datetime = Field(..., description="생성 시각")
+    updated_at: datetime = Field(..., description="마지막 수정 시각")
+
+
+class RequestBase(BaseModel):
+ """요청 스키마의 기본 모델"""
+
+ def validate_required_fields(self, fields: list[str]):
+ """필수 필드가 비어있는지 검사하는 공통 유효성 검사 메서드"""
+ for field_name in fields:
+ value = getattr(self, field_name, None)
+ if not value or (isinstance(value, str) and not value.strip()):
+ raise APIException(CommonCode.INVALID_PARAMETER, detail=f"'{field_name}' 필드는 비워둘 수 없습니다.")
diff --git a/app/schemas/annotation/db_model.py b/app/schemas/annotation/db_model.py
new file mode 100644
index 0000000..8a73aa9
--- /dev/null
+++ b/app/schemas/annotation/db_model.py
@@ -0,0 +1,65 @@
+from pydantic import Field
+
+from app.core.enum.constraint_type import ConstraintTypeEnum
+from app.schemas.annotation.base_model import AnnotationBase
+
+
+class DatabaseAnnotationInDB(AnnotationBase):
+    """DB row shape for a database-level annotation."""
+
+    db_profile_id: str
+    database_name: str
+    description: str | None = Field(None, description="AI가 생성한 설명")
+
+
+class TableAnnotationInDB(AnnotationBase):
+    """DB row shape for a table-level annotation."""
+
+    database_annotation_id: str
+    table_name: str
+    description: str | None = Field(None, description="AI가 생성한 설명")
+
+
+class ColumnAnnotationInDB(AnnotationBase):
+    """DB row shape for a column-level annotation."""
+
+    table_annotation_id: str
+    column_name: str
+    data_type: str | None = None
+    # 0/1 integer flag as stored (1 = nullable) — presumably mirrors an
+    # INTEGER column in the local DB; confirm against the table schema.
+    is_nullable: int = 1
+    default_value: str | None = None
+    check_expression: str | None = None
+    ordinal_position: int | None = None
+    description: str | None = Field(None, description="AI가 생성한 설명")
+
+
+class TableRelationshipInDB(AnnotationBase):
+    """DB row shape for a relationship (FK) between two annotated tables."""
+
+    database_annotation_id: str
+    from_table_id: str
+    to_table_id: str
+    relationship_type: str
+    description: str | None = Field(None, description="AI가 생성한 설명")
+
+
+class TableConstraintInDB(AnnotationBase):
+    """DB row shape for a table constraint annotation."""
+
+    table_annotation_id: str
+    constraint_type: ConstraintTypeEnum
+    name: str | None = None
+    description: str | None = None
+    expression: str | None = None
+    ref_table: str | None = None
+    on_update_action: str | None = None
+    on_delete_action: str | None = None
+
+
+class ConstraintColumnInDB(AnnotationBase):
+    """DB row shape linking a constraint to one of its columns."""
+
+    constraint_id: str
+    column_annotation_id: str
+    position: int | None = None
+    referenced_column_name: str | None = None
+
+
+class IndexAnnotationInDB(AnnotationBase):
+    """DB row shape for an index annotation."""
+
+    table_annotation_id: str
+    name: str | None = None
+    # 0/1 integer flag as stored (1 = unique index).
+    is_unique: int = 0
+
+
+class IndexColumnInDB(AnnotationBase):
+    """DB row shape linking an index to one of its columns."""
+
+    index_id: str
+    column_annotation_id: str
+    position: int | None = None
diff --git a/app/schemas/annotation/request_model.py b/app/schemas/annotation/request_model.py
new file mode 100644
index 0000000..1c5c40e
--- /dev/null
+++ b/app/schemas/annotation/request_model.py
@@ -0,0 +1,12 @@
+from pydantic import Field
+
+from app.schemas.annotation.base_model import RequestBase
+
+
+class AnnotationCreateRequest(RequestBase):
+    """Annotation creation request schema."""
+
+    db_profile_id: str = Field(..., description="어노테이션을 생성할 DB 프로필의 고유 ID")
+
+    def validate(self):
+        # Delegates to RequestBase: rejects a missing/blank db_profile_id.
+        self.validate_required_fields(["db_profile_id"])
diff --git a/app/schemas/annotation/response_model.py b/app/schemas/annotation/response_model.py
new file mode 100644
index 0000000..5ec4556
--- /dev/null
+++ b/app/schemas/annotation/response_model.py
@@ -0,0 +1,64 @@
+from datetime import datetime
+
+from pydantic import BaseModel, Field
+
+from app.schemas.annotation.base_model import AnnotationBase
+
+
+# Detail models (used for read endpoints)
+class ColumnAnnotationDetail(BaseModel):
+    """Column annotation detail returned by read endpoints."""
+
+    id: str
+    column_name: str
+    description: str | None = None
+    data_type: str | None = None
+    is_nullable: bool | None = None
+    default_value: str | None = None
+
+
+class ConstraintDetail(BaseModel):
+    """Constraint detail returned by read endpoints."""
+
+    name: str | None = None
+    type: str
+    columns: list[str]
+    description: str | None = None  # may be filled in by the AI
+
+
+class IndexDetail(BaseModel):
+    """Index detail returned by read endpoints."""
+
+    name: str | None = None
+    columns: list[str]
+    is_unique: bool
+    description: str | None = None  # may be filled in by the AI
+
+
+class TableAnnotationDetail(AnnotationBase):
+    """Table annotation detail with nested column/constraint/index details."""
+
+    table_name: str
+    description: str | None = None
+    columns: list[ColumnAnnotationDetail]
+    constraints: list[ConstraintDetail]
+    indexes: list[IndexDetail]
+
+
+class FullAnnotationResponse(AnnotationBase):
+ """전체 어노테이션 상세 정보 응답 스키마"""
+
+ db_profile_id: str = Field(..., description="DB 프로필의 고유 ID")
+ database_name: str = Field(..., description="데이터베이스 이름")
+ description: str | None = Field(None, description="데이터베이스 전체에 대한 설명")
+ tables: list[TableAnnotationDetail] = Field([], description="테이블 어노테이션 목록")
+
+
+# Lightweight create/delete response models.
+# NOTE(review): may turn out to be unnecessary — confirm usage.
+class AnnotationCreationSummary(BaseModel):
+    """Summary response for a successful annotation creation."""
+
+    id: str = Field(..., description="생성된 어노테이션의 고유 ID")
+    db_profile_id: str = Field(..., description="DB 프로필의 고유 ID")
+    database_name: str = Field(..., description="데이터베이스 이름")
+    created_at: datetime = Field(..., description="어노테이션 생성 시각")
+
+
+class AnnotationDeleteResponse(BaseModel):
+    """Response schema for the annotation delete API."""
+
+    id: str = Field(..., description="삭제된 어노테이션의 고유 ID")
+    message: str = Field("성공적으로 삭제되었습니다.", description="삭제 결과 메시지")
diff --git a/app/schemas/api_key/base_model.py b/app/schemas/api_key/base_model.py
new file mode 100644
index 0000000..bb2c3f0
--- /dev/null
+++ b/app/schemas/api_key/base_model.py
@@ -0,0 +1,9 @@
+from pydantic import BaseModel, Field
+
+from app.core.enum.llm_service_info import LLMServiceEnum
+
+
+class APIKeyBase(BaseModel):
+    """Base model inherited by every schema in the API-key domain."""
+
+    service_name: LLMServiceEnum = Field(..., description="외부 서비스 이름")
diff --git a/app/schemas/api_key/create_model.py b/app/schemas/api_key/create_model.py
new file mode 100644
index 0000000..c53e3de
--- /dev/null
+++ b/app/schemas/api_key/create_model.py
@@ -0,0 +1,27 @@
+from pydantic import Field
+
+from app.core.enum.llm_service_info import LLMServiceEnum
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+from app.schemas.api_key.base_model import APIKeyBase
+
+
+class APIKeyCreate(APIKeyBase):
+ """API Key 생성을 위한 스키마"""
+
+ api_key: str = Field(..., description="암호화하여 저장할 실제 API Key")
+
+ def validate_with_service(self) -> None:
+ """서비스 종류에 따라 API Key의 유효성을 검증합니다."""
+ # 1. 기본 형식 검증 (공백 또는 빈 문자열)
+ if not self.api_key or self.api_key.isspace():
+ raise APIException(CommonCode.INVALID_API_KEY_FORMAT)
+
+ # 2. 서비스별 접두사 검증
+ key_prefix_map = {
+ LLMServiceEnum.OPENAI: "sk-",
+ }
+ required_prefix = key_prefix_map.get(self.service_name)
+
+ if required_prefix and not self.api_key.startswith(required_prefix):
+ raise APIException(CommonCode.INVALID_API_KEY_PREFIX)
diff --git a/app/schemas/api_key/db_model.py b/app/schemas/api_key/db_model.py
new file mode 100644
index 0000000..251c95c
--- /dev/null
+++ b/app/schemas/api_key/db_model.py
@@ -0,0 +1,15 @@
+from datetime import datetime
+
+from app.schemas.api_key.base_model import APIKeyBase
+
+
+class APIKeyInDB(APIKeyBase):
+    """DB row shape for a stored API key (internal use)."""
+
+    id: str
+    api_key: str  # holds the *encrypted* key in the DB model
+    created_at: datetime
+    updated_at: datetime
+
+    # NOTE(review): Pydantic v2 prefers `model_config = ConfigDict(...)`;
+    # `class Config` still works but is deprecated — confirm project version.
+    class Config:
+        from_attributes = True
diff --git a/app/schemas/api_key/decrypted_response_model.py b/app/schemas/api_key/decrypted_response_model.py
new file mode 100644
index 0000000..65c730e
--- /dev/null
+++ b/app/schemas/api_key/decrypted_response_model.py
@@ -0,0 +1,7 @@
+from pydantic import BaseModel, Field
+
+
+class DecryptedAPIKeyResponse(BaseModel):
+    """Response schema carrying the decrypted API key."""
+
+    api_key: str = Field(..., description="복호화된 실제 API Key")
diff --git a/app/schemas/api_key/response_model.py b/app/schemas/api_key/response_model.py
new file mode 100644
index 0000000..236c3b1
--- /dev/null
+++ b/app/schemas/api_key/response_model.py
@@ -0,0 +1,11 @@
+from datetime import datetime
+
+from app.schemas.api_key.base_model import APIKeyBase
+
+
+class APIKeyResponse(APIKeyBase):
+    """API response schema (never exposes the key material itself)."""
+
+    id: str
+    created_at: datetime
+    updated_at: datetime
diff --git a/app/schemas/api_key/update_model.py b/app/schemas/api_key/update_model.py
new file mode 100644
index 0000000..89f2b16
--- /dev/null
+++ b/app/schemas/api_key/update_model.py
@@ -0,0 +1,16 @@
+from pydantic import BaseModel, Field
+
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+
+
+class APIKeyUpdate(BaseModel):
+    """Schema for replacing an existing API key."""
+
+    api_key: str = Field(..., description="새로운 API Key")
+
+    def validate_with_api_key(self) -> None:
+        """Validate the new key's basic shape (non-empty, non-whitespace)."""
+        # Reject empty or whitespace-only keys.
+        if not self.api_key or self.api_key.isspace():
+            raise APIException(CommonCode.INVALID_API_KEY_FORMAT)
diff --git a/app/schemas/chat_message/base_model.py b/app/schemas/chat_message/base_model.py
new file mode 100644
index 0000000..049770c
--- /dev/null
+++ b/app/schemas/chat_message/base_model.py
@@ -0,0 +1,24 @@
+from datetime import datetime
+
+from pydantic import BaseModel, Field
+
+from app.core.enum.db_key_prefix_name import DBSaveIdEnum
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+
+
+class ChatMessagesBase(BaseModel):
+    """Base model for chat-message schemas: id plus audit timestamps."""
+
+    id: str = Field(..., description="고유 ID")
+    created_at: datetime = Field(..., description="생성 시각")
+    updated_at: datetime = Field(..., description="마지막 수정 시각")
+
+
+class RequestBase(BaseModel):
+    """Base model for chat-message request schemas."""
+
+    def validate_chat_tab_id(self) -> None:
+        """Validate the chat tab id's prefix.
+
+        NOTE(review): relies on a `chat_tab_id` attribute declared only by
+        subclasses; calling this on a bare RequestBase raises AttributeError.
+        """
+
+        required_prefix = DBSaveIdEnum.chat_tab.value + "-"
+        if not self.chat_tab_id.startswith(required_prefix):
+            raise APIException(CommonCode.INVALID_CHAT_TAB_ID_FORMAT)
diff --git a/app/schemas/chat_message/db_model.py b/app/schemas/chat_message/db_model.py
new file mode 100644
index 0000000..2833efd
--- /dev/null
+++ b/app/schemas/chat_message/db_model.py
@@ -0,0 +1,13 @@
+from pydantic import Field
+
+from app.core.enum.sender import SenderEnum
+from app.schemas.chat_message.base_model import ChatMessagesBase
+
+
+class ChatMessageInDB(ChatMessagesBase):
+    """DB row shape for one chat message."""
+
+    chat_tab_id: str = Field(..., description="해당 메시지가 속한 채팅 탭의 ID")
+    sender: SenderEnum = Field(..., description="메시지 발신자 ('A' 또는 'U')")
+    message: str = Field(..., description="메시지 내용")
+
+    # Serialize the enum as its raw value so the DB stores the plain code.
+    class Config:
+        use_enum_values = True
diff --git a/app/schemas/chat_message/request_model.py b/app/schemas/chat_message/request_model.py
new file mode 100644
index 0000000..fef913a
--- /dev/null
+++ b/app/schemas/chat_message/request_model.py
@@ -0,0 +1,13 @@
+from pydantic import Field
+
+from app.schemas.chat_message.base_model import RequestBase
+
+
+class ChatMessagesReqeust(RequestBase):
+    """Chat message creation request schema.
+
+    NOTE(review): the class name misspells "Request" (Reqeust); renaming
+    would break importers, so it is left as-is here.
+    """
+
+    chat_tab_id: str = Field(..., description="채팅 탭의 고유 ID")
+    message: str = Field(..., description="메시지 내용")
+
+    def validate(self):
+        # Delegates to RequestBase.validate_chat_tab_id (prefix check).
+        self.validate_chat_tab_id()
diff --git a/app/schemas/chat_message/response_model.py b/app/schemas/chat_message/response_model.py
new file mode 100644
index 0000000..f1ce682
--- /dev/null
+++ b/app/schemas/chat_message/response_model.py
@@ -0,0 +1,13 @@
+from pydantic import Field
+
+from app.core.enum.sender import SenderEnum
+from app.schemas.chat_message.base_model import ChatMessagesBase
+
+
+class ChatMessagesResponse(ChatMessagesBase):
+    """Response schema for one chat message."""
+
+    chat_tab_id: str = Field(..., description="해당 메시지가 속한 채팅 탭의 ID")
+    sender: SenderEnum = Field(..., description="메시지 발신자 ('AI' 또는 'User')")
+    message: str = Field(..., description="메시지 내용")
+
+    # Serialize the enum as its raw value in API payloads.
+    class Config:
+        use_enum_values = True
diff --git a/app/schemas/chat_tab/base_model.py b/app/schemas/chat_tab/base_model.py
new file mode 100644
index 0000000..f642c19
--- /dev/null
+++ b/app/schemas/chat_tab/base_model.py
@@ -0,0 +1,46 @@
+import re
+
+from pydantic import BaseModel, Field
+
+from app.core.enum.db_key_prefix_name import DBSaveIdEnum
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+
+
+class ChatTabBase(BaseModel):
+ """
+ 모든 AI Chat Tab 스키마의 기본 모델
+ - 새로운 Chat Tab 생성을 위한 스키마
+ - 채팅 탭 이름 수정을 위한 스키마
+ """
+
+ name: str | None = Field(..., description="새로운 채팅 탭 이름")
+
+ def validate_chat_tab_name(self) -> None:
+ """채팅 탭 이름에 대한 유효성 검증 로직을 수행합니다."""
+ # 1. 문자열이 None, 문자열 전체가 공백 문자인지 확인
+ if not self.name or self.name.isspace():
+ raise APIException(CommonCode.INVALID_CHAT_TAB_NAME_FORMAT)
+
+ # 2. 길이 제한
+ if len(self.name) > 128:
+ raise APIException(CommonCode.INVALID_CHAT_TAB_NAME_LENGTH)
+
+ # 3. 특수문자 및 SQL 예약어 확인
+ # SQL 예약어와 위험한 특수문자를 검사합니다.
+ sql_keywords = ["SELECT", "INSERT", "UPDATE", "DELETE", "DROP", "OR", "AND"]
+ for keyword in sql_keywords:
+ if keyword in self.name.upper():
+ raise APIException(CommonCode.INVALID_CHAT_TAB_NAME_CONTENT)
+
+ # 특정 특수문자를 검사하는 예시
+ if re.search(r"[;\"'`<>]", self.name):
+ raise APIException(CommonCode.INVALID_CHAT_TAB_NAME_CONTENT)
+
+ def validate_chat_tab_id(self) -> None:
+ """채팅 탭 ID에 대한 유효성 검증 로직을 수행합니다."""
+
+ # 1. 'CHAT-TAB-' 접두사 검증
+ required_prefix = DBSaveIdEnum.chat_tab.value + "-"
+ if not self.id.startswith(required_prefix):
+ raise APIException(CommonCode.INVALID_CHAT_TAB_ID_FORMAT)
diff --git a/app/schemas/chat_tab/db_model.py b/app/schemas/chat_tab/db_model.py
new file mode 100644
index 0000000..d548797
--- /dev/null
+++ b/app/schemas/chat_tab/db_model.py
@@ -0,0 +1,25 @@
+from datetime import datetime
+
+from pydantic import Field
+
+from app.schemas.chat_tab.base_model import ChatTabBase
+
+
+class ChatTabInDB(ChatTabBase):
+    """DB row shape for a chat tab (internal use)."""
+
+    id: str
+    name: str
+    created_at: datetime
+    updated_at: datetime
+
+
+class ChatMessageInDB(ChatTabBase):
+    """DB row shape for a chat message (internal use).
+
+    NOTE(review): a class of the same name also exists in
+    app/schemas/chat_message/db_model.py with sender typed as SenderEnum
+    ('A'/'U'); consider consolidating — confirm which one callers use.
+    """
+
+    id: str = Field(..., description="메시지의 고유 ID (서버에서 생성)")
+    chat_tab_id: str = Field(..., description="해당 메시지가 속한 채팅 탭의 ID")
+    sender: str = Field(..., description="메시지 발신자 ('AI' 또는 'User')")
+    message: str = Field(..., description="메시지 내용")
+    created_at: datetime
+    updated_at: datetime
diff --git a/app/schemas/chat_tab/response_model.py b/app/schemas/chat_tab/response_model.py
new file mode 100644
index 0000000..0f017d5
--- /dev/null
+++ b/app/schemas/chat_tab/response_model.py
@@ -0,0 +1,22 @@
+from datetime import datetime
+
+from pydantic import Field
+
+from app.schemas.chat_tab.base_model import ChatTabBase
+from app.schemas.chat_tab.db_model import ChatMessageInDB
+
+
+class ChatTabResponse(ChatTabBase):
+    """API response schema for chat tab metadata."""
+
+    id: str = Field(..., description="채팅 탭의 고유 ID (서버에서 생성)")
+    name: str = Field(..., description="채팅 탭의 이름")
+    created_at: datetime
+    updated_at: datetime
+
+
+class ChatMessagesResponse(ChatTabResponse):
+    """API response schema: chat tab metadata plus its full message list."""
+
+    # All messages that belong to this tab.
+    messages: list[ChatMessageInDB] = Field(..., description="해당 채팅 탭에 속한 모든 메시지 목록")
diff --git a/app/schemas/chat_tab/update_model.py b/app/schemas/chat_tab/update_model.py
new file mode 100644
index 0000000..c42c56b
--- /dev/null
+++ b/app/schemas/chat_tab/update_model.py
@@ -0,0 +1,12 @@
+from pydantic import Field
+
+from app.schemas.chat_tab.base_model import ChatTabBase
+
+
+class ChatTabUpdate(ChatTabBase):
+ """채팅 탭 이름 수정을 위한 스키마"""
+
+ name: str = Field(None, description="수정할 채팅 탭 이름")
+
+ def validate(self):
+ self.validate_chat_tab_name(["name"])
diff --git a/app/schemas/driver/driver_info_model.py b/app/schemas/driver/driver_info_model.py
new file mode 100644
index 0000000..2cdf03e
--- /dev/null
+++ b/app/schemas/driver/driver_info_model.py
@@ -0,0 +1,38 @@
+# app/schemas/driver/driver_info_model.py
+from pydantic import BaseModel
+
+from app.core.enum.db_driver import DBTypesEnum
+
+
+class DriverInfo(BaseModel):
+    """Installation status and metadata for one database driver."""
+
+    db_type: str
+    is_installed: bool
+    driver_name: str | None
+    driver_version: str | None
+    driver_size_bytes: int | None
+
+    def update_from_module(self, version: str | None, size: int | None):
+        """
+        Mark this driver as installed and record its version and size,
+        mutating the instance in place; returns self for chaining.
+        """
+        self.is_installed = True
+        self.driver_version = version
+        self.driver_size_bytes = size
+
+        return self
+
+    @classmethod
+    def from_enum(cls, db_type_enum: DBTypesEnum):
+        """
+        Build a DriverInfo from a DBTypesEnum using only db_type and
+        driver_name; `is_installed` starts as False.
+        """
+        db_type = db_type_enum.name
+        driver_name = db_type_enum.value
+        return cls(
+            db_type=db_type,
+            is_installed=False,
+            driver_name=driver_name,
+            driver_version=None,
+            driver_size_bytes=None,
+        )
diff --git a/app/schemas/query/query_model.py b/app/schemas/query/query_model.py
new file mode 100644
index 0000000..bec85a2
--- /dev/null
+++ b/app/schemas/query/query_model.py
@@ -0,0 +1,70 @@
+# app/schemas/query/query_model.py
+
+from typing import Any
+
+from pydantic import BaseModel, Field, model_validator
+
+from app.core.enum.db_key_prefix_name import DBSaveIdEnum
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+from app.core.utils import generate_prefixed_uuid
+
+
+def _is_empty(value: Any | None) -> bool:
+ """값이 None, 빈 문자열, 공백 문자열인지 검사"""
+ if value is None:
+ return True
+ if isinstance(value, str) and not value.strip():
+ return True
+ return False
+
+
+class QueryInfo(BaseModel):
+    """Query execution payload tied to a user DB profile."""
+
+    user_db_id: str = Field(..., description="DB Key")
+    database: str | None = Field(None, description="database 명")
+    query_text: str | None = Field(None, description="쿼리 내용")
+
+    @model_validator(mode="after")
+    def validate_required_fields(self) -> "QueryInfo":
+        """Reject payloads missing a DB key or query text."""
+        if _is_empty(self.user_db_id):
+            raise APIException(CommonCode.NO_DB_DRIVER)
+
+        if _is_empty(self.query_text):
+            raise APIException(CommonCode.NO_QUERY)
+
+        return self
+
+
+class RequestExecutionQuery(QueryInfo):
+    """QueryInfo extended with the chat message the query originates from."""
+
+    chat_message_id: str | None = Field(None, description="연결된 메시지 Key")
+
+    @model_validator(mode="after")
+    def validate_chat_message_id(self) -> "RequestExecutionQuery":
+        """Additional check: a linked chat message id is required."""
+        if _is_empty(self.chat_message_id):
+            raise APIException(CommonCode.NO_CHAT_KEY)
+
+        return self
+
+
+class ExecutionQuery(RequestExecutionQuery):
+    """Fully-materialized query record ready for local persistence."""
+
+    id: str | None = Field(None, description="Query Key 값")
+    type: str | None = Field(None, description="디비 타입")
+    is_success: str | None = Field(None, description="성공 여부")
+    error_message: str | None = Field(None, description="에러 메시지")
+
+    @classmethod
+    def from_query_info(
+        cls, query_info: RequestExecutionQuery, type: str, is_success: bool, error_message: str | None = None
+    ):
+        # Builds an ExecutionQuery from a request, generating a prefixed id.
+        # is_success is folded to the "Y"/"N" convention used by the DB.
+        # NOTE(review): the `type` parameter shadows the builtin; renaming
+        # would change the keyword interface, so it is left as-is.
+        return cls(
+            id=generate_prefixed_uuid(DBSaveIdEnum.query.value),
+            user_db_id=query_info.user_db_id,
+            chat_message_id=query_info.chat_message_id,
+            database=query_info.database,
+            query_text=query_info.query_text,
+            type=type,
+            is_success="Y" if is_success else "N",
+            error_message=error_message,
+        )
diff --git a/app/schemas/query/result_model.py b/app/schemas/query/result_model.py
new file mode 100644
index 0000000..e4d0c62
--- /dev/null
+++ b/app/schemas/query/result_model.py
@@ -0,0 +1,43 @@
+# app/schemas/user_db/result_model.py
+
+from typing import Any
+
+from pydantic import BaseModel, Field
+
+from app.core.status import CommonCode
+
+
+# 기본 반환 모델
+class BasicResult(BaseModel):
+ is_successful: bool = Field(..., description="성공 여부")
+ code: CommonCode = Field(None, description="결과 코드")
+
+
+class ExecutionSelectResult(BasicResult):
+    """Result envelope for SELECT-style queries (row data payload)."""
+
+    data: dict = Field(..., description="쿼리 조회 후 결과 - 데이터")
+
+
+class ExecutionResult(BasicResult):
+    """Result envelope for non-SELECT query execution (string payload)."""
+
+    data: str = Field(..., description="쿼리 수행 후 결과")
+
+
+class InsertLocalDBResult(BasicResult):
+    """Result envelope for inserts into the local DB (string payload)."""
+
+    data: str = Field(..., description="쿼리 수행 후 결과")
+
+
+class SelectQueryHistoryResult(BasicResult):
+    """Result envelope for query-history lookups (dict payload)."""
+
+    data: dict = Field(..., description="쿼리 이력 조회")
+
+
+class QueryTestResult(BasicResult):
+    """Result envelope for DB connection/query tests (arbitrary payload)."""
+
+    data: Any = Field(..., description="쿼리 수행 결과")
diff --git a/app/schemas/user_db/db_profile_model.py b/app/schemas/user_db/db_profile_model.py
new file mode 100644
index 0000000..32d666a
--- /dev/null
+++ b/app/schemas/user_db/db_profile_model.py
@@ -0,0 +1,68 @@
+# app/schemas/user_db/db_profile_model.py
+
+from datetime import datetime
+from typing import Any
+
+from pydantic import BaseModel, Field
+
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+
+
+# 사용자가 직접 입력해야 하는 정보만 포함합니다.
+class DBProfileInfo(BaseModel):
+ type: str = Field(..., description="DB 종류")
+ host: str | None = Field(None, description="호스트 주소")
+ port: int | None = Field(None, description="포트 번호")
+ name: str | None = Field(None, description="연결할 데이터베이스명")
+ username: str | None = Field(None, description="사용자 이름")
+ password: str | None = Field(None, description="비밀번호")
+
+ def validate_required_fields(self) -> None:
+ """DB 종류별 필수 필드 유효성 검사"""
+ required_fields_by_type = {
+ "sqlite": ["name"],
+ "mysql": ["host", "port", "username", "password"],
+ "mariadb": ["host", "port", "username", "password"],
+ "postgresql": ["host", "port", "username", "password"],
+ "oracle": ["host", "port", "username", "password", "name"],
+ }
+
+ if not self.type:
+ raise APIException(CommonCode.NO_DB_DRIVER)
+
+ db_type = self.type.lower()
+ if db_type not in required_fields_by_type:
+ raise APIException(CommonCode.INVALID_DB_DRIVER)
+
+ missing = [
+ field_name
+ for field_name in required_fields_by_type[db_type]
+ if self._is_empty(getattr(self, field_name, None))
+ ]
+
+ if missing:
+ raise APIException(CommonCode.NO_VALUE)
+
+ @staticmethod
+ def _is_empty(value: Any | None) -> bool:
+ """값이 None, 빈 문자열, 공백 문자열인지 검사"""
+ if value is None:
+ return True
+ if isinstance(value, str) and not value.strip():
+ return True
+ return False
+
+
+class UpdateOrCreateDBProfile(DBProfileInfo):
+ id: str | None = Field(None, description="DB Key 값")
+ view_name: str | None = Field(None, description="DB 노출명")
+ annotation_id: str | None = Field(None, description="연결된 어노테이션 ID")
+
+
+class AllDBProfileInfo(DBProfileInfo):
+ id: str | None = Field(..., description="DB Key 값")
+ view_name: str | None = Field(None, description="DB 노출명")
+ annotation_id: str | None = Field(None, description="연결된 어노테이션 ID")
+ created_at: datetime = Field(..., description="profile 저장일")
+ updated_at: datetime = Field(..., description="profile 수정일")
diff --git a/app/schemas/user_db/result_model.py b/app/schemas/user_db/result_model.py
new file mode 100644
index 0000000..d2a5b2a
--- /dev/null
+++ b/app/schemas/user_db/result_model.py
@@ -0,0 +1,108 @@
+# app/schemas/user_db/result_model.py
+
+from datetime import datetime
+from typing import Any
+
+from pydantic import BaseModel, Field
+
+from app.core.status import CommonCode
+
+
+# 기본 반환 모델
+class BasicResult(BaseModel):
+ is_successful: bool = Field(..., description="성공 여부")
+ code: CommonCode = Field(None, description="결과 코드")
+
+
+# 디비 정보 후 반환되는 저장 모델
+class ChangeProfileResult(BasicResult):
+ """DB 조회 결과를 위한 확장 모델"""
+
+ view_name: str = Field(..., description="저장된 디비명")
+
+
+# DB Profile 조회되는 정보를 담는 모델입니다.
+class DBProfile(BaseModel):
+ id: str
+ type: str
+ host: str | None
+ port: int | None
+ name: str | None
+ username: str | None
+ view_name: str | None
+ annotation_id: str | None = None
+ created_at: datetime
+ updated_at: datetime
+
+ class Config:
+ from_attributes = True
+
+
+# DB Profile 전체 조회 결과를 담는 새로운 모델
+class AllDBProfileResult(BasicResult):
+ """DB 프로필 전체 조회 결과를 위한 확장 모델"""
+
+ profiles: list[DBProfile] = Field([], description="DB 프로필 목록")
+
+
+class ColumnInfo(BaseModel):
+ """단일 컬럼의 상세 정보를 담는 모델"""
+
+ name: str = Field(..., description="컬럼 이름")
+ type: str = Field(..., description="데이터 타입")
+ nullable: bool = Field(..., description="NULL 허용 여부")
+ default: Any | None = Field(None, description="기본값")
+ comment: str | None = Field(None, description="코멘트")
+ is_pk: bool = Field(False, description="기본 키(Primary Key) 여부")
+ ordinal_position: int | None = Field(None, description="컬럼 순서")
+
+
+class ConstraintInfo(BaseModel):
+ """테이블 제약 조건 정보를 담는 모델"""
+
+ name: str | None = Field(None, description="제약 조건 이름")
+ type: str = Field(..., description="제약 조건 타입 (PRIMARY KEY, FOREIGN KEY, UNIQUE, CHECK)")
+ columns: list[str] = Field(..., description="제약 조건에 포함된 컬럼 목록")
+ # FOREIGN KEY 관련 필드
+ referenced_table: str | None = Field(None, description="참조하는 테이블 (FK)")
+ referenced_columns: list[str] | None = Field(None, description="참조하는 테이블의 컬럼 (FK)")
+ on_update: str | None = Field(None, description="UPDATE 시 동작 (FK)")
+ on_delete: str | None = Field(None, description="DELETE 시 동작 (FK)")
+ # CHECK 관련 필드
+ check_expression: str | None = Field(None, description="CHECK 제약 조건 표현식")
+
+
+class IndexInfo(BaseModel):
+ """테이블 인덱스 정보를 담는 모델"""
+
+ name: str | None = Field(None, description="인덱스 이름")
+ columns: list[str] = Field(..., description="인덱스에 포함된 컬럼 목록")
+ is_unique: bool = Field(False, description="고유 인덱스 여부")
+
+
+class TableInfo(BaseModel):
+ """단일 테이블의 이름과 상세 정보를 담는 모델"""
+
+ name: str = Field(..., description="테이블 이름")
+ columns: list[ColumnInfo] = Field([], description="컬럼 목록")
+ constraints: list[ConstraintInfo] = Field([], description="제약 조건 목록")
+ indexes: list[IndexInfo] = Field([], description="인덱스 목록")
+ comment: str | None = Field(None, description="테이블 코멘트")
+
+
+class SchemaInfoResult(BasicResult):
+ """DB 스키마 상세 정보 조회 결과를 위한 확장 모델"""
+
+ schema: list[TableInfo] = Field([], description="테이블 및 컬럼 정보 목록")
+
+
+class SchemaListResult(BasicResult):
+ schemas: list[str] = Field([], description="스키마 이름 목록")
+
+
+class TableListResult(BasicResult):
+ tables: list[str] = Field([], description="테이블 이름 목록")
+
+
+class ColumnListResult(BasicResult):
+ columns: list[ColumnInfo] = Field([], description="컬럼 정보 목록")
diff --git a/app/services/annotation_service.py b/app/services/annotation_service.py
new file mode 100644
index 0000000..e10fe0b
--- /dev/null
+++ b/app/services/annotation_service.py
@@ -0,0 +1,492 @@
+import sqlite3
+from datetime import datetime
+from typing import Any
+
+from fastapi import Depends
+
+from app.core.enum.constraint_type import ConstraintTypeEnum
+from app.core.enum.db_key_prefix_name import DBSaveIdEnum
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+from app.core.utils import generate_prefixed_uuid, get_db_path
+from app.repository.annotation_repository import AnnotationRepository, annotation_repository
+from app.schemas.annotation.ai_model import (
+ AIAnnotationRequest,
+ AIColumnInfo,
+ AIConstraintInfo,
+ AIDatabaseInfo,
+ AIIndexInfo,
+ AIRelationship,
+ AITableInfo,
+)
+from app.schemas.annotation.db_model import (
+ ColumnAnnotationInDB,
+ ConstraintColumnInDB,
+ DatabaseAnnotationInDB,
+ IndexAnnotationInDB,
+ IndexColumnInDB,
+ TableAnnotationInDB,
+ TableConstraintInDB,
+)
+from app.schemas.annotation.request_model import AnnotationCreateRequest
+from app.schemas.annotation.response_model import AnnotationDeleteResponse, FullAnnotationResponse
+from app.schemas.user_db.db_profile_model import AllDBProfileInfo
+from app.schemas.user_db.result_model import TableInfo as UserDBTableInfo
+from app.services.user_db_service import UserDbService, user_db_service
+
+annotation_repository_dependency = Depends(lambda: annotation_repository)
+user_db_service_dependency = Depends(lambda: user_db_service)
+
+# AI 서버의 주소 (임시)
+AI_SERVER_URL = "http://localhost:8001/api/v1/annotate/database"
+
+
+class AnnotationService:
+ def __init__(
+ self, repository: AnnotationRepository = annotation_repository, user_db_serv: UserDbService = user_db_service
+ ):
+ """
+ AnnotationService를 초기화합니다.
+
+ Args:
+ repository (AnnotationRepository): 어노테이션 레포지토리 의존성 주입.
+ user_db_serv (UserDbService): 사용자 DB 서비스 의존성 주입.
+ """
+ self.repository = repository
+ self.user_db_service = user_db_serv
+
+ async def create_annotation(self, request: AnnotationCreateRequest) -> FullAnnotationResponse:
+ """
+ 어노테이션 생성을 위한 전체 프로세스를 관장합니다.
+ 1. DB 프로필, 전체 스키마 정보, 샘플 데이터 조회
+ 2. AI 서버에 요청할 데이터 모델 생성
+ 3. TODO: AI 서버에 요청 (현재는 Mock 데이터 사용)
+ 4. 트랜잭션 내에서 전체 어노테이션 정보 저장 및 DB 프로필 업데이트
+ """
+ try:
+ request.validate()
+ except ValueError as e:
+ raise APIException(CommonCode.INVALID_ANNOTATION_REQUEST, detail=str(e)) from e
+
+ # 1. DB 프로필, 전체 스키마 정보, 샘플 데이터 조회
+ db_profile = self.user_db_service.find_profile(request.db_profile_id)
+ full_schema_info = self.user_db_service.get_full_schema_info(db_profile)
+ sample_rows = self.user_db_service.get_sample_rows(db_profile, full_schema_info)
+
+ # 2. AI 서버에 요청할 데이터 모델 생성
+ ai_request_body = self._prepare_ai_request_body(db_profile, full_schema_info, sample_rows)
+ print(ai_request_body.model_dump_json(indent=2))
+
+ # 3. AI 서버에 요청 (현재는 Mock 데이터 사용)
+ ai_response = await self._request_annotation_to_ai_server(ai_request_body)
+
+ # 4. 트랜잭션 내에서 전체 어노테이션 정보 저장 및 DB 프로필 업데이트
+ db_path = get_db_path()
+ conn = None
+ try:
+ conn = sqlite3.connect(str(db_path), timeout=10)
+ conn.execute("BEGIN")
+
+ db_models = self._transform_ai_response_to_db_models(
+ ai_response, db_profile, request.db_profile_id, full_schema_info
+ )
+ self.repository.create_full_annotation(db_conn=conn, **db_models)
+
+ annotation_id = db_models["db_annotation"].id
+ self.repository.update_db_profile_annotation_id(
+ db_conn=conn, db_profile_id=request.db_profile_id, annotation_id=annotation_id
+ )
+
+ conn.commit()
+
+ except sqlite3.Error as e:
+ if conn:
+ conn.rollback()
+ raise APIException(CommonCode.FAIL_CREATE_ANNOTATION, detail=f"Database transaction failed: {e}") from e
+ finally:
+ if conn:
+ conn.close()
+
+ return self.get_full_annotation(annotation_id)
+
+ def get_annotation_by_db_profile_id(self, db_profile_id: str) -> FullAnnotationResponse:
+ """
+ db_profile_id를 기반으로 완전한 어노테이션 정보를 조회합니다.
+ """
+ db_profile = self.user_db_service.find_profile(db_profile_id)
+ if not db_profile.annotation_id:
+ raise APIException(CommonCode.NO_ANNOTATION_FOR_PROFILE)
+
+ return self.get_full_annotation(db_profile.annotation_id)
+
+ def _prepare_ai_request_body(
+ self,
+ db_profile: AllDBProfileInfo,
+ full_schema_info: list[UserDBTableInfo],
+ sample_rows: dict[str, list[dict[str, Any]]],
+ ) -> AIAnnotationRequest:
+ """
+ AI 서버에 보낼 요청 본문을 Pydantic 모델로 생성합니다.
+ """
+ ai_tables = []
+ ai_relationships = []
+
+ for table_info in full_schema_info:
+ # FK 제약조건을 분리하여 relationships 목록 생성
+ non_fk_constraints = []
+ for const in table_info.constraints:
+ if const.type == "FOREIGN KEY" and const.referenced_table and const.referenced_columns:
+ ai_relationships.append(
+ AIRelationship(
+ from_table=table_info.name,
+ from_columns=const.columns,
+ to_table=const.referenced_table,
+ to_columns=const.referenced_columns,
+ )
+ )
+ else:
+ non_fk_constraints.append(
+ AIConstraintInfo(
+ name=const.name,
+ type=const.type,
+ columns=const.columns,
+ check_expression=const.check_expression,
+ )
+ )
+
+ ai_table = AITableInfo(
+ table_name=table_info.name,
+ columns=[
+ AIColumnInfo(
+ column_name=col.name,
+ data_type=col.type,
+ is_pk=col.is_pk,
+ is_nullable=col.nullable,
+ default_value=col.default,
+ )
+ for col in table_info.columns
+ ],
+ constraints=non_fk_constraints,
+ indexes=[
+ AIIndexInfo(name=idx.name, columns=idx.columns, is_unique=idx.is_unique)
+ for idx in table_info.indexes
+ ],
+ sample_rows=sample_rows.get(table_info.name, []),
+ )
+ ai_tables.append(ai_table)
+
+ ai_database = AIDatabaseInfo(
+ database_name=db_profile.name or db_profile.username, tables=ai_tables, relationships=ai_relationships
+ )
+
+ return AIAnnotationRequest(dbms_type=db_profile.type, databases=[ai_database])
+
+ def _transform_ai_response_to_db_models(
+ self,
+ ai_response: dict[str, Any],
+ db_profile: AllDBProfileInfo,
+ db_profile_id: str,
+ full_schema_info: list[UserDBTableInfo],
+ ) -> dict[str, Any]:
+ """
+ AI 서버의 응답을 받아서 DB에 저장할 수 있는 모델 딕셔너리로 변환합니다.
+ """
+ now = datetime.now()
+ annotation_id = generate_prefixed_uuid(DBSaveIdEnum.database_annotation.value)
+
+ # 원본 스키마 정보를 쉽게 조회할 수 있도록 룩업 테이블 생성
+ schema_lookup: dict[str, UserDBTableInfo] = {table.name: table for table in full_schema_info}
+
+ db_anno = DatabaseAnnotationInDB(
+ id=annotation_id,
+ db_profile_id=db_profile_id,
+ database_name=db_profile.name or db_profile.username,
+ description=ai_response.get("database_annotation"),
+ created_at=now,
+ updated_at=now,
+ )
+
+ (
+ all_table_annos,
+ all_col_annos,
+ all_constraint_annos,
+ all_constraint_col_annos,
+ all_index_annos,
+ all_index_col_annos,
+ ) = (
+ [],
+ [],
+ [],
+ [],
+ [],
+ [],
+ )
+
+ for tbl_data in ai_response.get("tables", []):
+ original_table = schema_lookup.get(tbl_data["table_name"])
+ if not original_table:
+ continue
+
+ (
+ table_anno,
+ col_annos,
+ constraint_annos,
+ constraint_col_annos,
+ index_annos,
+ index_col_annos,
+ ) = self._create_annotations_for_table(tbl_data, original_table, annotation_id, now)
+
+ all_table_annos.append(table_anno)
+ all_col_annos.extend(col_annos)
+ all_constraint_annos.extend(constraint_annos)
+ all_constraint_col_annos.extend(constraint_col_annos)
+ all_index_annos.extend(index_annos)
+ all_index_col_annos.extend(index_col_annos)
+
+ return {
+ "db_annotation": db_anno,
+ "table_annotations": all_table_annos,
+ "column_annotations": all_col_annos,
+ "constraint_annotations": all_constraint_annos,
+ "constraint_column_annotations": all_constraint_col_annos,
+ "index_annotations": all_index_annos,
+ "index_column_annotations": all_index_col_annos,
+ }
+
+ def _create_annotations_for_table(
+ self,
+ tbl_data: dict[str, Any],
+ original_table: UserDBTableInfo,
+ database_annotation_id: str,
+ now: datetime,
+ ) -> tuple:
+ """
+ 단일 테이블에 대한 모든 하위 어노테이션(컬럼, 제약조건, 인덱스)을 생성합니다.
+ """
+ table_id = generate_prefixed_uuid(DBSaveIdEnum.table_annotation.value)
+ table_anno = TableAnnotationInDB(
+ id=table_id,
+ database_annotation_id=database_annotation_id,
+ table_name=original_table.name,
+ description=tbl_data.get("annotation"),
+ created_at=now,
+ updated_at=now,
+ )
+
+ col_map = {
+ col.name: generate_prefixed_uuid(DBSaveIdEnum.column_annotation.value) for col in original_table.columns
+ }
+
+ col_annos = self._process_columns(tbl_data, original_table, table_id, col_map, now)
+ constraint_annos, constraint_col_annos = self._process_constraints(
+ tbl_data, original_table, table_id, col_map, now
+ )
+ index_annos, index_col_annos = self._process_indexes(tbl_data, original_table, table_id, col_map, now)
+
+ return table_anno, col_annos, constraint_annos, constraint_col_annos, index_annos, index_col_annos
+
+ def _process_columns(
+ self, tbl_data: dict, original_table: UserDBTableInfo, table_id: str, col_map: dict, now: datetime
+ ) -> list[ColumnAnnotationInDB]:
+ """
+ 테이블의 컬럼 어노테이션 모델 리스트를 생성합니다.
+ """
+ col_annos = []
+ for col_data in tbl_data.get("columns", []):
+ original_column = next((c for c in original_table.columns if c.name == col_data["column_name"]), None)
+ if not original_column:
+ continue
+ col_annos.append(
+ ColumnAnnotationInDB(
+ id=col_map[original_column.name],
+ table_annotation_id=table_id,
+ column_name=original_column.name,
+ data_type=original_column.type,
+ is_nullable=1 if original_column.nullable else 0,
+ default_value=original_column.default,
+ description=col_data.get("annotation"),
+ ordinal_position=original_column.ordinal_position,
+ created_at=now,
+ updated_at=now,
+ )
+ )
+ return col_annos
+
+ def _process_constraints(
+ self, tbl_data: dict, original_table: UserDBTableInfo, table_id: str, col_map: dict, now: datetime
+ ) -> tuple[list[TableConstraintInDB], list[ConstraintColumnInDB]]:
+ """
+ 테이블의 제약조건 및 제약조건 컬럼 어노테이션 모델 리스트를 생성합니다.
+ """
+ constraint_annos, constraint_col_annos = [], []
+ for const_data in tbl_data.get("constraints", []):
+ original_constraint = next((c for c in original_table.constraints if c.name == const_data["name"]), None)
+ if not original_constraint:
+ continue
+ const_id = generate_prefixed_uuid(DBSaveIdEnum.table_constraint.value)
+ constraint_annos.append(
+ TableConstraintInDB(
+ id=const_id,
+ table_annotation_id=table_id,
+ name=original_constraint.name,
+ constraint_type=ConstraintTypeEnum(original_constraint.type),
+ description=const_data.get("annotation"),
+ ref_table=original_constraint.referenced_table,
+ expression=original_constraint.check_expression,
+ on_update_action=original_constraint.on_update,
+ on_delete_action=original_constraint.on_delete,
+ created_at=now,
+ updated_at=now,
+ )
+ )
+ for i, col_name in enumerate(original_constraint.columns):
+ if col_name not in col_map:
+ continue
+ constraint_col_annos.append(
+ ConstraintColumnInDB(
+ id=generate_prefixed_uuid(DBSaveIdEnum.constraint_column.value),
+ constraint_id=const_id,
+ column_annotation_id=col_map[col_name],
+ position=i + 1,
+ referenced_column_name=(
+ original_constraint.referenced_columns[i]
+ if original_constraint.referenced_columns
+ and i < len(original_constraint.referenced_columns)
+ else None
+ ),
+ created_at=now,
+ updated_at=now,
+ )
+ )
+ return constraint_annos, constraint_col_annos
+
+ def _process_indexes(
+ self, tbl_data: dict, original_table: UserDBTableInfo, table_id: str, col_map: dict, now: datetime
+ ) -> tuple[list[IndexAnnotationInDB], list[IndexColumnInDB]]:
+ """
+ 테이블의 인덱스 및 인덱스 컬럼 어노테이션 모델 리스트를 생성합니다.
+ """
+ index_annos, index_col_annos = [], []
+ for idx_data in tbl_data.get("indexes", []):
+ original_index = next((i for i in original_table.indexes if i.name == idx_data["name"]), None)
+ if not original_index:
+ continue
+ idx_id = generate_prefixed_uuid(DBSaveIdEnum.index_annotation.value)
+ index_annos.append(
+ IndexAnnotationInDB(
+ id=idx_id,
+ table_annotation_id=table_id,
+ name=original_index.name,
+ is_unique=1 if original_index.is_unique else 0,
+ created_at=now,
+ updated_at=now,
+ )
+ )
+ for i, col_name in enumerate(original_index.columns):
+ if col_name not in col_map:
+ continue
+ index_col_annos.append(
+ IndexColumnInDB(
+ id=generate_prefixed_uuid(DBSaveIdEnum.index_column.value),
+ index_id=idx_id,
+ column_annotation_id=col_map[col_name],
+ position=i + 1,
+ created_at=now,
+ updated_at=now,
+ )
+ )
+ return index_annos, index_col_annos
+
+ def get_full_annotation(self, annotation_id: str) -> FullAnnotationResponse:
+ """
+ ID를 기반으로 완전한 어노테이션 정보를 조회합니다.
+ """
+ try:
+ annotation = self.repository.find_full_annotation_by_id(annotation_id)
+ if not annotation:
+ raise APIException(CommonCode.NO_SEARCH_DATA)
+ return annotation
+ except sqlite3.Error as e:
+ raise APIException(CommonCode.FAIL_FIND_ANNOTATION) from e
+
+ def delete_annotation(self, annotation_id: str) -> AnnotationDeleteResponse:
+ """
+ ID를 기반으로 어노테이션 및 관련 하위 데이터를 모두 삭제합니다.
+ """
+ try:
+ is_deleted = self.repository.delete_annotation_by_id(annotation_id)
+ if not is_deleted:
+ raise APIException(CommonCode.NO_SEARCH_DATA)
+ return AnnotationDeleteResponse(id=annotation_id)
+ except sqlite3.Error as e:
+ raise APIException(CommonCode.FAIL_DELETE_ANNOTATION) from e
+
+ async def _request_annotation_to_ai_server(self, ai_request: AIAnnotationRequest) -> dict:
+ """AI 서버에 스키마 정보를 보내고 어노테이션을 받아옵니다."""
+ # 우선은 목업 데이터 활용
+ return self._get_mock_ai_response(ai_request)
+
+ # Real implementation below
+ # request_body = ai_request.model_dump()
+ # async with httpx.AsyncClient() as client:
+ # try:
+ # response = await client.post(AI_SERVER_URL, json=request_body, timeout=60.0)
+ # response.raise_for_status()
+ # return response.json()
+ # except httpx.HTTPStatusError as e:
+ # raise APIException(CommonCode.FAIL_AI_SERVER_PROCESSING, detail=f"AI server error: {e.response.text}") from e
+ # except httpx.RequestError as e:
+ # raise APIException(CommonCode.FAIL_AI_SERVER_CONNECTION, detail=f"AI server connection failed: {e}") from e
+
+ def _get_mock_ai_response(self, ai_request: AIAnnotationRequest) -> dict:
+ """테스트를 위한 Mock AI 서버 응답 생성"""
+ # 요청 데이터를 기반으로 동적으로 Mock 응답을 생성하도록 수정
+ db_info = ai_request.databases[0]
+ mock_response = {
+ "database_annotation": f"Mock: '{db_info.database_name}' 데이터베이스 전체에 대한 설명입니다.",
+ "tables": [],
+ "relationships": [],
+ }
+ for table in db_info.tables:
+ mock_table = {
+ "table_name": table.table_name,
+ "annotation": f"Mock: '{table.table_name}' 테이블에 대한 설명입니다.",
+ "columns": [
+ {"column_name": col.column_name, "annotation": f"Mock: '{col.column_name}' 컬럼에 대한 설명입니다."}
+ for col in table.columns
+ ],
+ "constraints": [
+ {
+ "name": c.name,
+ "type": c.type,
+ "columns": c.columns,
+ "annotation": f"Mock: 제약조건 '{c.name}' 설명.",
+ }
+ for c in table.constraints
+ ],
+ "indexes": [
+ {
+ "name": i.name,
+ "columns": i.columns,
+ "is_unique": i.is_unique,
+ "annotation": f"Mock: 인덱스 '{i.name}' 설명.",
+ }
+ for i in table.indexes
+ ],
+ }
+ mock_response["tables"].append(mock_table)
+
+ for rel in db_info.relationships:
+ mock_response["relationships"].append(
+ {
+ "from_table": rel.from_table,
+ "from_columns": rel.from_columns,
+ "to_table": rel.to_table,
+ "to_columns": rel.to_columns,
+ "annotation": f"Mock: '{rel.from_table}'과 '{rel.to_table}'의 관계 설명.",
+ }
+ )
+ return mock_response
+
+
+annotation_service = AnnotationService()
diff --git a/app/services/api_key_service.py b/app/services/api_key_service.py
new file mode 100644
index 0000000..88f1730
--- /dev/null
+++ b/app/services/api_key_service.py
@@ -0,0 +1,102 @@
+import sqlite3
+
+from fastapi import Depends
+
+from app.core.enum.db_key_prefix_name import DBSaveIdEnum
+from app.core.exceptions import APIException
+from app.core.security import AES256
+from app.core.status import CommonCode
+from app.core.utils import generate_prefixed_uuid
+from app.repository.api_key_repository import APIKeyRepository, api_key_repository
+from app.schemas.api_key.create_model import APIKeyCreate
+from app.schemas.api_key.db_model import APIKeyInDB
+from app.schemas.api_key.update_model import APIKeyUpdate
+
+api_key_repository_dependency = Depends(lambda: api_key_repository)
+
+
+class APIKeyService:
+ def __init__(self, repository: APIKeyRepository = api_key_repository):
+ self.repository = repository
+
+ def store_api_key(self, api_key_data: APIKeyCreate) -> APIKeyInDB:
+ """API_KEY를 암호화하고 repository를 통해 데이터베이스에 저장합니다."""
+ api_key_data.validate_with_service()
+ try:
+ encrypted_key = AES256.encrypt(api_key_data.api_key)
+ new_id = generate_prefixed_uuid(DBSaveIdEnum.api_key.value)
+
+ created_row = self.repository.create_api_key(
+ new_id=new_id,
+ service_name=api_key_data.service_name.value,
+ encrypted_key=encrypted_key,
+ )
+
+ if not created_row:
+ raise APIException(CommonCode.FAIL_TO_VERIFY_CREATION)
+
+ return created_row
+
+ except sqlite3.IntegrityError as e:
+ raise APIException(CommonCode.DUPLICATION) from e
+ except sqlite3.Error as e:
+ if "database is locked" in str(e):
+ raise APIException(CommonCode.DB_BUSY) from e
+ raise APIException(CommonCode.FAIL) from e
+
+ def get_all_api_keys(self) -> list[APIKeyInDB]:
+ """데이터베이스에 저장된 모든 API Key를 조회합니다."""
+ try:
+ return self.repository.get_all_api_keys()
+ except sqlite3.Error as e:
+ raise APIException(CommonCode.FAIL) from e
+
+ def get_api_key_by_service_name(self, service_name: str) -> APIKeyInDB:
+ """서비스 이름으로 특정 API Key를 조회합니다."""
+ try:
+ api_key = self.repository.get_api_key_by_service_name(service_name)
+ if not api_key:
+ raise APIException(CommonCode.NO_SEARCH_DATA)
+ return api_key
+ except sqlite3.Error as e:
+ raise APIException(CommonCode.FAIL) from e
+
+ def get_decrypted_api_key(self, service_name: str) -> str:
+ """서비스 이름으로 암호화된 API Key를 조회하고 복호화하여 반환합니다."""
+ api_key_in_db = self.get_api_key_by_service_name(service_name)
+ try:
+ decrypted_key = AES256.decrypt(api_key_in_db.api_key)
+ return decrypted_key
+ except Exception as e:
+ # 복호화 실패 시 서버 에러 발생
+ raise APIException(CommonCode.FAIL_DECRYPT_API_KEY) from e
+
+ def update_api_key(self, service_name: str, key_data: APIKeyUpdate) -> APIKeyInDB:
+ """서비스 이름에 해당하는 API Key를 수정합니다."""
+ key_data.validate_with_api_key()
+ try:
+ encrypted_key = AES256.encrypt(key_data.api_key)
+ updated_api_key = self.repository.update_api_key(service_name, encrypted_key)
+
+ if not updated_api_key:
+ raise APIException(CommonCode.NO_SEARCH_DATA)
+
+ return updated_api_key
+ except sqlite3.Error as e:
+ if "database is locked" in str(e):
+ raise APIException(CommonCode.DB_BUSY) from e
+ raise APIException(CommonCode.FAIL) from e
+
+ def delete_api_key(self, service_name: str) -> None:
+ """서비스 이름에 해당하는 API Key를 삭제합니다."""
+ try:
+ is_deleted = self.repository.delete_api_key(service_name)
+ if not is_deleted:
+ raise APIException(CommonCode.NO_SEARCH_DATA)
+ except sqlite3.Error as e:
+ if "database is locked" in str(e):
+ raise APIException(CommonCode.DB_BUSY) from e
+ raise APIException(CommonCode.FAIL) from e
+
+
+api_key_service = APIKeyService()
diff --git a/app/services/chat_message_service.py b/app/services/chat_message_service.py
new file mode 100644
index 0000000..493554c
--- /dev/null
+++ b/app/services/chat_message_service.py
@@ -0,0 +1,146 @@
+import os
+import sqlite3
+
+import httpx
+from fastapi import Depends
+
+from app.core.enum.db_key_prefix_name import DBSaveIdEnum
+from app.core.enum.sender import SenderEnum
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+from app.core.utils import generate_prefixed_uuid
+from app.repository.chat_message_repository import ChatMessageRepository, chat_message_repository
+from app.schemas.chat_message.db_model import ChatMessageInDB
+from app.schemas.chat_message.request_model import ChatMessagesReqeust
+from app.schemas.chat_message.response_model import ChatMessagesResponse
+
+chat_message_repository_dependency = Depends(lambda: chat_message_repository)
+
+AI_SERVER_URL = os.getenv("ENV_AI_SERVER_URL")
+
+
+class ChatMessageService:
+ def __init__(self, repository: ChatMessageRepository = chat_message_repository):
+ self.repository = repository
+
+ def get_chat_messages_by_tabId(self, tabId: str) -> ChatMessageInDB:
+ """
+ 채팅 탭 메타데이터와 메시지 목록을 모두 가져와서 조합합니다.
+ 탭이 존재하지 않으면 예외를 발생시킵니다.
+ """
+ try:
+ return self.repository.get_chat_messages_by_tabId(tabId)
+
+ except sqlite3.Error as e:
+ raise APIException(CommonCode.FAIL) from e
+
+ async def create_chat_message(self, request: ChatMessagesReqeust) -> ChatMessagesResponse:
+ # 1. tab_id 확인
+ chat_tab_id = request.chat_tab_id
+
+ # chat_tab_id 유효성 검사
+ try:
+ request.validate()
+ except ValueError as e:
+ raise APIException(CommonCode.INVALID_CHAT_MESSAGE_REQUEST, detail=str(e)) from e
+
+ try:
+ # 같은 서비스 메서드 호출
+ self.get_chat_messages_by_tabId(chat_tab_id)
+ except sqlite3.Error as e:
+ raise APIException(CommonCode.FAIL) from e
+
+ # 2. 사용자 질의 저장
+ try:
+ user_request = self._transform_user_request_to_db_models(request)
+ except sqlite3.Error as e:
+ raise APIException(CommonCode.FAIL) from e
+
+ # 3. AI 서버에 요청
+ ai_response = await self._request_chat_message_to_ai_server(user_request)
+
+ # 4. AI 서버 응답 저장
+ response = self._transform_ai_response_to_db_models(request, ai_response)
+
+ return response
+
+ def _transform_user_request_to_db_models(self, request: ChatMessagesReqeust) -> ChatMessageInDB:
+ """사용자 질의를 데이터베이스에 저장합니다."""
+
+ new_id = generate_prefixed_uuid(DBSaveIdEnum.chat_message.value)
+ sender = SenderEnum.user
+
+ chat_tab_id = request.chat_tab_id
+ message = request.message
+
+ try:
+ created_row = self.repository.create_chat_message(
+ new_id=new_id,
+ sender=sender,
+ chat_tab_id=chat_tab_id,
+ message=message,
+ )
+ if not created_row:
+ raise APIException(CommonCode.FAIL_TO_VERIFY_CREATION)
+
+ return created_row
+
+ except sqlite3.Error as e:
+ if "database is locked" in str(e):
+ raise APIException(CommonCode.DB_BUSY) from e
+ raise APIException(CommonCode.FAIL) from e
+
+ async def _request_chat_message_to_ai_server(self, user_request: ChatMessagesReqeust) -> dict:
+ """AI 서버에 사용자 질의를 보내고 답변을 받아옵니다."""
+ # 1. DB에서 해당 탭의 모든 메시지 조회
+ messages: list[ChatMessageInDB] = self.repository.get_chat_messages_by_tabId(user_request.chat_tab_id)
+
+ if not messages:
+ history = []
+ latest_message = user_request.message # DB에 없으면 요청 메시지 그대로
+ else:
+ history = [{"role": m.sender, "content": m.message} for m in messages[:-1]]
+ latest_message = messages[-1].message
+
+ # 3. AI 서버에 보내는 DATA
+ request_body = {"question": latest_message, "chat_history": history}
+
+ # 4. AI 서버에 POST 요청
+ async with httpx.AsyncClient() as client:
+ try:
+ response = await client.post(AI_SERVER_URL, json=request_body, timeout=60.0)
+ response.raise_for_status()
+ return response.json()
+ except httpx.HTTPStatusError as e:
+ raise APIException(CommonCode.FAIL_AI_SERVER_PROCESSING) from e
+ except httpx.RequestError as e:
+ raise APIException(CommonCode.FAIL_AI_SERVER_CONNECTION) from e
+
+ def _transform_ai_response_to_db_models(self, request: ChatMessagesReqeust, ai_response: str) -> ChatMessageInDB:
+ """AI 서버에서 받은 답변을 데이터베이스에 저장합니다."""
+
+ new_id = generate_prefixed_uuid(DBSaveIdEnum.chat_message.value)
+ sender = SenderEnum.ai
+
+ chat_tab_id = request.chat_tab_id
+ message = ai_response["answer"]
+
+ try:
+ created_row = self.repository.create_chat_message(
+ new_id=new_id,
+ sender=sender,
+ chat_tab_id=chat_tab_id,
+ message=message,
+ )
+ if not created_row:
+ raise APIException(CommonCode.FAIL_TO_VERIFY_CREATION)
+
+ return created_row
+
+ except sqlite3.Error as e:
+ if "database is locked" in str(e):
+ raise APIException(CommonCode.DB_BUSY) from e
+ raise APIException(CommonCode.FAIL) from e
+
+
+chat_message_service = ChatMessageService()
diff --git a/app/services/chat_tab_service.py b/app/services/chat_tab_service.py
new file mode 100644
index 0000000..f2e63ff
--- /dev/null
+++ b/app/services/chat_tab_service.py
@@ -0,0 +1,98 @@
+import sqlite3
+
+from fastapi import Depends
+
+from app.core.enum.db_key_prefix_name import DBSaveIdEnum
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+from app.core.utils import generate_prefixed_uuid
+from app.repository.chat_tab_repository import ChatTabRepository, chat_tab_repository
+from app.schemas.chat_tab.base_model import ChatTabBase
+from app.schemas.chat_tab.db_model import ChatTabInDB
+from app.schemas.chat_tab.update_model import ChatTabUpdate
+
+chat_tab_repository_dependency = Depends(lambda: chat_tab_repository)
+
+
+class ChatTabService:
+ def __init__(
+ self,
+ repository: ChatTabRepository = chat_tab_repository,
+ ):
+ self.repository = repository
+
+ def create_chat_tab(self, chatName: ChatTabBase) -> ChatTabInDB:
+ """새로운 AI 채팅을 데이터베이스에 저장합니다."""
+ chatName.validate_chat_tab_name()
+
+ new_id = generate_prefixed_uuid(DBSaveIdEnum.chat_tab.value)
+
+ try:
+ created_row = self.repository.create_chat_tab(
+ new_id=new_id,
+ name=chatName.name,
+ )
+ if not created_row:
+ raise APIException(CommonCode.FAIL_TO_VERIFY_CREATION)
+
+ return created_row
+
+ except sqlite3.Error as e:
+ # "database is locked" 오류를 명시적으로 처리
+ if "database is locked" in str(e):
+ raise APIException(CommonCode.DB_BUSY) from e
+ # 기타 모든 sqlite3 오류
+ raise APIException(CommonCode.FAIL) from e
+
+ def updated_chat_tab(self, chatID: str, chatName: ChatTabUpdate) -> ChatTabInDB:
+ """TabID에 해당하는 AIChatTab name을 수정합니다."""
+ chatName.validate_chat_tab_name()
+ try:
+ updated_chat_tab = self.repository.updated_chat_tab(chatID, chatName.name)
+
+ if not updated_chat_tab:
+ raise APIException(CommonCode.NO_CHAT_TAB_DATA)
+
+ return updated_chat_tab
+ except sqlite3.Error as e:
+ if "database is locked" in str(e):
+ raise APIException(CommonCode.DB_BUSY) from e
+ raise APIException(CommonCode.FAIL) from e
+
+ def delete_chat_tab(self, tabId: str) -> None:
+ """TabID에 해당하는 AIChatTab을 삭제합니다."""
+ try:
+ is_deleted = self.repository.delete_chat_tab(tabId)
+ if not is_deleted:
+ raise APIException(CommonCode.NO_CHAT_TAB_DATA)
+ except sqlite3.Error as e:
+ if "database is locked" in str(e):
+ raise APIException(CommonCode.DB_BUSY) from e
+ raise APIException(CommonCode.FAIL) from e
+
+ def get_all_chat_tab(self) -> ChatTabInDB:
+ """데이터베이스에 저장된 모든 Chat_tab을 조회합니다."""
+ try:
+ return self.repository.get_all_chat_tab()
+ except sqlite3.Error as e:
+ raise APIException(CommonCode.FAIL) from e
+
+ def get_chat_tab_by_tabId(self, tabId: str) -> ChatTabInDB:
+ """데이터베이스에 저장된 특정 Chat_tab을 조회합니다."""
+ try:
+ tabId.validate(tabId)
+ except ValueError as e:
+ raise APIException(CommonCode.INVALID_ANNOTATION_REQUEST, detail=str(e)) from e
+
+ try:
+ chat_tab = self.repository.get_chat_tab_by_id(tabId)
+
+ if not chat_tab:
+ raise APIException(CommonCode.NO_CHAT_TAB_DATA)
+ return chat_tab
+
+ except sqlite3.Error as e:
+ raise APIException(CommonCode.FAIL) from e
+
+
+chat_tab_service = ChatTabService()
diff --git a/app/services/driver_service.py b/app/services/driver_service.py
new file mode 100644
index 0000000..8ff5a17
--- /dev/null
+++ b/app/services/driver_service.py
@@ -0,0 +1,31 @@
+# app/services/driver_service.py
+import importlib
+import os
+import sqlite3
+
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+from app.schemas.driver.driver_info_model import DriverInfo
+
+
+class DriverService:
+    def read_driver_info(self, driver_info: DriverInfo):  # returns driver_info updated with the driver's version and on-disk size
+        try:
+            driver_name = driver_info.driver_name
+
+            if driver_name == "sqlite3":  # stdlib module — no dynamic import needed
+                version = sqlite3.sqlite_version
+                path = sqlite3.__file__
+
+            else:
+                mod = importlib.import_module(driver_name)
+                version = getattr(mod, "__version__", None)  # not every driver exposes __version__
+                path = getattr(mod.__spec__, "origin", None)  # __spec__ may be None -> AttributeError, caught below
+
+            size = os.path.getsize(path) if path else None  # file size of the driver module, if resolvable
+            return driver_info.update_from_module(version, size)
+        except (ModuleNotFoundError, AttributeError, OSError) as e:
+            raise APIException(CommonCode.FAIL) from e
+
+
+driver_service = DriverService()
diff --git a/app/services/query_service.py b/app/services/query_service.py
new file mode 100644
index 0000000..4a4a651
--- /dev/null
+++ b/app/services/query_service.py
@@ -0,0 +1,116 @@
+# app/services/query_service.py
+
+import importlib
+import sqlite3
+from typing import Any
+
+from fastapi import Depends
+
+from app.core.enum.db_driver import DBTypesEnum
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+from app.repository.query_repository import QueryRepository, query_repository
+from app.schemas.query.query_model import ExecutionQuery, QueryInfo, RequestExecutionQuery
+from app.schemas.query.result_model import (
+ BasicResult,
+ ExecutionResult,
+ ExecutionSelectResult,
+ QueryTestResult,
+ SelectQueryHistoryResult,
+)
+from app.schemas.user_db.db_profile_model import AllDBProfileInfo, DBProfileInfo
+
+query_repository_dependency = Depends(lambda: query_repository)
+
+
+class QueryService:
+    def execution(
+        self,
+        query_info: RequestExecutionQuery,
+        db_info: AllDBProfileInfo,
+        repository: QueryRepository = query_repository,
+    ) -> ExecutionSelectResult | ExecutionResult | BasicResult:
+        """
+        Execute the query, then persist an entry in query_history.
+        """
+        driver_module = self._get_driver_module(db_info.type)
+        connect_kwargs = self._prepare_connection_args(db_info, query_info.database)
+        result = repository.execution(query_info.query_text, driver_module, **connect_kwargs)
+        try:
+            query_history_info = ExecutionQuery.from_query_info(query_info, db_info.type, result.is_successful, None)
+            sql, data = self._get_create_query_and_data(query_history_info)
+            repository.create_query_history(sql, data, query_history_info.query_text)
+        except Exception as e:
+            raise APIException(CommonCode.FAIL) from e
+        return result
+
+    def execution_test(
+        self, query_info: QueryInfo, db_info: AllDBProfileInfo, repository: QueryRepository = query_repository
+    ) -> QueryTestResult:
+        """
+        Execute the query as a dry run; unlike execution(), no history entry is written.
+        """
+        driver_module = self._get_driver_module(db_info.type)
+        connect_kwargs = self._prepare_connection_args(db_info, query_info.database)
+        return repository.execution_test(query_info.query_text, driver_module, **connect_kwargs)
+
+    def find_query_history(
+        self, chat_tab_id: int, repository: QueryRepository = query_repository
+    ) -> SelectQueryHistoryResult:
+        """
+        Fetch the stored query history for a chat tab. NOTE(review): typed int, but tab IDs elsewhere are prefixed-uuid strings — confirm.
+        """
+        try:
+            return repository.find_query_history(chat_tab_id)
+        except Exception as e:
+            raise APIException(CommonCode.FAIL) from e
+
+    def _get_driver_module(self, db_type: str):
+        """
+        Dynamically load the DB-API driver module for the given DB type.
+        """
+        driver_name = DBTypesEnum[db_type.lower()].value
+        if driver_name == "sqlite3":  # stdlib module — no dynamic import needed
+            return sqlite3
+        return importlib.import_module(driver_name)
+
+    def _prepare_connection_args(self, db_info: DBProfileInfo, database_name: str) -> dict[str, Any]:
+        """
+        Build the driver-specific connection kwargs for the given DB type.
+        """
+        # SQLite connects by file name only
+        if db_info.type == "sqlite":
+            return {"db_name": db_info.name}
+
+        # All other DB types share these base parameters
+        kwargs = {"host": db_info.host, "port": db_info.port, "user": db_info.username, "password": db_info.password}
+
+        # Without any database name, connect server-wide
+        if not db_info.name and not database_name:
+            return kwargs
+
+        # A per-request database_name overrides the profile's stored name
+        final_db = database_name if database_name else db_info.name
+        if db_info.type == "postgresql":
+            kwargs["dbname"] = final_db
+        elif db_info.type in ["mysql", "mariadb"]:
+            kwargs["database"] = final_db
+        elif db_info.type == "oracle":
+            kwargs["dsn"] = f"{db_info.host}:{db_info.port}/{final_db}"
+
+        return kwargs
+
+    # ─────────────────────────────
+    # Query-history SQL builders
+    # ─────────────────────────────
+    def _get_create_query_and_data(self, query_info: ExecutionQuery) -> tuple[str, tuple]:
+        profile_dict = query_info.model_dump()  # insert only the fields that are set
+        columns_to_insert = {k: v for k, v in profile_dict.items() if v is not None}
+        columns = ", ".join(columns_to_insert.keys())
+        placeholders = ", ".join(["?"] * len(columns_to_insert))
+        sql = f"INSERT INTO query_history ({columns}) VALUES ({placeholders})"
+        data = tuple(columns_to_insert.values())
+        return sql, data
+
+
+query_service = QueryService()
diff --git a/app/services/user_db_service.py b/app/services/user_db_service.py
new file mode 100644
index 0000000..782f396
--- /dev/null
+++ b/app/services/user_db_service.py
@@ -0,0 +1,441 @@
+# app/services/user_db_service.py
+
+import importlib
+import sqlite3
+from typing import Any
+
+from fastapi import Depends
+
+from app.core.enum.db_driver import DBTypesEnum
+from app.core.enum.db_key_prefix_name import DBSaveIdEnum
+from app.core.exceptions import APIException
+from app.core.status import CommonCode
+from app.core.utils import generate_prefixed_uuid
+from app.repository.user_db_repository import UserDbRepository, user_db_repository
+from app.schemas.user_db.db_profile_model import AllDBProfileInfo, DBProfileInfo, UpdateOrCreateDBProfile
+from app.schemas.user_db.result_model import (
+ AllDBProfileResult,
+ BasicResult,
+ ChangeProfileResult,
+ ColumnListResult,
+ SchemaInfoResult,
+ TableInfo,
+ TableListResult,
+)
+
+user_db_repository_dependency = Depends(lambda: user_db_repository)
+
+
+class UserDbService:
+    def connection_test(self, db_info: DBProfileInfo, repository: UserDbRepository = user_db_repository) -> BasicResult:
+        """
+        Run a connection test with the given DB connection info and return the result.
+        """
+        try:
+            driver_module = self._get_driver_module(db_info.type)
+            connect_kwargs = self._prepare_connection_args(db_info)
+            result = repository.connection_test(driver_module, **connect_kwargs)
+            if not result.is_successful:
+                raise APIException(result.code)
+            return result
+        except APIException:
+            raise
+        except Exception as e:
+            raise APIException(CommonCode.FAIL) from e
+
+    def create_profile(
+        self, create_db_info: UpdateOrCreateDBProfile, repository: UserDbRepository = user_db_repository
+    ) -> ChangeProfileResult:
+        """
+        Persist a new DB connection profile and return the result.
+        """
+        create_db_info.id = generate_prefixed_uuid(DBSaveIdEnum.user_db.value)
+        try:
+            sql, data = self._get_create_query_and_data(create_db_info)
+            result = repository.create_profile(sql, data, create_db_info)
+            if not result.is_successful:
+                raise APIException(result.code)
+            return result
+        except APIException:
+            raise
+        except Exception as e:
+            raise APIException(CommonCode.FAIL_SAVE_PROFILE) from e
+
+    def update_profile(
+        self, update_db_info: UpdateOrCreateDBProfile, repository: UserDbRepository = user_db_repository
+    ) -> ChangeProfileResult:
+        """
+        Update an existing DB connection profile and return the result.
+        """
+        try:
+            sql, data = self._get_update_query_and_data(update_db_info)
+            result = repository.update_profile(sql, data, update_db_info)
+            if not result.is_successful:
+                raise APIException(result.code)
+            return result
+        except APIException:
+            raise
+        except Exception as e:
+            raise APIException(CommonCode.FAIL_UPDATE_PROFILE) from e
+
+    def delete_profile(self, profile_id: str, repository: UserDbRepository = user_db_repository) -> ChangeProfileResult:
+        """
+        Delete the DB connection profile and return the result.
+        """
+        try:
+            sql, data = self._get_delete_query_and_data(profile_id)
+            result = repository.delete_profile(sql, data, profile_id)
+            if not result.is_successful:
+                raise APIException(result.code)
+            return result
+        except APIException:
+            raise
+        except Exception as e:
+            raise APIException(CommonCode.FAIL_DELETE_PROFILE) from e
+
+    def find_all_profile(self, repository: UserDbRepository = user_db_repository) -> AllDBProfileResult:
+        """
+        Return every stored DB connection profile.
+        """
+        try:
+            sql = self._get_find_all_query()
+            result = repository.find_all_profile(sql)
+            if not result.is_successful:
+                raise APIException(result.code)
+            return result
+        except APIException:
+            raise
+        except Exception as e:
+            raise APIException(CommonCode.FAIL_FIND_PROFILE) from e
+
+    def find_profile(self, profile_id, repository: UserDbRepository = user_db_repository) -> AllDBProfileInfo:
+        """
+        Return a single DB connection profile.
+        """
+        try:
+            # Build the query and data in the service; the repository only executes them
+            sql, data = self._get_find_one_query_and_data(profile_id)
+            return repository.find_profile(sql, data)
+        except APIException:
+            raise
+        except Exception as e:
+            raise APIException(CommonCode.FAIL_FIND_PROFILE) from e
+
+    def find_schemas(
+        self, db_info: AllDBProfileInfo, repository: UserDbRepository = user_db_repository
+    ) -> SchemaInfoResult:
+        """
+        Query the schema list of the target database.
+        """
+        try:
+            driver_module = self._get_driver_module(db_info.type)
+            connect_kwargs = self._prepare_connection_args(db_info)
+            schema_query = self._get_schema_query(db_info.type)
+
+            return repository.find_schemas(driver_module, schema_query, **connect_kwargs)
+        except Exception as e:
+            raise APIException(CommonCode.FAIL) from e
+
+    def find_tables(
+        self, db_info: AllDBProfileInfo, schema_name: str, repository: UserDbRepository = user_db_repository
+    ) -> TableListResult:
+        """
+        Query the tables inside the given schema.
+        """
+        try:
+            driver_module = self._get_driver_module(db_info.type)
+            connect_kwargs = self._prepare_connection_args(db_info)
+            table_query = self._get_table_query(db_info.type, for_all_schemas=False)
+
+            return repository.find_tables(driver_module, table_query, schema_name, **connect_kwargs)
+        except Exception as e:
+            raise APIException(CommonCode.FAIL) from e
+
+    def find_columns(
+        self,
+        db_info: AllDBProfileInfo,
+        schema_name: str,
+        table_name: str,
+        repository: UserDbRepository = user_db_repository,
+    ) -> ColumnListResult:
+        """
+        Query column details for the given table.
+        """
+        try:
+            driver_module = self._get_driver_module(db_info.type)
+            connect_kwargs = self._prepare_connection_args(db_info)
+            column_query = self._get_column_query(db_info.type)  # None for sqlite — presumably handled in the repository; verify
+            db_type = db_info.type
+
+            return repository.find_columns(
+                driver_module, column_query, schema_name, db_type, table_name, **connect_kwargs
+            )
+        except Exception as e:
+            raise APIException(CommonCode.FAIL) from e
+
+    def get_full_schema_info(
+        self, db_info: AllDBProfileInfo, repository: UserDbRepository = user_db_repository
+    ) -> list[TableInfo]:
+        """
+        Given a DB profile, collect the database's full schema information
+        (tables, columns, constraints, indexes) and return it.
+        """
+        try:
+            driver_module = self._get_driver_module(db_info.type)
+            connect_kwargs = self._prepare_connection_args(db_info)
+
+            # 1. List every schema (database)
+            schemas_result = repository.find_schemas(
+                driver_module, self._get_schema_query(db_info.type), **connect_kwargs
+            )
+            if not schemas_result.is_successful:
+                raise APIException(schemas_result.code)
+
+            full_schema_info = []
+
+            # 2. List all tables in each schema
+            for schema_name in schemas_result.schemas:
+                tables_result = repository.find_tables(
+                    driver_module, self._get_table_query(db_info.type), schema_name, **connect_kwargs
+                )
+                if not tables_result.is_successful:
+                    # Skip (or log) schemas whose table listing failed
+                    continue
+
+                # 3. Collect detail info for each table
+                for table_name in tables_result.tables:
+                    columns_result = repository.find_columns(
+                        driver_module,
+                        self._get_column_query(db_info.type),
+                        schema_name,
+                        db_info.type,
+                        table_name,
+                        **connect_kwargs,
+                    )
+
+                    try:
+                        constraints = repository.find_constraints(
+                            driver_module, db_info.type, schema_name, table_name, **connect_kwargs
+                        )
+                        indexes = repository.find_indexes(
+                            driver_module, db_info.type, schema_name, table_name, **connect_kwargs
+                        )
+                    except (sqlite3.Error, self._get_driver_module(db_info.type).Error) as e:
+                        # Map DB exceptions raised in the repository to an API error
+                        raise APIException(CommonCode.FAIL_FIND_CONSTRAINTS_OR_INDEXES) from e
+
+                    table_info = TableInfo(
+                        name=table_name,
+                        columns=columns_result.columns if columns_result.is_successful else [],
+                        constraints=constraints,
+                        indexes=indexes,
+                        comment=None,  # table comments are not collected by the current queries
+                    )
+                    full_schema_info.append(table_info)
+
+            return full_schema_info
+
+        except APIException:
+            # Already an APIException — propagate untouched
+            raise
+        except Exception as e:
+            # Anything else becomes a generic failure
+            raise APIException(CommonCode.FAIL) from e
+
+    def get_sample_rows(
+        self, db_info: AllDBProfileInfo, table_infos: list[TableInfo], repository: UserDbRepository = user_db_repository
+    ) -> dict[str, list[dict[str, Any]]]:
+        """
+        Fetch sample rows for each of the given tables.
+        """
+        try:
+            driver_module = self._get_driver_module(db_info.type)
+            connect_kwargs = self._prepare_connection_args(db_info)
+
+            # SQLite has no schema concept, so no schema name is passed
+            schema_name = db_info.name if db_info.type != "sqlite" else ""
+            table_names = [table.name for table in table_infos]
+
+            return repository.find_sample_rows(driver_module, db_info.type, schema_name, table_names, **connect_kwargs)
+        except Exception as e:
+            raise APIException(CommonCode.FAIL_FIND_SAMPLE_ROWS) from e
+
+    def _get_driver_module(self, db_type: str):
+        """
+        Dynamically load the DB-API driver module for the given DB type.
+        """
+        driver_name = DBTypesEnum[db_type.lower()].value
+        if driver_name == "sqlite3":  # stdlib module — no dynamic import needed
+            return sqlite3
+        return importlib.import_module(driver_name)
+
+    def _prepare_connection_args(self, db_info: DBProfileInfo) -> dict[str, Any]:
+        """
+        Build the driver-specific connection kwargs for the given DB type.
+        """
+        # SQLite connects by file name only
+        if db_info.type == "sqlite":
+            return {"db_name": db_info.name}
+
+        # MSSQL uses an ODBC connection string instead of kwargs
+        if db_info.type == "mssql":
+            connection_string = (
+                f"DRIVER={{ODBC Driver 17 for SQL Server}};"
+                f"SERVER={db_info.host},{db_info.port};"
+                f"UID={db_info.username};"
+                f"PWD={db_info.password};"
+            )
+            if db_info.name:
+                connection_string += f"DATABASE={db_info.name};"
+            return {"connection_string": connection_string}
+
+        # All other DB types share these base parameters
+        kwargs = {"host": db_info.host, "port": db_info.port, "user": db_info.username, "password": db_info.password}
+
+        # Without a database name, connect server-wide
+        if not db_info.name:
+            return kwargs
+
+        # With a database name, add the driver-specific parameter
+        if db_info.type == "postgresql":
+            kwargs["dbname"] = db_info.name
+        elif db_info.type in ["mysql", "mariadb"]:
+            kwargs["database"] = db_info.name
+        elif db_info.type == "oracle":
+            kwargs["dsn"] = f"{db_info.host}:{db_info.port}/{db_info.name}"
+
+        return kwargs
+
+    def _get_schema_query(self, db_type: str) -> str | None:  # None when the DB type has no schema-listing SQL (sqlite)
+        db_type = db_type.lower()
+        if db_type == "postgresql":
+            return """
+                SELECT schema_name FROM information_schema.schemata
+                WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
+            """
+        elif db_type in ["mysql", "mariadb"]:
+            return "SELECT schema_name FROM information_schema.schemata"
+        elif db_type == "oracle":
+            return "SELECT username FROM all_users"
+        elif db_type == "sqlite":
+            return None
+        return None
+
+    def _get_table_query(self, db_type: str, for_all_schemas: bool = False) -> str | None:  # for_all_schemas toggles all-schema vs per-schema listing
+        db_type = db_type.lower()
+        if db_type == "postgresql":
+            if for_all_schemas:
+                return """
+                    SELECT table_name, table_schema FROM information_schema.tables
+                    WHERE table_type = 'BASE TABLE' AND table_schema NOT IN ('pg_catalog', 'information_schema')
+                """
+            else:
+                return """
+                    SELECT table_name FROM information_schema.tables
+                    WHERE table_type = 'BASE TABLE' AND table_schema = %s
+                """
+        elif db_type in ["mysql", "mariadb"]:
+            if for_all_schemas:
+                return """
+                    SELECT table_name, table_schema FROM information_schema.tables
+                    WHERE table_type = 'BASE TABLE'
+                """
+            else:
+                return """
+                    SELECT table_name, table_schema FROM information_schema.tables
+                    WHERE table_type = 'BASE TABLE' AND table_schema = %s
+                """
+        elif db_type == "oracle":
+            return "SELECT table_name FROM all_tables WHERE owner = :owner"
+        elif db_type == "sqlite":
+            return "SELECT name FROM sqlite_master WHERE type='table'"
+        return None
+
+    def _get_column_query(self, db_type: str) -> str | None:  # None for sqlite — presumably handled elsewhere; verify
+        db_type = db_type.lower()
+        if db_type == "postgresql":
+            return """
+                SELECT
+                    c.column_name,
+                    c.data_type,
+                    c.is_nullable,
+                    c.column_default,
+                    pgd.description AS comment,
+                    (
+                        SELECT TRUE
+                        FROM information_schema.table_constraints tc
+                        JOIN information_schema.key_column_usage kcu
+                            ON tc.constraint_name = kcu.constraint_name
+                            AND tc.table_schema = kcu.table_schema
+                        WHERE tc.constraint_type = 'PRIMARY KEY'
+                            AND tc.table_schema = c.table_schema
+                            AND tc.table_name = c.table_name
+                            AND kcu.column_name = c.column_name
+                    ) AS is_pk,
+                    c.character_maximum_length,
+                    c.numeric_precision,
+                    c.numeric_scale
+                FROM
+                    information_schema.columns c
+                LEFT JOIN
+                    pg_catalog.pg_stat_all_tables st
+                    ON c.table_schema = st.schemaname AND c.table_name = st.relname
+                LEFT JOIN
+                    pg_catalog.pg_description pgd
+                    ON pgd.objoid = st.relid AND pgd.objsubid = c.ordinal_position
+                WHERE
+                    c.table_schema = %s AND c.table_name = %s
+                ORDER BY
+                    c.ordinal_position;
+            """
+        elif db_type in ["mysql", "mariadb"]:
+            return """
+                SELECT column_name, data_type, is_nullable, column_default, table_name, table_schema
+                FROM information_schema.columns
+                WHERE table_schema = %s AND table_name = %s
+            """
+        elif db_type == "oracle":
+            return """
+                SELECT column_name, data_type, nullable, data_default, table_name
+                FROM all_tab_columns
+                WHERE owner = :owner AND table_name = :table
+            """
+        elif db_type == "sqlite":
+            return None
+        return None
+
+    # ─────────────────────────────
+    # Profile CRUD query builders
+    # ─────────────────────────────
+    def _get_create_query_and_data(self, db_info: UpdateOrCreateDBProfile) -> tuple[str, tuple]:
+        profile_dict = db_info.model_dump()  # insert only the fields that are set
+        columns_to_insert = {k: v for k, v in profile_dict.items() if v is not None}
+        columns = ", ".join(columns_to_insert.keys())
+        placeholders = ", ".join(["?"] * len(columns_to_insert))
+        sql = f"INSERT INTO db_profile ({columns}) VALUES ({placeholders})"
+        data = tuple(columns_to_insert.values())
+        return sql, data
+
+    def _get_update_query_and_data(self, db_info: UpdateOrCreateDBProfile) -> tuple[str, tuple]:
+        profile_dict = db_info.model_dump()  # update only the fields that are set; id goes in the WHERE clause
+        columns_to_update = {k: v for k, v in profile_dict.items() if v is not None and k != "id"}
+        set_clause = ", ".join([f"{key} = ?" for key in columns_to_update.keys()])
+        sql = f"UPDATE db_profile SET {set_clause} WHERE id = ?"
+        data = tuple(columns_to_update.values()) + (db_info.id,)
+        return sql, data
+
+    def _get_delete_query_and_data(self, profile_id: str) -> tuple[str, tuple]:
+        sql = "DELETE FROM db_profile WHERE id = ?"
+        data = (profile_id,)
+        return sql, data
+
+    def _get_find_all_query(self) -> str:
+        return "SELECT id, type, host, port, name, username, view_name, created_at, updated_at FROM db_profile"  # password deliberately omitted from list views
+
+    def _get_find_one_query_and_data(self, profile_id: str) -> tuple[str, tuple]:
+        sql = "SELECT * FROM db_profile WHERE id = ?"
+        data = (profile_id,)
+        return sql, data
+
+
+user_db_service = UserDbService()
diff --git a/poetry.lock b/poetry.lock
index ff51991..8b5f1c4 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -27,14 +27,14 @@ files = [
[[package]]
name = "anyio"
-version = "4.9.0"
-description = "High level compatibility layer for multiple asynchronous event loop implementations"
+version = "4.10.0"
+description = "High-level concurrency and networking framework on top of asyncio or Trio"
optional = false
python-versions = ">=3.9"
-groups = ["main"]
+groups = ["main", "dev"]
files = [
- {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"},
- {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"},
+ {file = "anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1"},
+ {file = "anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6"},
]
[package.dependencies]
@@ -43,8 +43,6 @@ sniffio = ">=1.1"
typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""}
[package.extras]
-doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"]
-test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""]
trio = ["trio (>=0.26.1)"]
[[package]]
@@ -92,6 +90,99 @@ d = ["aiohttp (>=3.10)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]
+[[package]]
+name = "certifi"
+version = "2025.8.3"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.7"
+groups = ["dev"]
+files = [
+ {file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"},
+ {file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"},
+]
+
+[[package]]
+name = "cffi"
+version = "1.17.1"
+description = "Foreign Function Interface for Python calling C code."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "platform_python_implementation != \"PyPy\""
+files = [
+ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
+ {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"},
+ {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"},
+ {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"},
+ {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"},
+ {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"},
+ {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"},
+ {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"},
+ {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"},
+ {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"},
+ {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"},
+ {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"},
+ {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
+ {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
+ {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
+ {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
+ {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
+ {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
+ {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
+ {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
+ {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
+ {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
+ {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
+ {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
+ {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
+ {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
+ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
+ {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
+]
+
+[package.dependencies]
+pycparser = "*"
+
[[package]]
name = "cfgv"
version = "3.4.0"
@@ -126,22 +217,183 @@ description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["main", "dev"]
-markers = "platform_system == \"Windows\""
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
+markers = {main = "platform_system == \"Windows\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\""}
+
+[[package]]
+name = "coverage"
+version = "7.10.2"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "coverage-7.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:79f0283ab5e6499fd5fe382ca3d62afa40fb50ff227676a3125d18af70eabf65"},
+ {file = "coverage-7.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4545e906f595ee8ab8e03e21be20d899bfc06647925bc5b224ad7e8c40e08b8"},
+ {file = "coverage-7.10.2-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ae385e1d58fbc6a9b1c315e5510ac52281e271478b45f92ca9b5ad42cf39643f"},
+ {file = "coverage-7.10.2-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6f0cbe5f7dd19f3a32bac2251b95d51c3b89621ac88a2648096ce40f9a5aa1e7"},
+ {file = "coverage-7.10.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fd17f427f041f6b116dc90b4049c6f3e1230524407d00daa2d8c7915037b5947"},
+ {file = "coverage-7.10.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7f10ca4cde7b466405cce0a0e9971a13eb22e57a5ecc8b5f93a81090cc9c7eb9"},
+ {file = "coverage-7.10.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3b990df23dd51dccce26d18fb09fd85a77ebe46368f387b0ffba7a74e470b31b"},
+ {file = "coverage-7.10.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc3902584d25c7eef57fb38f440aa849a26a3a9f761a029a72b69acfca4e31f8"},
+ {file = "coverage-7.10.2-cp310-cp310-win32.whl", hash = "sha256:9dd37e9ac00d5eb72f38ed93e3cdf2280b1dbda3bb9b48c6941805f265ad8d87"},
+ {file = "coverage-7.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:99d16f15cb5baf0729354c5bd3080ae53847a4072b9ba1e10957522fb290417f"},
+ {file = "coverage-7.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c3b210d79925a476dfc8d74c7d53224888421edebf3a611f3adae923e212b27"},
+ {file = "coverage-7.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf67d1787cd317c3f8b2e4c6ed1ae93497be7e30605a0d32237ac37a37a8a322"},
+ {file = "coverage-7.10.2-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:069b779d03d458602bc0e27189876e7d8bdf6b24ac0f12900de22dd2154e6ad7"},
+ {file = "coverage-7.10.2-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4c2de4cb80b9990e71c62c2d3e9f3ec71b804b1f9ca4784ec7e74127e0f42468"},
+ {file = "coverage-7.10.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:75bf7ab2374a7eb107602f1e07310cda164016cd60968abf817b7a0b5703e288"},
+ {file = "coverage-7.10.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3f37516458ec1550815134937f73d6d15b434059cd10f64678a2068f65c62406"},
+ {file = "coverage-7.10.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:de3c6271c482c250d3303fb5c6bdb8ca025fff20a67245e1425df04dc990ece9"},
+ {file = "coverage-7.10.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:98a838101321ac3089c9bb1d4bfa967e8afed58021fda72d7880dc1997f20ae1"},
+ {file = "coverage-7.10.2-cp311-cp311-win32.whl", hash = "sha256:f2a79145a531a0e42df32d37be5af069b4a914845b6f686590739b786f2f7bce"},
+ {file = "coverage-7.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:e4f5f1320f8ee0d7cfa421ceb257bef9d39fd614dd3ddcfcacd284d4824ed2c2"},
+ {file = "coverage-7.10.2-cp311-cp311-win_arm64.whl", hash = "sha256:d8f2d83118f25328552c728b8e91babf93217db259ca5c2cd4dd4220b8926293"},
+ {file = "coverage-7.10.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:890ad3a26da9ec7bf69255b9371800e2a8da9bc223ae5d86daeb940b42247c83"},
+ {file = "coverage-7.10.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38fd1ccfca7838c031d7a7874d4353e2f1b98eb5d2a80a2fe5732d542ae25e9c"},
+ {file = "coverage-7.10.2-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:76c1ffaaf4f6f0f6e8e9ca06f24bb6454a7a5d4ced97a1bc466f0d6baf4bd518"},
+ {file = "coverage-7.10.2-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:86da8a3a84b79ead5c7d0e960c34f580bc3b231bb546627773a3f53c532c2f21"},
+ {file = "coverage-7.10.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99cef9731c8a39801830a604cc53c93c9e57ea8b44953d26589499eded9576e0"},
+ {file = "coverage-7.10.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ea58b112f2966a8b91eb13f5d3b1f8bb43c180d624cd3283fb33b1cedcc2dd75"},
+ {file = "coverage-7.10.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:20f405188d28da9522b7232e51154e1b884fc18d0b3a10f382d54784715bbe01"},
+ {file = "coverage-7.10.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:64586ce42bbe0da4d9f76f97235c545d1abb9b25985a8791857690f96e23dc3b"},
+ {file = "coverage-7.10.2-cp312-cp312-win32.whl", hash = "sha256:bc2e69b795d97ee6d126e7e22e78a509438b46be6ff44f4dccbb5230f550d340"},
+ {file = "coverage-7.10.2-cp312-cp312-win_amd64.whl", hash = "sha256:adda2268b8cf0d11f160fad3743b4dfe9813cd6ecf02c1d6397eceaa5b45b388"},
+ {file = "coverage-7.10.2-cp312-cp312-win_arm64.whl", hash = "sha256:164429decd0d6b39a0582eaa30c67bf482612c0330572343042d0ed9e7f15c20"},
+ {file = "coverage-7.10.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:aca7b5645afa688de6d4f8e89d30c577f62956fefb1bad021490d63173874186"},
+ {file = "coverage-7.10.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:96e5921342574a14303dfdb73de0019e1ac041c863743c8fe1aa6c2b4a257226"},
+ {file = "coverage-7.10.2-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:11333094c1bff621aa811b67ed794865cbcaa99984dedea4bd9cf780ad64ecba"},
+ {file = "coverage-7.10.2-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6eb586fa7d2aee8d65d5ae1dd71414020b2f447435c57ee8de8abea0a77d5074"},
+ {file = "coverage-7.10.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2d358f259d8019d4ef25d8c5b78aca4c7af25e28bd4231312911c22a0e824a57"},
+ {file = "coverage-7.10.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5250bda76e30382e0a2dcd68d961afcab92c3a7613606e6269855c6979a1b0bb"},
+ {file = "coverage-7.10.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a91e027d66eff214d88d9afbe528e21c9ef1ecdf4956c46e366c50f3094696d0"},
+ {file = "coverage-7.10.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:228946da741558904e2c03ce870ba5efd9cd6e48cbc004d9a27abee08100a15a"},
+ {file = "coverage-7.10.2-cp313-cp313-win32.whl", hash = "sha256:95e23987b52d02e7c413bf2d6dc6288bd5721beb518052109a13bfdc62c8033b"},
+ {file = "coverage-7.10.2-cp313-cp313-win_amd64.whl", hash = "sha256:f35481d42c6d146d48ec92d4e239c23f97b53a3f1fbd2302e7c64336f28641fe"},
+ {file = "coverage-7.10.2-cp313-cp313-win_arm64.whl", hash = "sha256:65b451949cb789c346f9f9002441fc934d8ccedcc9ec09daabc2139ad13853f7"},
+ {file = "coverage-7.10.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e8415918856a3e7d57a4e0ad94651b761317de459eb74d34cc1bb51aad80f07e"},
+ {file = "coverage-7.10.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f287a25a8ca53901c613498e4a40885b19361a2fe8fbfdbb7f8ef2cad2a23f03"},
+ {file = "coverage-7.10.2-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:75cc1a3f8c88c69bf16a871dab1fe5a7303fdb1e9f285f204b60f1ee539b8fc0"},
+ {file = "coverage-7.10.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ca07fa78cc9d26bc8c4740de1abd3489cf9c47cc06d9a8ab3d552ff5101af4c0"},
+ {file = "coverage-7.10.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c2e117e64c26300032755d4520cd769f2623cde1a1d1c3515b05a3b8add0ade1"},
+ {file = "coverage-7.10.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:daaf98009977f577b71f8800208f4d40d4dcf5c2db53d4d822787cdc198d76e1"},
+ {file = "coverage-7.10.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ea8d8fe546c528535c761ba424410bbeb36ba8a0f24be653e94b70c93fd8a8ca"},
+ {file = "coverage-7.10.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:fe024d40ac31eb8d5aae70215b41dafa264676caa4404ae155f77d2fa95c37bb"},
+ {file = "coverage-7.10.2-cp313-cp313t-win32.whl", hash = "sha256:8f34b09f68bdadec122ffad312154eda965ade433559cc1eadd96cca3de5c824"},
+ {file = "coverage-7.10.2-cp313-cp313t-win_amd64.whl", hash = "sha256:71d40b3ac0f26fa9ffa6ee16219a714fed5c6ec197cdcd2018904ab5e75bcfa3"},
+ {file = "coverage-7.10.2-cp313-cp313t-win_arm64.whl", hash = "sha256:abb57fdd38bf6f7dcc66b38dafb7af7c5fdc31ac6029ce373a6f7f5331d6f60f"},
+ {file = "coverage-7.10.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a3e853cc04987c85ec410905667eed4bf08b1d84d80dfab2684bb250ac8da4f6"},
+ {file = "coverage-7.10.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0100b19f230df72c90fdb36db59d3f39232391e8d89616a7de30f677da4f532b"},
+ {file = "coverage-7.10.2-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9c1cd71483ea78331bdfadb8dcec4f4edfb73c7002c1206d8e0af6797853f5be"},
+ {file = "coverage-7.10.2-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9f75dbf4899e29a37d74f48342f29279391668ef625fdac6d2f67363518056a1"},
+ {file = "coverage-7.10.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7df481e7508de1c38b9b8043da48d94931aefa3e32b47dd20277e4978ed5b95"},
+ {file = "coverage-7.10.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:835f39e618099325e7612b3406f57af30ab0a0af350490eff6421e2e5f608e46"},
+ {file = "coverage-7.10.2-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:12e52b5aa00aa720097d6947d2eb9e404e7c1101ad775f9661ba165ed0a28303"},
+ {file = "coverage-7.10.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:718044729bf1fe3e9eb9f31b52e44ddae07e434ec050c8c628bf5adc56fe4bdd"},
+ {file = "coverage-7.10.2-cp314-cp314-win32.whl", hash = "sha256:f256173b48cc68486299d510a3e729a96e62c889703807482dbf56946befb5c8"},
+ {file = "coverage-7.10.2-cp314-cp314-win_amd64.whl", hash = "sha256:2e980e4179f33d9b65ac4acb86c9c0dde904098853f27f289766657ed16e07b3"},
+ {file = "coverage-7.10.2-cp314-cp314-win_arm64.whl", hash = "sha256:14fb5b6641ab5b3c4161572579f0f2ea8834f9d3af2f7dd8fbaecd58ef9175cc"},
+ {file = "coverage-7.10.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:e96649ac34a3d0e6491e82a2af71098e43be2874b619547c3282fc11d3840a4b"},
+ {file = "coverage-7.10.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1a2e934e9da26341d342d30bfe91422bbfdb3f1f069ec87f19b2909d10d8dcc4"},
+ {file = "coverage-7.10.2-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:651015dcd5fd9b5a51ca79ece60d353cacc5beaf304db750407b29c89f72fe2b"},
+ {file = "coverage-7.10.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:81bf6a32212f9f66da03d63ecb9cd9bd48e662050a937db7199dbf47d19831de"},
+ {file = "coverage-7.10.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d800705f6951f75a905ea6feb03fff8f3ea3468b81e7563373ddc29aa3e5d1ca"},
+ {file = "coverage-7.10.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:248b5394718e10d067354448dc406d651709c6765669679311170da18e0e9af8"},
+ {file = "coverage-7.10.2-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:5c61675a922b569137cf943770d7ad3edd0202d992ce53ac328c5ff68213ccf4"},
+ {file = "coverage-7.10.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:52d708b5fd65589461381fa442d9905f5903d76c086c6a4108e8e9efdca7a7ed"},
+ {file = "coverage-7.10.2-cp314-cp314t-win32.whl", hash = "sha256:916369b3b914186b2c5e5ad2f7264b02cff5df96cdd7cdad65dccd39aa5fd9f0"},
+ {file = "coverage-7.10.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5b9d538e8e04916a5df63052d698b30c74eb0174f2ca9cd942c981f274a18eaf"},
+ {file = "coverage-7.10.2-cp314-cp314t-win_arm64.whl", hash = "sha256:04c74f9ef1f925456a9fd23a7eef1103126186d0500ef9a0acb0bd2514bdc7cc"},
+ {file = "coverage-7.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:765b13b164685a2f8b2abef867ad07aebedc0e090c757958a186f64e39d63dbd"},
+ {file = "coverage-7.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a219b70100500d0c7fd3ebb824a3302efb6b1a122baa9d4eb3f43df8f0b3d899"},
+ {file = "coverage-7.10.2-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e33e79a219105aa315439ee051bd50b6caa705dc4164a5aba6932c8ac3ce2d98"},
+ {file = "coverage-7.10.2-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bc3945b7bad33957a9eca16e9e5eae4b17cb03173ef594fdaad228f4fc7da53b"},
+ {file = "coverage-7.10.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9bdff88e858ee608a924acfad32a180d2bf6e13e059d6a7174abbae075f30436"},
+ {file = "coverage-7.10.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:44329cbed24966c0b49acb386352c9722219af1f0c80db7f218af7793d251902"},
+ {file = "coverage-7.10.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:be127f292496d0fbe20d8025f73221b36117b3587f890346e80a13b310712982"},
+ {file = "coverage-7.10.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6c031da749a05f7a01447dd7f47beedb498edd293e31e1878c0d52db18787df0"},
+ {file = "coverage-7.10.2-cp39-cp39-win32.whl", hash = "sha256:22aca3e691c7709c5999ccf48b7a8ff5cf5a8bd6fe9b36efbd4993f5a36b2fcf"},
+ {file = "coverage-7.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c7195444b932356055a8e287fa910bf9753a84a1bc33aeb3770e8fca521e032e"},
+ {file = "coverage-7.10.2-py3-none-any.whl", hash = "sha256:95db3750dd2e6e93d99fa2498f3a1580581e49c494bddccc6f85c5c21604921f"},
+ {file = "coverage-7.10.2.tar.gz", hash = "sha256:5d6e6d84e6dd31a8ded64759626627247d676a23c1b892e1326f7c55c8d61055"},
+]
+
+[package.extras]
+toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
+
+[[package]]
+name = "cryptography"
+version = "45.0.5"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = false
+python-versions = "!=3.9.0,!=3.9.1,>=3.7"
+groups = ["main"]
+files = [
+ {file = "cryptography-45.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:101ee65078f6dd3e5a028d4f19c07ffa4dd22cce6a20eaa160f8b5219911e7d8"},
+ {file = "cryptography-45.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3a264aae5f7fbb089dbc01e0242d3b67dffe3e6292e1f5182122bdf58e65215d"},
+ {file = "cryptography-45.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e74d30ec9c7cb2f404af331d5b4099a9b322a8a6b25c4632755c8757345baac5"},
+ {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3af26738f2db354aafe492fb3869e955b12b2ef2e16908c8b9cb928128d42c57"},
+ {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e6c00130ed423201c5bc5544c23359141660b07999ad82e34e7bb8f882bb78e0"},
+ {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:dd420e577921c8c2d31289536c386aaa30140b473835e97f83bc71ea9d2baf2d"},
+ {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d05a38884db2ba215218745f0781775806bde4f32e07b135348355fe8e4991d9"},
+ {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ad0caded895a00261a5b4aa9af828baede54638754b51955a0ac75576b831b27"},
+ {file = "cryptography-45.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9024beb59aca9d31d36fcdc1604dd9bbeed0a55bface9f1908df19178e2f116e"},
+ {file = "cryptography-45.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:91098f02ca81579c85f66df8a588c78f331ca19089763d733e34ad359f474174"},
+ {file = "cryptography-45.0.5-cp311-abi3-win32.whl", hash = "sha256:926c3ea71a6043921050eaa639137e13dbe7b4ab25800932a8498364fc1abec9"},
+ {file = "cryptography-45.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:b85980d1e345fe769cfc57c57db2b59cff5464ee0c045d52c0df087e926fbe63"},
+ {file = "cryptography-45.0.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3562c2f23c612f2e4a6964a61d942f891d29ee320edb62ff48ffb99f3de9ae8"},
+ {file = "cryptography-45.0.5-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3fcfbefc4a7f332dece7272a88e410f611e79458fab97b5efe14e54fe476f4fd"},
+ {file = "cryptography-45.0.5-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:460f8c39ba66af7db0545a8c6f2eabcbc5a5528fc1cf6c3fa9a1e44cec33385e"},
+ {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9b4cf6318915dccfe218e69bbec417fdd7c7185aa7aab139a2c0beb7468c89f0"},
+ {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2089cc8f70a6e454601525e5bf2779e665d7865af002a5dec8d14e561002e135"},
+ {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0027d566d65a38497bc37e0dd7c2f8ceda73597d2ac9ba93810204f56f52ebc7"},
+ {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:be97d3a19c16a9be00edf79dca949c8fa7eff621763666a145f9f9535a5d7f42"},
+ {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:7760c1c2e1a7084153a0f68fab76e754083b126a47d0117c9ed15e69e2103492"},
+ {file = "cryptography-45.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6ff8728d8d890b3dda5765276d1bc6fb099252915a2cd3aff960c4c195745dd0"},
+ {file = "cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a"},
+ {file = "cryptography-45.0.5-cp37-abi3-win32.whl", hash = "sha256:1e1da5accc0c750056c556a93c3e9cb828970206c68867712ca5805e46dc806f"},
+ {file = "cryptography-45.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:90cb0a7bb35959f37e23303b7eed0a32280510030daba3f7fdfbb65defde6a97"},
+ {file = "cryptography-45.0.5-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:206210d03c1193f4e1ff681d22885181d47efa1ab3018766a7b32a7b3d6e6afd"},
+ {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c648025b6840fe62e57107e0a25f604db740e728bd67da4f6f060f03017d5097"},
+ {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b8fa8b0a35a9982a3c60ec79905ba5bb090fc0b9addcfd3dc2dd04267e45f25e"},
+ {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:14d96584701a887763384f3c47f0ca7c1cce322aa1c31172680eb596b890ec30"},
+ {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57c816dfbd1659a367831baca4b775b2a5b43c003daf52e9d57e1d30bc2e1b0e"},
+ {file = "cryptography-45.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b9e38e0a83cd51e07f5a48ff9691cae95a79bea28fe4ded168a8e5c6c77e819d"},
+ {file = "cryptography-45.0.5-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8c4a6ff8a30e9e3d38ac0539e9a9e02540ab3f827a3394f8852432f6b0ea152e"},
+ {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bd4c45986472694e5121084c6ebbd112aa919a25e783b87eb95953c9573906d6"},
+ {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:982518cd64c54fcada9d7e5cf28eabd3ee76bd03ab18e08a48cad7e8b6f31b18"},
+ {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:12e55281d993a793b0e883066f590c1ae1e802e3acb67f8b442e721e475e6463"},
+ {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:5aa1e32983d4443e310f726ee4b071ab7569f58eedfdd65e9675484a4eb67bd1"},
+ {file = "cryptography-45.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e357286c1b76403dd384d938f93c46b2b058ed4dfcdce64a770f0537ed3feb6f"},
+ {file = "cryptography-45.0.5.tar.gz", hash = "sha256:72e76caa004ab63accdf26023fccd1d087f6d90ec6048ff33ad0445abf7f605a"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.14", markers = "platform_python_implementation != \"PyPy\""}
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs ; python_full_version >= \"3.8.0\"", "sphinx-rtd-theme (>=3.0.0) ; python_full_version >= \"3.8.0\""]
+docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
+nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8.0\""]
+pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
+sdist = ["build (>=1.0.0)"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["certifi (>=2024)", "cryptography-vectors (==45.0.5)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
+test-randomorder = ["pytest-randomly"]
[[package]]
name = "distlib"
-version = "0.3.9"
+version = "0.4.0"
description = "Distribution utilities"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
- {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"},
- {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"},
+ {file = "distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16"},
+ {file = "distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d"},
]
[[package]]
@@ -182,18 +434,134 @@ docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"]
typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""]
+[[package]]
+name = "greenlet"
+version = "3.2.3"
+description = "Lightweight in-process concurrent programming"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"
+files = [
+ {file = "greenlet-3.2.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:1afd685acd5597349ee6d7a88a8bec83ce13c106ac78c196ee9dde7c04fe87be"},
+ {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:761917cac215c61e9dc7324b2606107b3b292a8349bdebb31503ab4de3f559ac"},
+ {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a433dbc54e4a37e4fff90ef34f25a8c00aed99b06856f0119dcf09fbafa16392"},
+ {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:72e77ed69312bab0434d7292316d5afd6896192ac4327d44f3d613ecb85b037c"},
+ {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:68671180e3849b963649254a882cd544a3c75bfcd2c527346ad8bb53494444db"},
+ {file = "greenlet-3.2.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49c8cfb18fb419b3d08e011228ef8a25882397f3a859b9fe1436946140b6756b"},
+ {file = "greenlet-3.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:efc6dc8a792243c31f2f5674b670b3a95d46fa1c6a912b8e310d6f542e7b0712"},
+ {file = "greenlet-3.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:731e154aba8e757aedd0781d4b240f1225b075b4409f1bb83b05ff410582cf00"},
+ {file = "greenlet-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:96c20252c2f792defe9a115d3287e14811036d51e78b3aaddbee23b69b216302"},
+ {file = "greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822"},
+ {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83"},
+ {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf"},
+ {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b"},
+ {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147"},
+ {file = "greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5"},
+ {file = "greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc"},
+ {file = "greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba"},
+ {file = "greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34"},
+ {file = "greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d"},
+ {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b"},
+ {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d"},
+ {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264"},
+ {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688"},
+ {file = "greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb"},
+ {file = "greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c"},
+ {file = "greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163"},
+ {file = "greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849"},
+ {file = "greenlet-3.2.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:500b8689aa9dd1ab26872a34084503aeddefcb438e2e7317b89b11eaea1901ad"},
+ {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a07d3472c2a93117af3b0136f246b2833fdc0b542d4a9799ae5f41c28323faef"},
+ {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8704b3768d2f51150626962f4b9a9e4a17d2e37c8a8d9867bbd9fa4eb938d3b3"},
+ {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5035d77a27b7c62db6cf41cf786cfe2242644a7a337a0e155c80960598baab95"},
+ {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2d8aa5423cd4a396792f6d4580f88bdc6efcb9205891c9d40d20f6e670992efb"},
+ {file = "greenlet-3.2.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c724620a101f8170065d7dded3f962a2aea7a7dae133a009cada42847e04a7b"},
+ {file = "greenlet-3.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:873abe55f134c48e1f2a6f53f7d1419192a3d1a4e873bace00499a4e45ea6af0"},
+ {file = "greenlet-3.2.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:024571bbce5f2c1cfff08bf3fbaa43bbc7444f580ae13b0099e95d0e6e67ed36"},
+ {file = "greenlet-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5195fb1e75e592dd04ce79881c8a22becdfa3e6f500e7feb059b1e6fdd54d3e3"},
+ {file = "greenlet-3.2.3-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:3d04332dddb10b4a211b68111dabaee2e1a073663d117dc10247b5b1642bac86"},
+ {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8186162dffde068a465deab08fc72c767196895c39db26ab1c17c0b77a6d8b97"},
+ {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f4bfbaa6096b1b7a200024784217defedf46a07c2eee1a498e94a1b5f8ec5728"},
+ {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ed6cfa9200484d234d8394c70f5492f144b20d4533f69262d530a1a082f6ee9a"},
+ {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02b0df6f63cd15012bed5401b47829cfd2e97052dc89da3cfaf2c779124eb892"},
+ {file = "greenlet-3.2.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86c2d68e87107c1792e2e8d5399acec2487a4e993ab76c792408e59394d52141"},
+ {file = "greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a"},
+ {file = "greenlet-3.2.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:42efc522c0bd75ffa11a71e09cd8a399d83fafe36db250a87cf1dacfaa15dc64"},
+ {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d760f9bdfe79bff803bad32b4d8ffb2c1d2ce906313fc10a83976ffb73d64ca7"},
+ {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8324319cbd7b35b97990090808fdc99c27fe5338f87db50514959f8059999805"},
+ {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:8c37ef5b3787567d322331d5250e44e42b58c8c713859b8a04c6065f27efbf72"},
+ {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce539fb52fb774d0802175d37fcff5c723e2c7d249c65916257f0a940cee8904"},
+ {file = "greenlet-3.2.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:003c930e0e074db83559edc8705f3a2d066d4aa8c2f198aff1e454946efd0f26"},
+ {file = "greenlet-3.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7e70ea4384b81ef9e84192e8a77fb87573138aa5d4feee541d8014e452b434da"},
+ {file = "greenlet-3.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:22eb5ba839c4b2156f18f76768233fe44b23a31decd9cc0d4cc8141c211fd1b4"},
+ {file = "greenlet-3.2.3-cp39-cp39-win32.whl", hash = "sha256:4532f0d25df67f896d137431b13f4cdce89f7e3d4a96387a41290910df4d3a57"},
+ {file = "greenlet-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:aaa7aae1e7f75eaa3ae400ad98f8644bb81e1dc6ba47ce8a93d3f17274e08322"},
+ {file = "greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365"},
+]
+
+[package.extras]
+docs = ["Sphinx", "furo"]
+test = ["objgraph", "psutil"]
+
[[package]]
name = "h11"
version = "0.16.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
optional = false
python-versions = ">=3.8"
-groups = ["main"]
+groups = ["main", "dev"]
files = [
{file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"},
{file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"},
]
+[[package]]
+name = "httpcore"
+version = "1.0.9"
+description = "A minimal low-level HTTP client."
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+ {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"},
+ {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"},
+]
+
+[package.dependencies]
+certifi = "*"
+h11 = ">=0.16"
+
+[package.extras]
+asyncio = ["anyio (>=4.0,<5.0)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+trio = ["trio (>=0.22.0,<1.0)"]
+
+[[package]]
+name = "httpx"
+version = "0.28.1"
+description = "The next generation HTTP client."
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"},
+ {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"},
+]
+
+[package.dependencies]
+anyio = "*"
+certifi = "*"
+httpcore = "==1.*"
+idna = "*"
+
+[package.extras]
+brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
+cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+zstd = ["zstandard (>=0.18.0)"]
+
[[package]]
name = "identify"
version = "2.6.12"
@@ -215,7 +583,7 @@ version = "3.10"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.6"
-groups = ["main"]
+groups = ["main", "dev"]
files = [
{file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
{file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
@@ -224,6 +592,18 @@ files = [
[package.extras]
all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
+[[package]]
+name = "iniconfig"
+version = "2.1.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"},
+ {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"},
+]
+
[[package]]
name = "macholib"
version = "1.16.3"
@@ -252,6 +632,49 @@ files = [
{file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"},
]
+[[package]]
+name = "mysql-connector-python"
+version = "9.4.0"
+description = "A self-contained Python driver for communicating with MySQL servers, using an API that is compliant with the Python Database API Specification v2.0 (PEP 249)."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "mysql_connector_python-9.4.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3c2603e00516cf4208c6266e85c5c87d5f4d0ac79768106d50de42ccc8414c05"},
+ {file = "mysql_connector_python-9.4.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:47884fcb050112b8bef3458e17eac47cc81a6cbbf3524e3456146c949772d9b4"},
+ {file = "mysql_connector_python-9.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:f14b6936cd326e212fc9ab5f666dea3efea654f0cb644460334e60e22986e735"},
+ {file = "mysql_connector_python-9.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0f5ad70355720e64b72d7c068e858c9fd1f69b671d9575f857f235a10f878939"},
+ {file = "mysql_connector_python-9.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:7106670abce510e440d393e27fc3602b8cf21e7a8a80216cc9ad9a68cd2e4595"},
+ {file = "mysql_connector_python-9.4.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:7df1a8ddd182dd8adc914f6dc902a986787bf9599705c29aca7b2ce84e79d361"},
+ {file = "mysql_connector_python-9.4.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3892f20472e13e63b1fb4983f454771dd29f211b09724e69a9750e299542f2f8"},
+ {file = "mysql_connector_python-9.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:d3e87142103d71c4df647ece30f98e85e826652272ed1c74822b56f6acdc38e7"},
+ {file = "mysql_connector_python-9.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:b27fcd403436fe83bafb2fe7fcb785891e821e639275c4ad3b3bd1e25f533206"},
+ {file = "mysql_connector_python-9.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd6ff5afb9c324b0bbeae958c93156cce4168c743bf130faf224d52818d1f0ee"},
+ {file = "mysql_connector_python-9.4.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:4efa3898a24aba6a4bfdbf7c1f5023c78acca3150d72cc91199cca2ccd22f76f"},
+ {file = "mysql_connector_python-9.4.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:665c13e7402235162e5b7a2bfdee5895192121b64ea455c90a81edac6a48ede5"},
+ {file = "mysql_connector_python-9.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:815aa6cad0f351c1223ef345781a538f2e5e44ef405fdb3851eb322bd9c4ca2b"},
+ {file = "mysql_connector_python-9.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b3436a2c8c0ec7052932213e8d01882e6eb069dbab33402e685409084b133a1c"},
+ {file = "mysql_connector_python-9.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:57b0c224676946b70548c56798d5023f65afa1ba5b8ac9f04a143d27976c7029"},
+ {file = "mysql_connector_python-9.4.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:fde3bbffb5270a4b02077029914e6a9d2ec08f67d8375b4111432a2778e7540b"},
+ {file = "mysql_connector_python-9.4.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:25f77ad7d845df3b5a5a3a6a8d1fed68248dc418a6938a371d1ddaaab6b9a8e3"},
+ {file = "mysql_connector_python-9.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:227dd420c71e6d4788d52d98f298e563f16b6853577e5ade4bd82d644257c812"},
+ {file = "mysql_connector_python-9.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:5163381a312d38122eded2197eb5cd7ccf1a5c5881d4e7a6de10d6ea314d088e"},
+ {file = "mysql_connector_python-9.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:c727cb1f82b40c9aaa7a15ab5cf0a7f87c5d8dce32eab5ff2530a4aa6054e7df"},
+ {file = "mysql_connector_python-9.4.0-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:20f8154ab5c0ed444f8ef8e5fa91e65215037db102c137b5f995ebfffd309b78"},
+ {file = "mysql_connector_python-9.4.0-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:7b8976d89d67c8b0dc452471cb557d9998ed30601fb69a876bf1f0ecaa7954a4"},
+ {file = "mysql_connector_python-9.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:4ee4fe1b067e243aae21981e4b9f9d300a3104814b8274033ca8fc7a89b1729e"},
+ {file = "mysql_connector_python-9.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1c6b95404e80d003cd452e38674e91528e2b3a089fe505c882f813b564e64f9d"},
+ {file = "mysql_connector_python-9.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:a8f820c111335f225d63367307456eb7e10494f87e7a94acded3bb762e55a6d4"},
+ {file = "mysql_connector_python-9.4.0-py2.py3-none-any.whl", hash = "sha256:56e679169c704dab279b176fab2a9ee32d2c632a866c0f7cd48a8a1e2cf802c4"},
+ {file = "mysql_connector_python-9.4.0.tar.gz", hash = "sha256:d111360332ae78933daf3d48ff497b70739aa292ab0017791a33e826234e743b"},
+]
+
+[package.extras]
+dns-srv = ["dnspython (==2.6.1)"]
+gssapi = ["gssapi (==1.8.3)"]
+telemetry = ["opentelemetry-api (==1.33.1)", "opentelemetry-exporter-otlp-proto-http (==1.33.1)", "opentelemetry-sdk (==1.33.1)"]
+webauthn = ["fido2 (==1.1.2)"]
+
[[package]]
name = "nodeenv"
version = "1.9.1"
@@ -264,6 +687,53 @@ files = [
{file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
]
+[[package]]
+name = "oracledb"
+version = "3.3.0"
+description = "Python interface to Oracle Database"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "oracledb-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e9b52231f34349165dd9a70fe7ce20bc4d6b4ee1233462937fad79396bb1af6"},
+ {file = "oracledb-3.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3e9e3da89174461ceebd3401817b4020b3812bfa221fcd6419bfec877972a890"},
+ {file = "oracledb-3.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:605a58ade4e967bdf61284cc16417a36f42e5778191c702234adf558b799b822"},
+ {file = "oracledb-3.3.0-cp310-cp310-win32.whl", hash = "sha256:f449925215cac7e41ce24107db614f49817d0a3032a595f47212bac418b14345"},
+ {file = "oracledb-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:58fb5ec16fd5ff49a2bd163e71d09adda73353bde18cea0eae9b2a41affc2a41"},
+ {file = "oracledb-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d9adb74f837838e21898d938e3a725cf73099c65f98b0b34d77146b453e945e0"},
+ {file = "oracledb-3.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b063d1007882570f170ebde0f364e78d4a70c8f015735cc900663278b9ceef7"},
+ {file = "oracledb-3.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:187728f0a2d161676b8c581a9d8f15d9631a8fea1e628f6d0e9fa2f01280cd22"},
+ {file = "oracledb-3.3.0-cp311-cp311-win32.whl", hash = "sha256:920f14314f3402c5ab98f2efc5932e0547e9c0a4ca9338641357f73844e3e2b1"},
+ {file = "oracledb-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:825edb97976468db1c7e52c78ba38d75ce7e2b71a2e88f8629bcf02be8e68a8a"},
+ {file = "oracledb-3.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9d25e37d640872731ac9b73f83cbc5fc4743cd744766bdb250488caf0d7696a8"},
+ {file = "oracledb-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0bf7cdc2b668f939aa364f552861bc7a149d7cd3f3794730d43ef07613b2bf9"},
+ {file = "oracledb-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fe20540fde64a6987046807ea47af93be918fd70b9766b3eb803c01e6d4202e"},
+ {file = "oracledb-3.3.0-cp312-cp312-win32.whl", hash = "sha256:db080be9345cbf9506ffdaea3c13d5314605355e76d186ec4edfa49960ffb813"},
+ {file = "oracledb-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:be81e3afe79f6c8ece79a86d6067ad1572d2992ce1c590a086f3755a09535eb4"},
+ {file = "oracledb-3.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6abc3e4432350839ecb98527707f4929bfb58959159ea440977f621e0db82ac6"},
+ {file = "oracledb-3.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6770dabc441adce5c865c9f528992a7228b2e5e59924cbd8588eb159f548fc38"},
+ {file = "oracledb-3.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:55af5a49db7cbd03cef449ac51165d9aa30f26064481d68a653c81cc5a29ae80"},
+ {file = "oracledb-3.3.0-cp313-cp313-win32.whl", hash = "sha256:5b4a68e4d783186cea9236fb0caa295f6da382ba1b80ca7f86d2d045cf29a993"},
+ {file = "oracledb-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:ad63c0057d3f764cc2d96d4f6445b89a8ea59b42ed80f719d689292392ce62a3"},
+ {file = "oracledb-3.3.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:4c574a34a79934b9c6c3f5e4c715053ad3b46e18da38ec28d9c767e0541422ea"},
+ {file = "oracledb-3.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:172217e7511c58d8d3c09e9385f7d51696de27e639f336ba0a65d15009cd8cda"},
+ {file = "oracledb-3.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d450dcada7711007a9a8a2770f81b54c24ba1e1d2456643c3fae7a2ff26b3a29"},
+ {file = "oracledb-3.3.0-cp314-cp314-win32.whl", hash = "sha256:b19ca41b3344dc77c53f74d31e0ca442734314593c4bec578a62efebdb1b59d7"},
+ {file = "oracledb-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a410dcf69b18ea607f3aed5cb4ecdebeb7bfb5f86e746c09a864c0f5bd563279"},
+ {file = "oracledb-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2615f4f516a574fdf18e5aadca809bc90ac6ab37889d0293a9192c695fe07cd9"},
+ {file = "oracledb-3.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed608fee4e87319618be200d2befcdd17fa534e16f20cf60df6e9cbbfeadf58e"},
+ {file = "oracledb-3.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:35f6df7bec55314f56d4d87a53a1d5f6a0ded9ee106bc9346a5a4d4fe64aa667"},
+ {file = "oracledb-3.3.0-cp39-cp39-win32.whl", hash = "sha256:0434f4ed7ded88120487b2ed3a13c37f89fc62b283960a72ddc051293e971244"},
+ {file = "oracledb-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:4c0e77e8dd1315f05f3d98d1f08df45f7bedd99612caccf315bb754cb768d692"},
+ {file = "oracledb-3.3.0.tar.gz", hash = "sha256:e830d3544a1578296bcaa54c6e8c8ae10a58c7db467c528c4b27adbf9c8b4cb0"},
+]
+
+[package.dependencies]
+cryptography = ">=3.2.1"
+
+[package.extras]
+test = ["numpy", "pandas", "pyarrow"]
+
[[package]]
name = "packaging"
version = "25.0"
@@ -318,6 +788,22 @@ docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-a
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"]
type = ["mypy (>=1.14.1)"]
+[[package]]
+name = "pluggy"
+version = "1.6.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"},
+ {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["coverage", "pytest", "pytest-benchmark"]
+
[[package]]
name = "pre-commit"
version = "4.2.0"
@@ -337,6 +823,148 @@ nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
virtualenv = ">=20.10.0"
+[[package]]
+name = "psycopg2-binary"
+version = "2.9.10"
+description = "psycopg2 - Python-PostgreSQL Database Adapter"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"},
+]
+
+[[package]]
+name = "pycparser"
+version = "2.22"
+description = "C parser in Python"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "platform_python_implementation != \"PyPy\""
+files = [
+ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
+ {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
+]
+
+[[package]]
+name = "pycryptodome"
+version = "3.23.0"
+description = "Cryptographic library for Python"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+groups = ["main"]
+files = [
+ {file = "pycryptodome-3.23.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a176b79c49af27d7f6c12e4b178b0824626f40a7b9fed08f712291b6d54bf566"},
+ {file = "pycryptodome-3.23.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:573a0b3017e06f2cffd27d92ef22e46aa3be87a2d317a5abf7cc0e84e321bd75"},
+ {file = "pycryptodome-3.23.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:63dad881b99ca653302b2c7191998dd677226222a3f2ea79999aa51ce695f720"},
+ {file = "pycryptodome-3.23.0-cp27-cp27m-win32.whl", hash = "sha256:b34e8e11d97889df57166eda1e1ddd7676da5fcd4d71a0062a760e75060514b4"},
+ {file = "pycryptodome-3.23.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7ac1080a8da569bde76c0a104589c4f414b8ba296c0b3738cf39a466a9fb1818"},
+ {file = "pycryptodome-3.23.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6fe8258e2039eceb74dfec66b3672552b6b7d2c235b2dfecc05d16b8921649a8"},
+ {file = "pycryptodome-3.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:0011f7f00cdb74879142011f95133274741778abba114ceca229adbf8e62c3e4"},
+ {file = "pycryptodome-3.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:90460fc9e088ce095f9ee8356722d4f10f86e5be06e2354230a9880b9c549aae"},
+ {file = "pycryptodome-3.23.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4764e64b269fc83b00f682c47443c2e6e85b18273712b98aa43bcb77f8570477"},
+ {file = "pycryptodome-3.23.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb8f24adb74984aa0e5d07a2368ad95276cf38051fe2dc6605cbcf482e04f2a7"},
+ {file = "pycryptodome-3.23.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d97618c9c6684a97ef7637ba43bdf6663a2e2e77efe0f863cce97a76af396446"},
+ {file = "pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a53a4fe5cb075075d515797d6ce2f56772ea7e6a1e5e4b96cf78a14bac3d265"},
+ {file = "pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:763d1d74f56f031788e5d307029caef067febf890cd1f8bf61183ae142f1a77b"},
+ {file = "pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:954af0e2bd7cea83ce72243b14e4fb518b18f0c1649b576d114973e2073b273d"},
+ {file = "pycryptodome-3.23.0-cp313-cp313t-win32.whl", hash = "sha256:257bb3572c63ad8ba40b89f6fc9d63a2a628e9f9708d31ee26560925ebe0210a"},
+ {file = "pycryptodome-3.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6501790c5b62a29fcb227bd6b62012181d886a767ce9ed03b303d1f22eb5c625"},
+ {file = "pycryptodome-3.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9a77627a330ab23ca43b48b130e202582e91cc69619947840ea4d2d1be21eb39"},
+ {file = "pycryptodome-3.23.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:187058ab80b3281b1de11c2e6842a357a1f71b42cb1e15bce373f3d238135c27"},
+ {file = "pycryptodome-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:cfb5cd445280c5b0a4e6187a7ce8de5a07b5f3f897f235caa11f1f435f182843"},
+ {file = "pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67bd81fcbe34f43ad9422ee8fd4843c8e7198dd88dd3d40e6de42ee65fbe1490"},
+ {file = "pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8987bd3307a39bc03df5c8e0e3d8be0c4c3518b7f044b0f4c15d1aa78f52575"},
+ {file = "pycryptodome-3.23.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa0698f65e5b570426fc31b8162ed4603b0c2841cbb9088e2b01641e3065915b"},
+ {file = "pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:53ecbafc2b55353edcebd64bf5da94a2a2cdf5090a6915bcca6eca6cc452585a"},
+ {file = "pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:156df9667ad9f2ad26255926524e1c136d6664b741547deb0a86a9acf5ea631f"},
+ {file = "pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:dea827b4d55ee390dc89b2afe5927d4308a8b538ae91d9c6f7a5090f397af1aa"},
+ {file = "pycryptodome-3.23.0-cp37-abi3-win32.whl", hash = "sha256:507dbead45474b62b2bbe318eb1c4c8ee641077532067fec9c1aa82c31f84886"},
+ {file = "pycryptodome-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:c75b52aacc6c0c260f204cbdd834f76edc9fb0d8e0da9fbf8352ef58202564e2"},
+ {file = "pycryptodome-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:11eeeb6917903876f134b56ba11abe95c0b0fd5e3330def218083c7d98bbcb3c"},
+ {file = "pycryptodome-3.23.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:350ebc1eba1da729b35ab7627a833a1a355ee4e852d8ba0447fafe7b14504d56"},
+ {file = "pycryptodome-3.23.0-pp27-pypy_73-win32.whl", hash = "sha256:93837e379a3e5fd2bb00302a47aee9fdf7940d83595be3915752c74033d17ca7"},
+ {file = "pycryptodome-3.23.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ddb95b49df036ddd264a0ad246d1be5b672000f12d6961ea2c267083a5e19379"},
+ {file = "pycryptodome-3.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e95564beb8782abfd9e431c974e14563a794a4944c29d6d3b7b5ea042110b4"},
+ {file = "pycryptodome-3.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14e15c081e912c4b0d75632acd8382dfce45b258667aa3c67caf7a4d4c13f630"},
+ {file = "pycryptodome-3.23.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7fc76bf273353dc7e5207d172b83f569540fc9a28d63171061c42e361d22353"},
+ {file = "pycryptodome-3.23.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:45c69ad715ca1a94f778215a11e66b7ff989d792a4d63b68dc586a1da1392ff5"},
+ {file = "pycryptodome-3.23.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:865d83c906b0fc6a59b510deceee656b6bc1c4fa0d82176e2b77e97a420a996a"},
+ {file = "pycryptodome-3.23.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89d4d56153efc4d81defe8b65fd0821ef8b2d5ddf8ed19df31ba2f00872b8002"},
+ {file = "pycryptodome-3.23.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3f2d0aaf8080bda0587d58fc9fe4766e012441e2eed4269a77de6aea981c8be"},
+ {file = "pycryptodome-3.23.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64093fc334c1eccfd3933c134c4457c34eaca235eeae49d69449dc4728079339"},
+ {file = "pycryptodome-3.23.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ce64e84a962b63a47a592690bdc16a7eaf709d2c2697ababf24a0def566899a6"},
+ {file = "pycryptodome-3.23.0.tar.gz", hash = "sha256:447700a657182d60338bab09fdb27518f8856aecd80ae4c6bdddb67ff5da44ef"},
+]
+
[[package]]
name = "pydantic"
version = "2.11.7"
@@ -471,27 +1099,42 @@ files = [
[package.dependencies]
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
+[[package]]
+name = "pygments"
+version = "2.19.2"
+description = "Pygments is a syntax highlighting package written in Python."
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+ {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"},
+ {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"},
+]
+
+[package.extras]
+windows-terminal = ["colorama (>=0.4.6)"]
+
[[package]]
name = "pyinstaller"
-version = "6.14.2"
+version = "6.15.0"
description = "PyInstaller bundles a Python application and all its dependencies into a single package."
optional = false
-python-versions = "<3.14,>=3.8"
+python-versions = "<3.15,>=3.8"
groups = ["dev"]
markers = "python_version < \"3.14\""
files = [
- {file = "pyinstaller-6.14.2-py3-none-macosx_10_13_universal2.whl", hash = "sha256:d77d18bf5343a1afef2772393d7a489d4ec2282dee5bca549803fc0d74b78330"},
- {file = "pyinstaller-6.14.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:3fa0c391e1300a9fd7752eb1ffe2950112b88fba9d2743eee2ef218a15f4705f"},
- {file = "pyinstaller-6.14.2-py3-none-manylinux2014_i686.whl", hash = "sha256:077efb2d01d16d9c8fdda3ad52788f0fead2791c5cec9ed6ce058af7e26eb74b"},
- {file = "pyinstaller-6.14.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:fdd2bd020a18736806a6bd5d3c4352f1209b427a96ad6c459d88aec1d90c4f21"},
- {file = "pyinstaller-6.14.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:03862c6b3cf7b16843d24b529f89cd4077cbe467883cd54ce7a81940d6da09d3"},
- {file = "pyinstaller-6.14.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:78827a21ada2a848e98671852d20d74b2955b6e2aaf2359ed13a462e1a603d84"},
- {file = "pyinstaller-6.14.2-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:185710ab1503dfdfa14c43237d394d96ac183422d588294be42531480dfa6c38"},
- {file = "pyinstaller-6.14.2-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:6c673a7e761bd4a2560cfd5dbe1ccdcfe2dff304b774e6e5242fc5afed953661"},
- {file = "pyinstaller-6.14.2-py3-none-win32.whl", hash = "sha256:1697601aa788e3a52f0b5e620b4741a34b82e6f222ec6e1318b3a1349f566bb2"},
- {file = "pyinstaller-6.14.2-py3-none-win_amd64.whl", hash = "sha256:e10e0e67288d6dcb5898a917dd1d4272aa0ff33f197ad49a0e39618009d63ed9"},
- {file = "pyinstaller-6.14.2-py3-none-win_arm64.whl", hash = "sha256:69fd11ca57e572387826afaa4a1b3d4cb74927d76f231f0308c0bd7872ca5ac1"},
- {file = "pyinstaller-6.14.2.tar.gz", hash = "sha256:142cce0719e79315f0cc26400c2e5c45d9b6b17e7e0491fee444a9f8f16f4917"},
+ {file = "pyinstaller-6.15.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:9f00c71c40148cd1e61695b2c6f1e086693d3bcf9bfa22ab513aa4254c3b966f"},
+ {file = "pyinstaller-6.15.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:cbcc8eb77320c60722030ac875883b564e00768fe3ff1721c7ba3ad0e0a277e9"},
+ {file = "pyinstaller-6.15.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c33e6302bc53db2df1104ed5566bd980b3e0ee7f18416a6e3caa908c12a54542"},
+ {file = "pyinstaller-6.15.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:eb902d0fed3bb1f8b7190dc4df5c11f3b59505767e0d56d1ed782b853938bbf3"},
+ {file = "pyinstaller-6.15.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:b4df862adae7cf1f08eff53c43ace283822447f7f528f72e4f94749062712f15"},
+ {file = "pyinstaller-6.15.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:b9ebf16ed0f99016ae8ae5746dee4cb244848a12941539e62ce2eea1df5a3f95"},
+ {file = "pyinstaller-6.15.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:22193489e6a22435417103f61e7950363bba600ef36ec3ab1487303668c81092"},
+ {file = "pyinstaller-6.15.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:18f743069849dbaee3e10900385f35795a5743eabab55e99dcc42f204e40a0db"},
+ {file = "pyinstaller-6.15.0-py3-none-win32.whl", hash = "sha256:60da8f1b5071766b45c0f607d8bc3d7e59ba2c3b262d08f2e4066ba65f3544a2"},
+ {file = "pyinstaller-6.15.0-py3-none-win_amd64.whl", hash = "sha256:cbea297e16eeda30b41c300d6ec2fd2abea4dbd8d8a32650eeec36431c94fcd9"},
+ {file = "pyinstaller-6.15.0-py3-none-win_arm64.whl", hash = "sha256:f43c035621742cf2d19b84308c60e4e44e72c94786d176b8f6adcde351b5bd98"},
+ {file = "pyinstaller-6.15.0.tar.gz", hash = "sha256:a48fc4644ee4aa2aa2a35e7b51f496f8fbd7eecf6a2150646bbf1613ad07bc2d"},
]
[package.dependencies]
@@ -499,7 +1142,7 @@ altgraph = "*"
macholib = {version = ">=1.8", markers = "sys_platform == \"darwin\""}
packaging = ">=22.0"
pefile = {version = ">=2022.5.30,<2024.8.26 || >2024.8.26", markers = "sys_platform == \"win32\""}
-pyinstaller-hooks-contrib = ">=2025.5"
+pyinstaller-hooks-contrib = ">=2025.8"
pywin32-ctypes = {version = ">=0.2.1", markers = "sys_platform == \"win32\""}
setuptools = ">=42.0.0"
@@ -509,21 +1152,98 @@ hook-testing = ["execnet (>=1.5.0)", "psutil", "pytest (>=2.7.3)"]
[[package]]
name = "pyinstaller-hooks-contrib"
-version = "2025.5"
+version = "2025.8"
description = "Community maintained hooks for PyInstaller"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
markers = "python_version < \"3.14\""
files = [
- {file = "pyinstaller_hooks_contrib-2025.5-py3-none-any.whl", hash = "sha256:ebfae1ba341cb0002fb2770fad0edf2b3e913c2728d92df7ad562260988ca373"},
- {file = "pyinstaller_hooks_contrib-2025.5.tar.gz", hash = "sha256:707386770b8fe066c04aad18a71bc483c7b25e18b4750a756999f7da2ab31982"},
+ {file = "pyinstaller_hooks_contrib-2025.8-py3-none-any.whl", hash = "sha256:8d0b8cfa0cb689a619294ae200497374234bd4e3994b3ace2a4442274c899064"},
+ {file = "pyinstaller_hooks_contrib-2025.8.tar.gz", hash = "sha256:3402ad41dfe9b5110af134422e37fc5d421ba342c6cb980bd67cb30b7415641c"},
]
[package.dependencies]
packaging = ">=22.0"
setuptools = ">=42.0.0"
+[[package]]
+name = "pymysql"
+version = "1.1.1"
+description = "Pure Python MySQL Driver"
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c"},
+ {file = "pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0"},
+]
+
+[package.extras]
+ed25519 = ["PyNaCl (>=1.4.0)"]
+rsa = ["cryptography"]
+
+[[package]]
+name = "pytest"
+version = "8.4.1"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7"},
+ {file = "pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c"},
+]
+
+[package.dependencies]
+colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""}
+iniconfig = ">=1"
+packaging = ">=20"
+pluggy = ">=1.5,<2"
+pygments = ">=2.7.2"
+
+[package.extras]
+dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"]
+
+[[package]]
+name = "pytest-asyncio"
+version = "1.1.0"
+description = "Pytest support for asyncio"
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "pytest_asyncio-1.1.0-py3-none-any.whl", hash = "sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf"},
+ {file = "pytest_asyncio-1.1.0.tar.gz", hash = "sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea"},
+]
+
+[package.dependencies]
+pytest = ">=8.2,<9"
+
+[package.extras]
+docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"]
+testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]
+
+[[package]]
+name = "pytest-cov"
+version = "6.2.1"
+description = "Pytest plugin for measuring coverage."
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5"},
+ {file = "pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2"},
+]
+
+[package.dependencies]
+coverage = {version = ">=7.5", extras = ["toml"]}
+pluggy = ">=1.2"
+pytest = ">=6.2.5"
+
+[package.extras]
+testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
+
[[package]]
name = "pywin32-ctypes"
version = "0.2.3"
@@ -602,30 +1322,30 @@ files = [
[[package]]
name = "ruff"
-version = "0.12.2"
+version = "0.12.7"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
- {file = "ruff-0.12.2-py3-none-linux_armv6l.whl", hash = "sha256:093ea2b221df1d2b8e7ad92fc6ffdca40a2cb10d8564477a987b44fd4008a7be"},
- {file = "ruff-0.12.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:09e4cf27cc10f96b1708100fa851e0daf21767e9709e1649175355280e0d950e"},
- {file = "ruff-0.12.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8ae64755b22f4ff85e9c52d1f82644abd0b6b6b6deedceb74bd71f35c24044cc"},
- {file = "ruff-0.12.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eb3a6b2db4d6e2c77e682f0b988d4d61aff06860158fdb413118ca133d57922"},
- {file = "ruff-0.12.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73448de992d05517170fc37169cbca857dfeaeaa8c2b9be494d7bcb0d36c8f4b"},
- {file = "ruff-0.12.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b94317cbc2ae4a2771af641739f933934b03555e51515e6e021c64441532d"},
- {file = "ruff-0.12.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:45fc42c3bf1d30d2008023a0a9a0cfb06bf9835b147f11fe0679f21ae86d34b1"},
- {file = "ruff-0.12.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce48f675c394c37e958bf229fb5c1e843e20945a6d962cf3ea20b7a107dcd9f4"},
- {file = "ruff-0.12.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793d8859445ea47591272021a81391350205a4af65a9392401f418a95dfb75c9"},
- {file = "ruff-0.12.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6932323db80484dda89153da3d8e58164d01d6da86857c79f1961934354992da"},
- {file = "ruff-0.12.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6aa7e623a3a11538108f61e859ebf016c4f14a7e6e4eba1980190cacb57714ce"},
- {file = "ruff-0.12.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2a4a20aeed74671b2def096bdf2eac610c7d8ffcbf4fb0e627c06947a1d7078d"},
- {file = "ruff-0.12.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:71a4c550195612f486c9d1f2b045a600aeba851b298c667807ae933478fcef04"},
- {file = "ruff-0.12.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4987b8f4ceadf597c927beee65a5eaf994c6e2b631df963f86d8ad1bdea99342"},
- {file = "ruff-0.12.2-py3-none-win32.whl", hash = "sha256:369ffb69b70cd55b6c3fc453b9492d98aed98062db9fec828cdfd069555f5f1a"},
- {file = "ruff-0.12.2-py3-none-win_amd64.whl", hash = "sha256:dca8a3b6d6dc9810ed8f328d406516bf4d660c00caeaef36eb831cf4871b0639"},
- {file = "ruff-0.12.2-py3-none-win_arm64.whl", hash = "sha256:48d6c6bfb4761df68bc05ae630e24f506755e702d4fb08f08460be778c7ccb12"},
- {file = "ruff-0.12.2.tar.gz", hash = "sha256:d7b4f55cd6f325cb7621244f19c873c565a08aff5a4ba9c69aa7355f3f7afd3e"},
+ {file = "ruff-0.12.7-py3-none-linux_armv6l.whl", hash = "sha256:76e4f31529899b8c434c3c1dede98c4483b89590e15fb49f2d46183801565303"},
+ {file = "ruff-0.12.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:789b7a03e72507c54fb3ba6209e4bb36517b90f1a3569ea17084e3fd295500fb"},
+ {file = "ruff-0.12.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e1c2a3b8626339bb6369116e7030a4cf194ea48f49b64bb505732a7fce4f4e3"},
+ {file = "ruff-0.12.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32dec41817623d388e645612ec70d5757a6d9c035f3744a52c7b195a57e03860"},
+ {file = "ruff-0.12.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47ef751f722053a5df5fa48d412dbb54d41ab9b17875c6840a58ec63ff0c247c"},
+ {file = "ruff-0.12.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a828a5fc25a3efd3e1ff7b241fd392686c9386f20e5ac90aa9234a5faa12c423"},
+ {file = "ruff-0.12.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5726f59b171111fa6a69d82aef48f00b56598b03a22f0f4170664ff4d8298efb"},
+ {file = "ruff-0.12.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74e6f5c04c4dd4aba223f4fe6e7104f79e0eebf7d307e4f9b18c18362124bccd"},
+ {file = "ruff-0.12.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0bfe4e77fba61bf2ccadf8cf005d6133e3ce08793bbe870dd1c734f2699a3e"},
+ {file = "ruff-0.12.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06bfb01e1623bf7f59ea749a841da56f8f653d641bfd046edee32ede7ff6c606"},
+ {file = "ruff-0.12.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e41df94a957d50083fd09b916d6e89e497246698c3f3d5c681c8b3e7b9bb4ac8"},
+ {file = "ruff-0.12.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4000623300563c709458d0ce170c3d0d788c23a058912f28bbadc6f905d67afa"},
+ {file = "ruff-0.12.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:69ffe0e5f9b2cf2b8e289a3f8945b402a1b19eff24ec389f45f23c42a3dd6fb5"},
+ {file = "ruff-0.12.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a07a5c8ffa2611a52732bdc67bf88e243abd84fe2d7f6daef3826b59abbfeda4"},
+ {file = "ruff-0.12.7-py3-none-win32.whl", hash = "sha256:c928f1b2ec59fb77dfdf70e0419408898b63998789cc98197e15f560b9e77f77"},
+ {file = "ruff-0.12.7-py3-none-win_amd64.whl", hash = "sha256:9c18f3d707ee9edf89da76131956aba1270c6348bfee8f6c647de841eac7194f"},
+ {file = "ruff-0.12.7-py3-none-win_arm64.whl", hash = "sha256:dfce05101dbd11833a0776716d5d1578641b7fddb537fe7fa956ab85d1769b69"},
+ {file = "ruff-0.12.7.tar.gz", hash = "sha256:1fc3193f238bc2d7968772c82831a4ff69252f673be371fb49663f0068b7ec71"},
]
[[package]]
@@ -656,12 +1376,108 @@ version = "1.3.1"
description = "Sniff out which async library your code is running under"
optional = false
python-versions = ">=3.7"
-groups = ["main"]
+groups = ["main", "dev"]
files = [
{file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
{file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
]
+[[package]]
+name = "sqlalchemy"
+version = "2.0.42"
+description = "Database Abstraction Library"
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "SQLAlchemy-2.0.42-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ee065898359fdee83961aed5cf1fb4cfa913ba71b58b41e036001d90bebbf7a"},
+ {file = "SQLAlchemy-2.0.42-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56bc76d86216443daa2e27e6b04a9b96423f0b69b5d0c40c7f4b9a4cdf7d8d90"},
+ {file = "SQLAlchemy-2.0.42-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89143290fb94c50a8dec73b06109ccd245efd8011d24fc0ddafe89dc55b36651"},
+ {file = "SQLAlchemy-2.0.42-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:4efbdc9754c7145a954911bfeef815fb0843e8edab0e9cecfa3417a5cbd316af"},
+ {file = "SQLAlchemy-2.0.42-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:88f8a8007a658dfd82c16a20bd9673ae6b33576c003b5166d42697d49e496e61"},
+ {file = "SQLAlchemy-2.0.42-cp37-cp37m-win32.whl", hash = "sha256:c5dd245e6502990ccf612d51f220a7b04cbea3f00f6030691ffe27def76ca79b"},
+ {file = "SQLAlchemy-2.0.42-cp37-cp37m-win_amd64.whl", hash = "sha256:5651eb19cacbeb2fe7431e4019312ed00a0b3fbd2d701423e0e2ceaadb5bcd9f"},
+ {file = "sqlalchemy-2.0.42-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:172b244753e034d91a826f80a9a70f4cbac690641207f2217f8404c261473efe"},
+ {file = "sqlalchemy-2.0.42-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be28f88abd74af8519a4542185ee80ca914933ca65cdfa99504d82af0e4210df"},
+ {file = "sqlalchemy-2.0.42-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98b344859d282fde388047f1710860bb23f4098f705491e06b8ab52a48aafea9"},
+ {file = "sqlalchemy-2.0.42-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97978d223b11f1d161390a96f28c49a13ce48fdd2fed7683167c39bdb1b8aa09"},
+ {file = "sqlalchemy-2.0.42-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e35b9b000c59fcac2867ab3a79fc368a6caca8706741beab3b799d47005b3407"},
+ {file = "sqlalchemy-2.0.42-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bc7347ad7a7b1c78b94177f2d57263113bb950e62c59b96ed839b131ea4234e1"},
+ {file = "sqlalchemy-2.0.42-cp310-cp310-win32.whl", hash = "sha256:739e58879b20a179156b63aa21f05ccacfd3e28e08e9c2b630ff55cd7177c4f1"},
+ {file = "sqlalchemy-2.0.42-cp310-cp310-win_amd64.whl", hash = "sha256:1aef304ada61b81f1955196f584b9e72b798ed525a7c0b46e09e98397393297b"},
+ {file = "sqlalchemy-2.0.42-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c34100c0b7ea31fbc113c124bcf93a53094f8951c7bf39c45f39d327bad6d1e7"},
+ {file = "sqlalchemy-2.0.42-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad59dbe4d1252448c19d171dfba14c74e7950b46dc49d015722a4a06bfdab2b0"},
+ {file = "sqlalchemy-2.0.42-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9187498c2149919753a7fd51766ea9c8eecdec7da47c1b955fa8090bc642eaa"},
+ {file = "sqlalchemy-2.0.42-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f092cf83ebcafba23a247f5e03f99f5436e3ef026d01c8213b5eca48ad6efa9"},
+ {file = "sqlalchemy-2.0.42-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc6afee7e66fdba4f5a68610b487c1f754fccdc53894a9567785932dbb6a265e"},
+ {file = "sqlalchemy-2.0.42-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:260ca1d2e5910f1f1ad3fe0113f8fab28657cee2542cb48c2f342ed90046e8ec"},
+ {file = "sqlalchemy-2.0.42-cp311-cp311-win32.whl", hash = "sha256:2eb539fd83185a85e5fcd6b19214e1c734ab0351d81505b0f987705ba0a1e231"},
+ {file = "sqlalchemy-2.0.42-cp311-cp311-win_amd64.whl", hash = "sha256:9193fa484bf00dcc1804aecbb4f528f1123c04bad6a08d7710c909750fa76aeb"},
+ {file = "sqlalchemy-2.0.42-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:09637a0872689d3eb71c41e249c6f422e3e18bbd05b4cd258193cfc7a9a50da2"},
+ {file = "sqlalchemy-2.0.42-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3cb3ec67cc08bea54e06b569398ae21623534a7b1b23c258883a7c696ae10df"},
+ {file = "sqlalchemy-2.0.42-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e87e6a5ef6f9d8daeb2ce5918bf5fddecc11cae6a7d7a671fcc4616c47635e01"},
+ {file = "sqlalchemy-2.0.42-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b718011a9d66c0d2f78e1997755cd965f3414563b31867475e9bc6efdc2281d"},
+ {file = "sqlalchemy-2.0.42-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:16d9b544873fe6486dddbb859501a07d89f77c61d29060bb87d0faf7519b6a4d"},
+ {file = "sqlalchemy-2.0.42-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21bfdf57abf72fa89b97dd74d3187caa3172a78c125f2144764a73970810c4ee"},
+ {file = "sqlalchemy-2.0.42-cp312-cp312-win32.whl", hash = "sha256:78b46555b730a24901ceb4cb901c6b45c9407f8875209ed3c5d6bcd0390a6ed1"},
+ {file = "sqlalchemy-2.0.42-cp312-cp312-win_amd64.whl", hash = "sha256:4c94447a016f36c4da80072e6c6964713b0af3c8019e9c4daadf21f61b81ab53"},
+ {file = "sqlalchemy-2.0.42-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:941804f55c7d507334da38133268e3f6e5b0340d584ba0f277dd884197f4ae8c"},
+ {file = "sqlalchemy-2.0.42-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:95d3d06a968a760ce2aa6a5889fefcbdd53ca935735e0768e1db046ec08cbf01"},
+ {file = "sqlalchemy-2.0.42-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cf10396a8a700a0f38ccd220d940be529c8f64435c5d5b29375acab9267a6c9"},
+ {file = "sqlalchemy-2.0.42-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cae6c2b05326d7c2c7c0519f323f90e0fb9e8afa783c6a05bb9ee92a90d0f04"},
+ {file = "sqlalchemy-2.0.42-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f50f7b20677b23cfb35b6afcd8372b2feb348a38e3033f6447ee0704540be894"},
+ {file = "sqlalchemy-2.0.42-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d88a1c0d66d24e229e3938e1ef16ebdbd2bf4ced93af6eff55225f7465cf350"},
+ {file = "sqlalchemy-2.0.42-cp313-cp313-win32.whl", hash = "sha256:45c842c94c9ad546c72225a0c0d1ae8ef3f7c212484be3d429715a062970e87f"},
+ {file = "sqlalchemy-2.0.42-cp313-cp313-win_amd64.whl", hash = "sha256:eb9905f7f1e49fd57a7ed6269bc567fcbbdac9feadff20ad6bd7707266a91577"},
+ {file = "sqlalchemy-2.0.42-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ed5a6959b1668d97a32e3fd848b485f65ee3c05a759dee06d90e4545a3c77f1e"},
+ {file = "sqlalchemy-2.0.42-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2ddbaafe32f0dd12d64284b1c3189104b784c9f3dba8cc1ba7e642e2b14b906f"},
+ {file = "sqlalchemy-2.0.42-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37f4f42568b6c656ee177b3e111d354b5dda75eafe9fe63492535f91dfa35829"},
+ {file = "sqlalchemy-2.0.42-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb57923d852d38671a17abda9a65cc59e3e5eab51fb8307b09de46ed775bcbb8"},
+ {file = "sqlalchemy-2.0.42-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:437c2a8b0c780ff8168a470beb22cb4a25e1c63ea6a7aec87ffeb07aa4b76641"},
+ {file = "sqlalchemy-2.0.42-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:480f7df62f0b3ad6aa011eefa096049dc1770208bb71f234959ee2864206eefe"},
+ {file = "sqlalchemy-2.0.42-cp38-cp38-win32.whl", hash = "sha256:d119c80c614d62d32e236ae68e21dd28a2eaf070876b2f28a6075d5bae54ef3f"},
+ {file = "sqlalchemy-2.0.42-cp38-cp38-win_amd64.whl", hash = "sha256:be3a02f963c8d66e28bb4183bebab66dc4379701d92e660f461c65fecd6ff399"},
+ {file = "sqlalchemy-2.0.42-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:78548fd65cd76d4c5a2e6b5f245d7734023ee4de33ee7bb298f1ac25a9935e0d"},
+ {file = "sqlalchemy-2.0.42-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cf4bf5a174d8a679a713b7a896470ffc6baab78e80a79e7ec5668387ffeccc8b"},
+ {file = "sqlalchemy-2.0.42-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c7ff7ba08b375f8a8fa0511e595c9bdabb5494ec68f1cf69bb24e54c0d90f2"},
+ {file = "sqlalchemy-2.0.42-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b3c117f65d64e806ce5ce9ce578f06224dc36845e25ebd2554b3e86960e1aed"},
+ {file = "sqlalchemy-2.0.42-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:27e4a7b3a7a61ff919c2e7caafd612f8626114e6e5ebbe339de3b5b1df9bc27e"},
+ {file = "sqlalchemy-2.0.42-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b01e0dd39f96aefda5ab002d8402db4895db871eb0145836246ce0661635ce55"},
+ {file = "sqlalchemy-2.0.42-cp39-cp39-win32.whl", hash = "sha256:49362193b1f43aa158deebf438062d7b5495daa9177c6c5d0f02ceeb64b544ea"},
+ {file = "sqlalchemy-2.0.42-cp39-cp39-win_amd64.whl", hash = "sha256:636ec3dc83b2422a7ff548d0f8abf9c23742ca50e2a5cdc492a151eac7a0248b"},
+ {file = "sqlalchemy-2.0.42-py3-none-any.whl", hash = "sha256:defcdff7e661f0043daa381832af65d616e060ddb54d3fe4476f51df7eaa1835"},
+ {file = "sqlalchemy-2.0.42.tar.gz", hash = "sha256:160bedd8a5c28765bd5be4dec2d881e109e33b34922e50a3b881a7681773ac5f"},
+]
+
+[package.dependencies]
+greenlet = {version = ">=1", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
+typing-extensions = ">=4.6.0"
+
+[package.extras]
+aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"]
+aioodbc = ["aioodbc", "greenlet (>=1)"]
+aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"]
+asyncio = ["greenlet (>=1)"]
+asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"]
+mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"]
+mssql = ["pyodbc"]
+mssql-pymssql = ["pymssql"]
+mssql-pyodbc = ["pyodbc"]
+mypy = ["mypy (>=0.910)"]
+mysql = ["mysqlclient (>=1.4.0)"]
+mysql-connector = ["mysql-connector-python"]
+oracle = ["cx_oracle (>=8)"]
+oracle-oracledb = ["oracledb (>=1.0.1)"]
+postgresql = ["psycopg2 (>=2.7)"]
+postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"]
+postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
+postgresql-psycopg = ["psycopg (>=3.0.7)"]
+postgresql-psycopg2binary = ["psycopg2-binary"]
+postgresql-psycopg2cffi = ["psycopg2cffi"]
+postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
+pymysql = ["pymysql"]
+sqlcipher = ["sqlcipher3_binary"]
+
[[package]]
name = "starlette"
version = "0.46.2"
@@ -686,11 +1502,12 @@ version = "4.14.1"
description = "Backported and Experimental Type Hints for Python 3.9+"
optional = false
python-versions = ">=3.9"
-groups = ["main"]
+groups = ["main", "dev"]
files = [
{file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"},
{file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"},
]
+markers = {dev = "python_version < \"3.13\""}
[[package]]
name = "typing-inspection"
@@ -728,14 +1545,14 @@ standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)
[[package]]
name = "virtualenv"
-version = "20.31.2"
+version = "20.33.0"
description = "Virtual Python Environment builder"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
- {file = "virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11"},
- {file = "virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af"},
+ {file = "virtualenv-20.33.0-py3-none-any.whl", hash = "sha256:106b6baa8ab1b526d5a9b71165c85c456fbd49b16976c88e2bc9352ee3bc5d3f"},
+ {file = "virtualenv-20.33.0.tar.gz", hash = "sha256:47e0c0d2ef1801fce721708ccdf2a28b9403fa2307c3268aebd03225976f61d2"},
]
[package.dependencies]
@@ -750,4 +1567,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess
[metadata]
lock-version = "2.1"
python-versions = ">=3.11"
-content-hash = "3b5b7454d461acc6cfb10bd75966c73b99fada86f142d8aa3b6b1d16463c6c30"
+content-hash = "f40eb442118ade4ca787e92415f89b495f0378212e7cad8ce1847a5d2b3b548c"
diff --git a/pyproject.toml b/pyproject.toml
index e13ea88..6621c55 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -10,7 +10,14 @@ readme = "README.md"
requires-python = ">=3.11"
dependencies = [
"fastapi (>=0.115.14,<0.116.0)",
- "uvicorn (>=0.35.0,<0.36.0)"
+ "uvicorn (>=0.35.0,<0.36.0)",
+ "pydantic (>=2.11.7,<3.0.0)",
+ "sqlalchemy (>=2.0.41,<3.0.0)",
+ "psycopg2-binary (>=2.9.10,<3.0.0)",
+ "mysql-connector-python (>=9.4.0,<10.0.0)",
+ "pymysql (>=1.1.1,<2.0.0)",
+ "pycryptodome (>=3.23.0,<4.0.0)",
+ "oracledb (>=3.3.0,<4.0.0)"
]
@@ -23,11 +30,15 @@ ruff = "^0.12.2"
black = "^25.1.0"
pre-commit = "^4.2.0"
pyinstaller = {version = "^6.14.2", python = ">=3.11,<3.14"}
+pytest = "^8.4.1"
+httpx = "^0.28.1"
+pytest-asyncio = "^1.1.0"
# ----------------------------
# Ruff 설정
# ----------------------------
+pytest-cov = "^6.2.1"
[tool.ruff]
line-length = 120
exclude = [