Merge branch 'main' into jzh

This commit is contained in:
JzoNg
2026-04-07 12:52:13 +08:00
88 changed files with 1745 additions and 1719 deletions

View File

@@ -20,4 +20,4 @@
- [x] I understand that this PR may be closed in case there was no previous discussion or issues. (This doesn't apply to typos!)
- [x] I've added a test for each change that was introduced, and I tried as much as possible to make a single atomic change.
- [x] I've updated the documentation accordingly.
- [x] I ran `make lint` and `make type-check` (backend) and `cd web && npx lint-staged` (frontend) to appease the lint gods
- [x] I ran `make lint` and `make type-check` (backend) and `cd web && pnpm exec lint-staged` (frontend) to appease the lint gods

View File

@@ -65,7 +65,7 @@ jobs:
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Login to Docker Hub
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_TOKEN }}
@@ -130,7 +130,7 @@ jobs:
merge-multiple: true
- name: Login to Docker Hub
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_TOKEN }}

View File

@@ -149,7 +149,7 @@ jobs:
.editorconfig
- name: Super-linter
uses: super-linter/super-linter/slim@61abc07d755095a68f4987d1c2c3d1d64408f1f9 # v8.5.0
uses: super-linter/super-linter/slim@9e863354e3ff62e0727d37183162c4a88873df41 # v8.6.0
if: steps.changed-files.outputs.any_changed == 'true'
env:
BASH_SEVERITY: warning

View File

@@ -240,7 +240,7 @@ jobs:
- name: Run Claude Code for Translation Sync
if: steps.context.outputs.CHANGED_FILES != ''
uses: anthropics/claude-code-action@88c168b39e7e64da0286d812b6e9fbebb6708185 # v1.0.82
uses: anthropics/claude-code-action@6e2bd52842c65e914eba5c8badd17560bd26b5de # v1.0.89
with:
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
github_token: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -36,7 +36,7 @@ jobs:
remove_tool_cache: true
- name: Setup UV and Python
uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
with:
enable-cache: true
python-version: ${{ matrix.python-version }}

View File

@@ -95,31 +95,5 @@ if $web_modified; then
exit 1
fi
echo "Running unit tests check"
modified_files=$(git diff --cached --name-only -- utils | grep -v '\.spec\.ts$' || true)
if [ -n "$modified_files" ]; then
for file in $modified_files; do
test_file="${file%.*}.spec.ts"
echo "Checking for test file: $test_file"
# check if the test file exists
if [ -f "../$test_file" ]; then
echo "Detected changes in $file, running corresponding unit tests..."
pnpm run test "../$test_file"
if [ $? -ne 0 ]; then
echo "Unit tests failed. Please fix the errors before committing."
exit 1
fi
echo "Unit tests for $file passed."
else
echo "Warning: $file does not have a corresponding test file."
fi
done
echo "All unit tests for modified web/utils files have passed."
fi
cd ../
fi

18
api/celery_healthcheck.py Normal file
View File

@@ -0,0 +1,18 @@
# This module provides a lightweight Celery instance for use in Docker health checks.
# Unlike celery_entrypoint.py, this does NOT import app.py and therefore avoids
# initializing all Flask extensions (DB, Redis, storage, blueprints, etc.).
# Using this module keeps the health check fast and low-cost.
from celery import Celery
from configs import dify_config
from extensions.ext_celery import get_celery_broker_transport_options, get_celery_ssl_options

# Bare Celery app bound only to the broker URL — no backend, no Flask app context.
celery = Celery(broker=dify_config.CELERY_BROKER_URL)

# Mirror the Sentinel transport options used by the real worker so the health
# check connects through the same broker path (empty dict when Sentinel is off).
broker_transport_options = get_celery_broker_transport_options()
if broker_transport_options:
    celery.conf.update(broker_transport_options=broker_transport_options)

# Apply the same SSL settings as the real worker when the broker requires TLS.
ssl_options = get_celery_ssl_options()
if ssl_options:
    celery.conf.update(broker_use_ssl=ssl_options)

View File

@@ -1,7 +1,7 @@
import datetime
import logging
import time
from typing import Any
from typing import TypedDict
import click
import sqlalchemy as sa
@@ -503,7 +503,19 @@ def _find_orphaned_draft_variables(batch_size: int = 1000) -> list[str]:
return [row[0] for row in result]
def _count_orphaned_draft_variables() -> dict[str, Any]:
class _AppOrphanCounts(TypedDict):
    """Per-app tally of orphaned draft variables and their associated files."""

    # Number of orphaned draft-variable rows belonging to the app.
    variables: int
    # Number of files referenced by those orphaned variables.
    files: int


class OrphanedDraftVariableStatsDict(TypedDict):
    """Aggregate result of _count_orphaned_draft_variables()."""

    # Orphaned draft variables across all apps.
    total_orphaned_variables: int
    # Files attached to orphaned variables across all apps.
    total_orphaned_files: int
    # Number of distinct apps that have at least one orphaned variable.
    orphaned_app_count: int
    # Per-app breakdown (presumably keyed by app id — confirm against the query).
    orphaned_by_app: dict[str, _AppOrphanCounts]
def _count_orphaned_draft_variables() -> OrphanedDraftVariableStatsDict:
"""
Count orphaned draft variables by app, including associated file counts.
@@ -526,7 +538,7 @@ def _count_orphaned_draft_variables() -> dict[str, Any]:
with db.engine.connect() as conn:
result = conn.execute(sa.text(variables_query))
orphaned_by_app = {}
orphaned_by_app: dict[str, _AppOrphanCounts] = {}
total_files = 0
for row in result:

View File

@@ -66,13 +66,13 @@ class WebhookTriggerApi(Resource):
with sessionmaker(db.engine).begin() as session:
# Get webhook trigger for this app and node
webhook_trigger = (
session.query(WorkflowWebhookTrigger)
webhook_trigger = session.scalar(
select(WorkflowWebhookTrigger)
.where(
WorkflowWebhookTrigger.app_id == app_model.id,
WorkflowWebhookTrigger.node_id == node_id,
)
.first()
.limit(1)
)
if not webhook_trigger:

View File

@@ -1,5 +1,3 @@
from typing import Any
import flask_login
from flask import make_response, request
from flask_restx import Resource
@@ -42,7 +40,7 @@ from libs.token import (
set_csrf_token_to_cookie,
set_refresh_token_to_cookie,
)
from services.account_service import AccountService, RegisterService, TenantService
from services.account_service import AccountService, InvitationDetailDict, RegisterService, TenantService
from services.billing_service import BillingService
from services.errors.account import AccountRegisterError
from services.errors.workspace import WorkSpaceNotAllowedCreateError, WorkspacesLimitExceededError
@@ -101,7 +99,7 @@ class LoginApi(Resource):
raise EmailPasswordLoginLimitError()
invite_token = args.invite_token
invitation_data: dict[str, Any] | None = None
invitation_data: InvitationDetailDict | None = None
if invite_token:
invitation_data = RegisterService.get_invitation_with_case_fallback(None, request_email, invite_token)
if invitation_data is None:

View File

@@ -3,6 +3,7 @@ import logging
from flask import request
from flask_restx import Resource
from pydantic import BaseModel, Field
from sqlalchemy import select
from sqlalchemy.orm import sessionmaker
from controllers.common.schema import register_schema_models
@@ -86,8 +87,8 @@ class CustomizedPipelineTemplateApi(Resource):
@enterprise_license_required
def post(self, template_id: str):
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
template = (
session.query(PipelineCustomizedTemplate).where(PipelineCustomizedTemplate.id == template_id).first()
template = session.scalar(
select(PipelineCustomizedTemplate).where(PipelineCustomizedTemplate.id == template_id).limit(1)
)
if not template:
raise ValueError("Customized pipeline template not found.")

View File

@@ -1,3 +1,5 @@
from typing import TypedDict
from flask import request
from flask_restx import Resource
from pydantic import BaseModel, Field
@@ -11,6 +13,21 @@ from services.billing_service import BillingService
_FALLBACK_LANG = "en-US"
class NotificationItemDict(TypedDict):
notification_id: str | None
frequency: str | None
lang: str
title: str
subtitle: str
body: str
title_pic_url: str
class NotificationResponseDict(TypedDict):
should_show: bool
notifications: list[NotificationItemDict]
def _pick_lang_content(contents: dict, lang: str) -> dict:
"""Return the single LangContent for *lang*, falling back to English."""
return contents.get(lang) or contents.get(_FALLBACK_LANG) or next(iter(contents.values()), {})
@@ -45,28 +62,30 @@ class NotificationApi(Resource):
result = BillingService.get_account_notification(str(current_user.id))
# Proto JSON uses camelCase field names (Kratos default marshaling).
response: NotificationResponseDict
if not result.get("shouldShow"):
return {"should_show": False, "notifications": []}, 200
response = {"should_show": False, "notifications": []}
return response, 200
lang = current_user.interface_language or _FALLBACK_LANG
notifications = []
notifications: list[NotificationItemDict] = []
for notification in result.get("notifications") or []:
contents: dict = notification.get("contents") or {}
lang_content = _pick_lang_content(contents, lang)
notifications.append(
{
"notification_id": notification.get("notificationId"),
"frequency": notification.get("frequency"),
"lang": lang_content.get("lang", lang),
"title": lang_content.get("title", ""),
"subtitle": lang_content.get("subtitle", ""),
"body": lang_content.get("body", ""),
"title_pic_url": lang_content.get("titlePicUrl", ""),
}
)
item: NotificationItemDict = {
"notification_id": notification.get("notificationId"),
"frequency": notification.get("frequency"),
"lang": lang_content.get("lang", lang),
"title": lang_content.get("title", ""),
"subtitle": lang_content.get("subtitle", ""),
"body": lang_content.get("body", ""),
"title_pic_url": lang_content.get("titlePicUrl", ""),
}
notifications.append(item)
return {"should_show": bool(notifications), "notifications": notifications}, 200
response = {"should_show": bool(notifications), "notifications": notifications}
return response, 200
@console_ns.route("/notification/dismiss")

View File

@@ -9,7 +9,14 @@ from controllers.common.schema import register_schema_models
from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from libs.login import current_account_with_tenant, login_required
from services.tag_service import TagService
from models.enums import TagType
from services.tag_service import (
SaveTagPayload,
TagBindingCreatePayload,
TagBindingDeletePayload,
TagService,
UpdateTagPayload,
)
dataset_tag_fields = {
"id": fields.String,
@@ -25,19 +32,19 @@ def build_dataset_tag_fields(api_or_ns: Namespace):
class TagBasePayload(BaseModel):
name: str = Field(description="Tag name", min_length=1, max_length=50)
type: Literal["knowledge", "app"] | None = Field(default=None, description="Tag type")
type: TagType = Field(description="Tag type")
class TagBindingPayload(BaseModel):
tag_ids: list[str] = Field(description="Tag IDs to bind")
target_id: str = Field(description="Target ID to bind tags to")
type: Literal["knowledge", "app"] | None = Field(default=None, description="Tag type")
type: TagType = Field(description="Tag type")
class TagBindingRemovePayload(BaseModel):
tag_id: str = Field(description="Tag ID to remove")
target_id: str = Field(description="Target ID to unbind tag from")
type: Literal["knowledge", "app"] | None = Field(default=None, description="Tag type")
type: TagType = Field(description="Tag type")
class TagListQueryParam(BaseModel):
@@ -82,7 +89,7 @@ class TagListApi(Resource):
raise Forbidden()
payload = TagBasePayload.model_validate(console_ns.payload or {})
tag = TagService.save_tags(payload.model_dump())
tag = TagService.save_tags(SaveTagPayload(name=payload.name, type=payload.type))
response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": 0}
@@ -103,7 +110,7 @@ class TagUpdateDeleteApi(Resource):
raise Forbidden()
payload = TagBasePayload.model_validate(console_ns.payload or {})
tag = TagService.update_tags(payload.model_dump(), tag_id)
tag = TagService.update_tags(UpdateTagPayload(name=payload.name, type=payload.type), tag_id)
binding_count = TagService.get_tag_binding_count(tag_id)
@@ -136,7 +143,9 @@ class TagBindingCreateApi(Resource):
raise Forbidden()
payload = TagBindingPayload.model_validate(console_ns.payload or {})
TagService.save_tag_binding(payload.model_dump())
TagService.save_tag_binding(
TagBindingCreatePayload(tag_ids=payload.tag_ids, target_id=payload.target_id, type=payload.type)
)
return {"result": "success"}, 200
@@ -154,6 +163,8 @@ class TagBindingDeleteApi(Resource):
raise Forbidden()
payload = TagBindingRemovePayload.model_validate(console_ns.payload or {})
TagService.delete_tag_binding(payload.model_dump())
TagService.delete_tag_binding(
TagBindingDeletePayload(tag_id=payload.tag_id, target_id=payload.target_id, type=payload.type)
)
return {"result": "success"}, 200

View File

@@ -1,6 +1,7 @@
from collections.abc import Callable
from functools import wraps
from sqlalchemy import select
from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import Forbidden
@@ -21,12 +22,12 @@ def plugin_permission_required(
tenant_id = current_tenant_id
with sessionmaker(db.engine).begin() as session:
permission = (
session.query(TenantPluginPermission)
permission = session.scalar(
select(TenantPluginPermission)
.where(
TenantPluginPermission.tenant_id == tenant_id,
)
.first()
.limit(1)
)
if not permission:

View File

@@ -4,6 +4,7 @@ from flask import Response
from flask_restx import Resource
from graphon.variables.input_entities import VariableEntity
from pydantic import BaseModel, Field, ValidationError
from sqlalchemy import select
from sqlalchemy.orm import Session, sessionmaker
from controllers.common.schema import register_schema_model
@@ -80,11 +81,11 @@ class MCPAppApi(Resource):
def _get_mcp_server_and_app(self, server_code: str, session: Session) -> tuple[AppMCPServer, App]:
"""Get and validate MCP server and app in one query session"""
mcp_server = session.query(AppMCPServer).where(AppMCPServer.server_code == server_code).first()
mcp_server = session.scalar(select(AppMCPServer).where(AppMCPServer.server_code == server_code).limit(1))
if not mcp_server:
raise MCPRequestError(mcp_types.INVALID_REQUEST, "Server Not Found")
app = session.query(App).where(App.id == mcp_server.app_id).first()
app = session.scalar(select(App).where(App.id == mcp_server.app_id).limit(1))
if not app:
raise MCPRequestError(mcp_types.INVALID_REQUEST, "App Not Found")
@@ -190,12 +191,12 @@ class MCPAppApi(Resource):
def _retrieve_end_user(self, tenant_id: str, mcp_server_id: str) -> EndUser | None:
"""Get end user - manages its own database session"""
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
return (
session.query(EndUser)
return session.scalar(
select(EndUser)
.where(EndUser.tenant_id == tenant_id)
.where(EndUser.session_id == mcp_server_id)
.where(EndUser.type == "mcp")
.first()
.limit(1)
)
def _create_end_user(

View File

@@ -22,10 +22,17 @@ from fields.tag_fields import DataSetTag
from libs.login import current_user
from models.account import Account
from models.dataset import DatasetPermissionEnum
from models.enums import TagType
from models.provider_ids import ModelProviderID
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
from services.entities.knowledge_entities.knowledge_entities import RetrievalModel
from services.tag_service import TagService
from services.tag_service import (
SaveTagPayload,
TagBindingCreatePayload,
TagBindingDeletePayload,
TagService,
UpdateTagPayload,
)
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
@@ -513,7 +520,7 @@ class DatasetTagsApi(DatasetApiResource):
raise Forbidden()
payload = TagCreatePayload.model_validate(service_api_ns.payload or {})
tag = TagService.save_tags({"name": payload.name, "type": "knowledge"})
tag = TagService.save_tags(SaveTagPayload(name=payload.name, type=TagType.KNOWLEDGE))
response = DataSetTag.model_validate(
{"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": 0}
@@ -536,9 +543,8 @@ class DatasetTagsApi(DatasetApiResource):
raise Forbidden()
payload = TagUpdatePayload.model_validate(service_api_ns.payload or {})
params = {"name": payload.name, "type": "knowledge"}
tag_id = payload.tag_id
tag = TagService.update_tags(params, tag_id)
tag = TagService.update_tags(UpdateTagPayload(name=payload.name, type=TagType.KNOWLEDGE), tag_id)
binding_count = TagService.get_tag_binding_count(tag_id)
@@ -585,7 +591,9 @@ class DatasetTagBindingApi(DatasetApiResource):
raise Forbidden()
payload = TagBindingPayload.model_validate(service_api_ns.payload or {})
TagService.save_tag_binding({"tag_ids": payload.tag_ids, "target_id": payload.target_id, "type": "knowledge"})
TagService.save_tag_binding(
TagBindingCreatePayload(tag_ids=payload.tag_ids, target_id=payload.target_id, type=TagType.KNOWLEDGE)
)
return "", 204
@@ -609,7 +617,9 @@ class DatasetTagUnbindingApi(DatasetApiResource):
raise Forbidden()
payload = TagUnbindingPayload.model_validate(service_api_ns.payload or {})
TagService.delete_tag_binding({"tag_id": payload.tag_id, "target_id": payload.target_id, "type": "knowledge"})
TagService.delete_tag_binding(
TagBindingDeletePayload(tag_id=payload.tag_id, target_id=payload.target_id, type=TagType.KNOWLEDGE)
)
return "", 204

View File

@@ -4,13 +4,23 @@ Serialization helpers for Service API knowledge pipeline endpoints.
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from typing import TYPE_CHECKING, TypedDict
if TYPE_CHECKING:
from models.model import UploadFile
def serialize_upload_file(upload_file: UploadFile) -> dict[str, Any]:
class UploadFileDict(TypedDict):
    """JSON-serializable projection of an UploadFile row, as returned by serialize_upload_file()."""

    id: str
    name: str
    size: int
    extension: str
    mime_type: str | None
    created_by: str
    # ISO-ish string timestamp or None — presumably formatted from the model's
    # created_at; confirm against serialize_upload_file's full body.
    created_at: str | None
def serialize_upload_file(upload_file: UploadFile) -> UploadFileDict:
return {
"id": upload_file.id,
"name": upload_file.name,

View File

@@ -509,8 +509,8 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
:return:
"""
with Session(db.engine, expire_on_commit=False) as session:
agent_thought: MessageAgentThought | None = (
session.query(MessageAgentThought).where(MessageAgentThought.id == event.agent_thought_id).first()
agent_thought: MessageAgentThought | None = session.scalar(
select(MessageAgentThought).where(MessageAgentThought.id == event.agent_thought_id).limit(1)
)
if agent_thought:

View File

@@ -345,8 +345,8 @@ class DatasourceManager:
@classmethod
def get_upload_file_by_id(cls, file_id: str, tenant_id: str) -> File:
with session_factory.create_session() as session:
upload_file = (
session.query(UploadFile).where(UploadFile.id == file_id, UploadFile.tenant_id == tenant_id).first()
upload_file = session.scalar(
select(UploadFile).where(UploadFile.id == file_id, UploadFile.tenant_id == tenant_id).limit(1)
)
if not upload_file:
raise ValueError(f"UploadFile not found for file_id={file_id}, tenant_id={tenant_id}")

View File

@@ -2,7 +2,7 @@ import json
import logging
import re
from collections.abc import Sequence
from typing import Protocol, cast
from typing import Protocol, TypedDict, cast
import json_repair
from graphon.enums import WorkflowNodeExecutionMetadataKey
@@ -49,6 +49,17 @@ class WorkflowServiceInterface(Protocol):
pass
class CodeGenerateResultDict(TypedDict):
    """Result of LLM code generation: the generated snippet plus its language and any error text."""

    code: str
    language: str
    # Empty string on success; error description otherwise — TODO confirm.
    error: str


class StructuredOutputResultDict(TypedDict):
    """Result of LLM structured-output generation."""

    output: str
    # Empty string on success; error description otherwise — TODO confirm.
    error: str
class LLMGenerator:
@classmethod
def generate_conversation_name(
@@ -293,7 +304,7 @@ class LLMGenerator:
cls,
tenant_id: str,
args: RuleCodeGeneratePayload,
):
) -> CodeGenerateResultDict:
if args.code_language == "python":
prompt_template = PromptTemplateParser(PYTHON_CODE_GENERATOR_PROMPT_TEMPLATE)
else:
@@ -362,7 +373,9 @@ class LLMGenerator:
return answer.strip()
@classmethod
def generate_structured_output(cls, tenant_id: str, args: RuleStructuredOutputPayload):
def generate_structured_output(
cls, tenant_id: str, args: RuleStructuredOutputPayload
) -> StructuredOutputResultDict:
model_manager = ModelManager.for_tenant(tenant_id=tenant_id)
model_instance = model_manager.get_model_instance(
tenant_id=tenant_id,
@@ -454,7 +467,7 @@ class LLMGenerator:
):
session = db.session()
app: App | None = session.query(App).where(App.id == flow_id).first()
app: App | None = session.scalar(select(App).where(App.id == flow_id).limit(1))
if not app:
raise ValueError("App not found.")
workflow = workflow_service.get_draft_workflow(app_model=app)

View File

@@ -6,6 +6,7 @@ import logging
import flask
from core.logging.context import get_request_id, get_trace_id
from core.logging.structured_formatter import IdentityDict
class TraceContextFilter(logging.Filter):
@@ -60,7 +61,7 @@ class IdentityContextFilter(logging.Filter):
record.user_type = identity.get("user_type", "")
return True
def _extract_identity(self) -> dict[str, str]:
def _extract_identity(self) -> IdentityDict:
"""Extract identity from current_user if in request context."""
try:
if not flask.has_request_context():
@@ -77,7 +78,7 @@ class IdentityContextFilter(logging.Filter):
from models import Account
from models.model import EndUser
identity: dict[str, str] = {}
identity: IdentityDict = {}
if isinstance(user, Account):
if user.current_tenant_id:

View File

@@ -1,7 +1,7 @@
import json
import logging
from collections.abc import Mapping
from typing import Any, cast
from typing import Any, NotRequired, TypedDict, cast
from graphon.variables.input_entities import VariableEntity, VariableEntityType
@@ -15,6 +15,17 @@ from services.app_generate_service import AppGenerateService
logger = logging.getLogger(__name__)
class ToolParameterSchemaDict(TypedDict):
    """JSON-Schema-shaped input schema built by build_parameter_schema() for an MCP tool."""

    # Schema node type (e.g. "object" for the top-level input schema — confirm).
    type: str
    # Per-parameter schema fragments keyed by parameter name.
    properties: dict[str, Any]
    # Names of required parameters.
    required: list[str]


class ToolArgumentsDict(TypedDict):
    """Arguments passed to app generation, shaped by prepare_tool_arguments() per app mode."""

    # Present only for non-workflow modes (absent when app.mode == WORKFLOW).
    query: NotRequired[str]
    inputs: dict[str, Any]
def handle_mcp_request(
app: App,
request: mcp_types.ClientRequest,
@@ -119,7 +130,7 @@ def handle_list_tools(
mcp_types.Tool(
name=app_name,
description=description,
inputSchema=parameter_schema,
inputSchema=cast(dict[str, Any], parameter_schema),
)
],
)
@@ -154,7 +165,7 @@ def build_parameter_schema(
app_mode: str,
user_input_form: list[VariableEntity],
parameters_dict: dict[str, str],
) -> dict[str, Any]:
) -> ToolParameterSchemaDict:
"""Build parameter schema for the tool"""
parameters, required = convert_input_form_to_parameters(user_input_form, parameters_dict)
@@ -174,7 +185,7 @@ def build_parameter_schema(
}
def prepare_tool_arguments(app: App, arguments: dict[str, Any]) -> dict[str, Any]:
def prepare_tool_arguments(app: App, arguments: dict[str, Any]) -> ToolArgumentsDict:
"""Prepare arguments based on app mode"""
if app.mode == AppMode.WORKFLOW:
return {"inputs": arguments}

View File

@@ -56,8 +56,10 @@ class BaseTraceInstance(ABC):
if not service_account:
raise ValueError(f"Creator account with id {app.created_by} not found for app {app_id}")
current_tenant = (
session.query(TenantAccountJoin).filter_by(account_id=service_account.id, current=True).first()
current_tenant = session.scalar(
select(TenantAccountJoin)
.where(TenantAccountJoin.account_id == service_account.id, TenantAccountJoin.current.is_(True))
.limit(1)
)
if not current_tenant:
raise ValueError(f"Current tenant not found for account {service_account.id}")

View File

@@ -241,8 +241,10 @@ class TencentDataTrace(BaseTraceInstance):
if not service_account:
raise ValueError(f"Creator account not found for app {app_id}")
current_tenant = (
session.query(TenantAccountJoin).filter_by(account_id=service_account.id, current=True).first()
current_tenant = session.scalar(
select(TenantAccountJoin)
.where(TenantAccountJoin.account_id == service_account.id, TenantAccountJoin.current.is_(True))
.limit(1)
)
if not current_tenant:
raise ValueError(f"Current tenant not found for account {service_account.id}")

View File

@@ -352,6 +352,7 @@ class MilvusVector(BaseVector):
# Create Index params for the collection
index_params_obj = IndexParams()
assert index_params is not None
index_params_obj.add_index(field_name=Field.VECTOR, **index_params)
# Create Sparse Vector Index for the collection

View File

@@ -135,37 +135,40 @@ def handle(sender: Message, **kwargs):
model_name=model_config.model,
)
if used_quota is not None:
if provider_configuration.system_configuration.current_quota_type == ProviderQuotaType.TRIAL:
from services.credit_pool_service import CreditPoolService
match provider_configuration.system_configuration.current_quota_type:
case ProviderQuotaType.TRIAL:
from services.credit_pool_service import CreditPoolService
CreditPoolService.check_and_deduct_credits(
tenant_id=tenant_id,
credits_required=used_quota,
pool_type="trial",
)
elif provider_configuration.system_configuration.current_quota_type == ProviderQuotaType.PAID:
from services.credit_pool_service import CreditPoolService
CreditPoolService.check_and_deduct_credits(
tenant_id=tenant_id,
credits_required=used_quota,
pool_type="paid",
)
else:
quota_update = _ProviderUpdateOperation(
filters=_ProviderUpdateFilters(
CreditPoolService.check_and_deduct_credits(
tenant_id=tenant_id,
provider_name=ModelProviderID(model_config.provider).provider_name,
provider_type=ProviderType.SYSTEM.value,
quota_type=provider_configuration.system_configuration.current_quota_type,
),
values=_ProviderUpdateValues(quota_used=Provider.quota_used + used_quota, last_used=current_time),
additional_filters=_ProviderUpdateAdditionalFilters(
quota_limit_check=True # Provider.quota_limit > Provider.quota_used
),
description="quota_deduction_update",
)
updates_to_perform.append(quota_update)
credits_required=used_quota,
pool_type="trial",
)
case ProviderQuotaType.PAID:
from services.credit_pool_service import CreditPoolService
CreditPoolService.check_and_deduct_credits(
tenant_id=tenant_id,
credits_required=used_quota,
pool_type="paid",
)
case ProviderQuotaType.FREE:
quota_update = _ProviderUpdateOperation(
filters=_ProviderUpdateFilters(
tenant_id=tenant_id,
provider_name=ModelProviderID(model_config.provider).provider_name,
provider_type=ProviderType.SYSTEM.value,
quota_type=provider_configuration.system_configuration.current_quota_type,
),
values=_ProviderUpdateValues(
quota_used=Provider.quota_used + used_quota, last_used=current_time
),
additional_filters=_ProviderUpdateAdditionalFilters(
quota_limit_check=True # Provider.quota_limit > Provider.quota_used
),
description="quota_deduction_update",
)
updates_to_perform.append(quota_update)
# Execute all updates
start_time = time_module.perf_counter()

View File

@@ -10,7 +10,7 @@ from configs import dify_config
from dify_app import DifyApp
def _get_celery_ssl_options() -> dict[str, Any] | None:
def get_celery_ssl_options() -> dict[str, Any] | None:
"""Get SSL configuration for Celery broker/backend connections."""
# Only apply SSL if we're using Redis as broker/backend
if not dify_config.BROKER_USE_SSL:
@@ -43,6 +43,19 @@ def _get_celery_ssl_options() -> dict[str, Any] | None:
return ssl_options
def get_celery_broker_transport_options() -> dict[str, Any]:
    """Return Celery broker transport options.

    When Redis Sentinel is enabled via configuration, returns the Sentinel
    master name and connection kwargs; otherwise returns an empty mapping.
    """
    if not dify_config.CELERY_USE_SENTINEL:
        return {}
    sentinel_kwargs = {
        "socket_timeout": dify_config.CELERY_SENTINEL_SOCKET_TIMEOUT,
        "password": dify_config.CELERY_SENTINEL_PASSWORD,
    }
    return {
        "master_name": dify_config.CELERY_SENTINEL_MASTER_NAME,
        "sentinel_kwargs": sentinel_kwargs,
    }
def init_app(app: DifyApp) -> Celery:
class FlaskTask(Task):
def __call__(self, *args: object, **kwargs: object) -> object:
@@ -53,16 +66,7 @@ def init_app(app: DifyApp) -> Celery:
init_request_context()
return self.run(*args, **kwargs)
broker_transport_options = {}
if dify_config.CELERY_USE_SENTINEL:
broker_transport_options = {
"master_name": dify_config.CELERY_SENTINEL_MASTER_NAME,
"sentinel_kwargs": {
"socket_timeout": dify_config.CELERY_SENTINEL_SOCKET_TIMEOUT,
"password": dify_config.CELERY_SENTINEL_PASSWORD,
},
}
broker_transport_options = get_celery_broker_transport_options()
celery_app = Celery(
app.name,
@@ -89,7 +93,7 @@ def init_app(app: DifyApp) -> Celery:
)
# Apply SSL configuration if enabled
ssl_options = _get_celery_ssl_options()
ssl_options = get_celery_ssl_options()
if ssl_options:
celery_app.conf.update(
broker_use_ssl=ssl_options,

View File

@@ -11,7 +11,7 @@ SQLAlchemy instrumentor appends comments to SQL statements.
"""
import logging
from typing import Any
from typing import Any, TypedDict
from celery.signals import task_postrun, task_prerun
from opentelemetry import context
@@ -24,9 +24,17 @@ _SQLCOMMENTER_CONTEXT_KEY = "SQLCOMMENTER_ORM_TAGS_AND_VALUES"
_TOKEN_ATTR = "_dify_sqlcommenter_context_token"
def _build_celery_sqlcommenter_tags(task: Any) -> dict[str, str | int]:
class CelerySqlcommenterTagsDict(TypedDict, total=False):
    """Tags built by _build_celery_sqlcommenter_tags() and attached to SQL statements as comments.

    All keys are optional (total=False): each tag is added best-effort from the
    current Celery task and OpenTelemetry context.
    """

    # e.g. "celery:<version>" — see _build_celery_sqlcommenter_tags.
    framework: str
    task_name: str
    # W3C traceparent header value from the active OTel context — TODO confirm.
    traceparent: str
    celery_retries: int
    routing_key: str
def _build_celery_sqlcommenter_tags(task: Any) -> CelerySqlcommenterTagsDict:
"""Build SQL commenter tags from the current Celery task and OpenTelemetry context."""
tags: dict[str, str | int] = {}
tags: CelerySqlcommenterTagsDict = {}
try:
tags["framework"] = f"celery:{_get_celery_version()}"

View File

@@ -13,7 +13,7 @@ import operator
from dataclasses import asdict, dataclass
from datetime import datetime
from enum import StrEnum, auto
from typing import Any
from typing import Any, TypedDict
from pydantic import TypeAdapter
@@ -22,6 +22,17 @@ logger = logging.getLogger(__name__)
_metadata_adapter: TypeAdapter[dict[str, Any]] = TypeAdapter(dict[str, Any])
class StorageStatisticsDict(TypedDict):
    """Aggregate storage metrics returned by FileLifecycleManager.get_storage_statistics()."""

    total_files: int
    active_files: int
    archived_files: int
    deleted_files: int
    # Cumulative size in bytes — presumably; confirm against metadata units.
    total_size: int
    versions_count: int
    # Filenames (or None when no files exist / statistics could not be computed).
    oldest_file: str | None
    newest_file: str | None
class FileStatus(StrEnum):
"""File status enumeration"""
@@ -384,7 +395,7 @@ class FileLifecycleManager:
logger.exception("Failed to cleanup old versions")
return 0
def get_storage_statistics(self) -> dict[str, Any]:
def get_storage_statistics(self) -> StorageStatisticsDict:
"""Get storage statistics
Returns:
@@ -393,16 +404,16 @@ class FileLifecycleManager:
try:
metadata_dict = self._load_metadata()
stats: dict[str, Any] = {
"total_files": len(metadata_dict),
"active_files": 0,
"archived_files": 0,
"deleted_files": 0,
"total_size": 0,
"versions_count": 0,
"oldest_file": None,
"newest_file": None,
}
stats = StorageStatisticsDict(
total_files=len(metadata_dict),
active_files=0,
archived_files=0,
deleted_files=0,
total_size=0,
versions_count=0,
oldest_file=None,
newest_file=None,
)
oldest_date = None
newest_date = None
@@ -437,7 +448,16 @@ class FileLifecycleManager:
except Exception:
logger.exception("Failed to get storage statistics")
return {}
return StorageStatisticsDict(
total_files=0,
active_files=0,
archived_files=0,
deleted_files=0,
total_size=0,
versions_count=0,
oldest_file=None,
newest_file=None,
)
def _create_version_backup(self, filename: str, metadata: dict):
"""Create version backup"""

View File

@@ -8,13 +8,13 @@ dependencies = [
"arize-phoenix-otel~=0.15.0",
"azure-identity==1.25.3",
"beautifulsoup4==4.14.3",
"boto3==1.42.78",
"boto3==1.42.83",
"bs4~=0.0.1",
"cachetools~=5.3.0",
"celery~=5.6.2",
"charset-normalizer>=3.4.4",
"flask~=3.1.2",
"flask-compress>=1.17,<1.24",
"flask-compress>=1.17,<1.25",
"flask-cors~=6.0.0",
"flask-login~=0.6.3",
"flask-migrate~=4.1.0",
@@ -25,7 +25,7 @@ dependencies = [
"google-api-core>=2.19.1",
"google-api-python-client==2.193.0",
"google-auth>=2.47.0",
"google-auth-httplib2==0.3.0",
"google-auth-httplib2==0.3.1",
"google-cloud-aiplatform>=1.123.0",
"googleapis-common-protos>=1.65.0",
"graphon>=0.1.2",
@@ -111,9 +111,9 @@ package = false
dev = [
"coverage~=7.13.4",
"dotenv-linter~=0.7.0",
"faker~=40.11.0",
"faker~=40.12.0",
"lxml-stubs~=0.5.1",
"basedpyright~=1.38.2",
"basedpyright~=1.39.0",
"ruff~=0.15.5",
"pytest~=9.0.2",
"pytest-benchmark~=5.2.3",
@@ -139,15 +139,15 @@ dev = [
"types-olefile~=0.47.0",
"types-openpyxl~=3.1.5",
"types-pexpect~=4.9.0",
"types-protobuf~=6.32.1",
"types-protobuf~=7.34.1",
"types-psutil~=7.2.2",
"types-psycopg2~=2.9.21",
"types-pygments~=2.19.0",
"types-pygments~=2.20.0",
"types-pymysql~=1.1.0",
"types-python-dateutil~=2.9.0",
"types-pywin32~=311.0.0",
"types-pyyaml~=6.0.12",
"types-regex~=2026.3.32",
"types-regex~=2026.4.4",
"types-shapely~=2.1.0",
"types-simplejson>=3.20.0",
"types-six>=1.17.0",
@@ -166,7 +166,7 @@ dev = [
"import-linter>=2.3",
"types-redis>=4.6.0.20241004",
"celery-types>=0.23.0",
"mypy~=1.19.1",
"mypy~=1.20.0",
# "locust>=2.40.4", # Temporarily removed due to compatibility issues. Uncomment when resolved.
"sseclient-py>=1.8.0",
"pytest-timeout>=2.4.0",
@@ -200,23 +200,23 @@ tools = ["cloudscraper~=1.2.71", "nltk~=3.9.1"]
# Required by vector store clients
############################################################
vdb = [
"alibabacloud_gpdb20160503~=5.1.0",
"alibabacloud_gpdb20160503~=5.2.0",
"alibabacloud_tea_openapi~=0.4.3",
"chromadb==0.5.20",
"clickhouse-connect~=0.15.0",
"clickzetta-connector-python>=0.8.102",
"couchbase~=4.5.0",
"couchbase~=4.6.0",
"elasticsearch==8.14.0",
"opensearch-py==3.1.0",
"oracledb==3.4.2",
"pgvecto-rs[sqlalchemy]~=0.2.1",
"pgvector==0.4.2",
"pymilvus~=2.6.10",
"pymochow==2.3.6",
"pymochow==2.4.0",
"pyobvector~=0.2.17",
"qdrant-client==1.9.0",
"intersystems-irispython>=5.1.0",
"tablestore==6.4.2",
"tablestore==6.4.3",
"tcvectordb~=2.1.0",
"tidb-vector==0.0.15",
"upstash-vector==0.8.0",

View File

@@ -36,7 +36,7 @@ Example:
from collections.abc import Callable, Sequence
from datetime import datetime
from typing import Protocol
from typing import Protocol, TypedDict
from graphon.entities.pause_reason import PauseReason
from graphon.enums import WorkflowType
@@ -55,6 +55,16 @@ from repositories.types import (
)
class RunsWithRelatedCountsDict(TypedDict):
runs: int
node_executions: int
offloads: int
app_logs: int
trigger_logs: int
pauses: int
pause_reasons: int
class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol):
"""
Protocol for service-layer WorkflowRun repository operations.
@@ -333,7 +343,7 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol):
runs: Sequence[WorkflowRun],
delete_node_executions: Callable[[Session, Sequence[WorkflowRun]], tuple[int, int]] | None = None,
delete_trigger_logs: Callable[[Session, Sequence[str]], int] | None = None,
) -> dict[str, int]:
) -> RunsWithRelatedCountsDict:
"""
Delete workflow runs and their related records (node executions, offloads, app logs,
trigger logs, pauses, pause reasons).
@@ -400,7 +410,7 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol):
runs: Sequence[WorkflowRun],
count_node_executions: Callable[[Session, Sequence[WorkflowRun]], tuple[int, int]] | None = None,
count_trigger_logs: Callable[[Session, Sequence[str]], int] | None = None,
) -> dict[str, int]:
) -> RunsWithRelatedCountsDict:
"""
Count workflow runs and their related records (node executions, offloads, app logs,
trigger logs, pauses, pause reasons) without deleting data.

View File

@@ -45,7 +45,7 @@ from libs.uuid_utils import uuidv7
from models.enums import WorkflowRunTriggeredFrom
from models.human_input import HumanInputForm
from models.workflow import WorkflowAppLog, WorkflowArchiveLog, WorkflowPause, WorkflowPauseReason, WorkflowRun
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
from repositories.api_workflow_run_repository import APIWorkflowRunRepository, RunsWithRelatedCountsDict
from repositories.entities.workflow_pause import WorkflowPauseEntity
from repositories.types import (
AverageInteractionStats,
@@ -463,7 +463,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
runs: Sequence[WorkflowRun],
delete_node_executions: Callable[[Session, Sequence[WorkflowRun]], tuple[int, int]] | None = None,
delete_trigger_logs: Callable[[Session, Sequence[str]], int] | None = None,
) -> dict[str, int]:
) -> RunsWithRelatedCountsDict:
if not runs:
return {
"runs": 0,
@@ -638,7 +638,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
runs: Sequence[WorkflowRun],
count_node_executions: Callable[[Session, Sequence[WorkflowRun]], tuple[int, int]] | None = None,
count_trigger_logs: Callable[[Session, Sequence[str]], int] | None = None,
) -> dict[str, int]:
) -> RunsWithRelatedCountsDict:
if not runs:
return {
"runs": 0,

View File

@@ -8,7 +8,7 @@ from hashlib import sha256
from typing import Any, TypedDict, cast
from pydantic import BaseModel, TypeAdapter
from sqlalchemy import delete, func, select
from sqlalchemy import delete, func, select, update
from sqlalchemy.orm import Session
@@ -83,6 +83,12 @@ from tasks.mail_reset_password_task import (
logger = logging.getLogger(__name__)
class InvitationDetailDict(TypedDict):
account: Account
data: InvitationData
tenant: Tenant
def _try_join_enterprise_default_workspace(account_id: str) -> None:
"""Best-effort join to enterprise default workspace."""
if not dify_config.ENTERPRISE_ENABLED:
@@ -1069,11 +1075,11 @@ class TenantService:
@staticmethod
def create_owner_tenant_if_not_exist(account: Account, name: str | None = None, is_setup: bool | None = False):
"""Check if user have a workspace or not"""
available_ta = (
db.session.query(TenantAccountJoin)
.filter_by(account_id=account.id)
available_ta = db.session.scalar(
select(TenantAccountJoin)
.where(TenantAccountJoin.account_id == account.id)
.order_by(TenantAccountJoin.id.asc())
.first()
.limit(1)
)
if available_ta:
@@ -1104,7 +1110,11 @@ class TenantService:
logger.error("Tenant %s has already an owner.", tenant.id)
raise Exception("Tenant already has an owner.")
ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first()
ta = db.session.scalar(
select(TenantAccountJoin)
.where(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.account_id == account.id)
.limit(1)
)
if ta:
ta.role = TenantAccountRole(role)
else:
@@ -1119,11 +1129,12 @@ class TenantService:
@staticmethod
def get_join_tenants(account: Account) -> list[Tenant]:
"""Get account join tenants"""
return (
db.session.query(Tenant)
.join(TenantAccountJoin, Tenant.id == TenantAccountJoin.tenant_id)
.where(TenantAccountJoin.account_id == account.id, Tenant.status == TenantStatus.NORMAL)
.all()
return list(
db.session.scalars(
select(Tenant)
.join(TenantAccountJoin, Tenant.id == TenantAccountJoin.tenant_id)
.where(TenantAccountJoin.account_id == account.id, Tenant.status == TenantStatus.NORMAL)
).all()
)
@staticmethod
@@ -1133,7 +1144,11 @@ class TenantService:
if not tenant:
raise TenantNotFoundError("Tenant not found.")
ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first()
ta = db.session.scalar(
select(TenantAccountJoin)
.where(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.account_id == account.id)
.limit(1)
)
if ta:
tenant.role = ta.role
else:
@@ -1148,23 +1163,25 @@ class TenantService:
if tenant_id is None:
raise ValueError("Tenant ID must be provided.")
tenant_account_join = (
db.session.query(TenantAccountJoin)
tenant_account_join = db.session.scalar(
select(TenantAccountJoin)
.join(Tenant, TenantAccountJoin.tenant_id == Tenant.id)
.where(
TenantAccountJoin.account_id == account.id,
TenantAccountJoin.tenant_id == tenant_id,
Tenant.status == TenantStatus.NORMAL,
)
.first()
.limit(1)
)
if not tenant_account_join:
raise AccountNotLinkTenantError("Tenant not found or account is not a member of the tenant.")
else:
db.session.query(TenantAccountJoin).where(
TenantAccountJoin.account_id == account.id, TenantAccountJoin.tenant_id != tenant_id
).update({"current": False})
db.session.execute(
update(TenantAccountJoin)
.where(TenantAccountJoin.account_id == account.id, TenantAccountJoin.tenant_id != tenant_id)
.values(current=False)
)
tenant_account_join.current = True
# Set the current tenant for the account
account.set_tenant_id(tenant_account_join.tenant_id)
@@ -1173,8 +1190,8 @@ class TenantService:
@staticmethod
def get_tenant_members(tenant: Tenant) -> list[Account]:
"""Get tenant members"""
query = (
db.session.query(Account, TenantAccountJoin.role)
stmt = (
select(Account, TenantAccountJoin.role)
.select_from(Account)
.join(TenantAccountJoin, Account.id == TenantAccountJoin.account_id)
.where(TenantAccountJoin.tenant_id == tenant.id)
@@ -1183,7 +1200,7 @@ class TenantService:
# Initialize an empty list to store the updated accounts
updated_accounts = []
for account, role in query:
for account, role in db.session.execute(stmt):
account.role = role
updated_accounts.append(account)
@@ -1192,8 +1209,8 @@ class TenantService:
@staticmethod
def get_dataset_operator_members(tenant: Tenant) -> list[Account]:
"""Get dataset admin members"""
query = (
db.session.query(Account, TenantAccountJoin.role)
stmt = (
select(Account, TenantAccountJoin.role)
.select_from(Account)
.join(TenantAccountJoin, Account.id == TenantAccountJoin.account_id)
.where(TenantAccountJoin.tenant_id == tenant.id)
@@ -1203,7 +1220,7 @@ class TenantService:
# Initialize an empty list to store the updated accounts
updated_accounts = []
for account, role in query:
for account, role in db.session.execute(stmt):
account.role = role
updated_accounts.append(account)
@@ -1216,26 +1233,31 @@ class TenantService:
raise ValueError("all roles must be TenantAccountRole")
return (
db.session.query(TenantAccountJoin)
.where(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.role.in_([role.value for role in roles]))
.first()
db.session.scalar(
select(TenantAccountJoin)
.where(
TenantAccountJoin.tenant_id == tenant.id,
TenantAccountJoin.role.in_([role.value for role in roles]),
)
.limit(1)
)
is not None
)
@staticmethod
def get_user_role(account: Account, tenant: Tenant) -> TenantAccountRole | None:
"""Get the role of the current account for a given tenant"""
join = (
db.session.query(TenantAccountJoin)
join = db.session.scalar(
select(TenantAccountJoin)
.where(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.account_id == account.id)
.first()
.limit(1)
)
return TenantAccountRole(join.role) if join else None
@staticmethod
def get_tenant_count() -> int:
"""Get tenant count"""
return cast(int, db.session.query(func.count(Tenant.id)).scalar())
return cast(int, db.session.scalar(select(func.count(Tenant.id))))
@staticmethod
def check_member_permission(tenant: Tenant, operator: Account, member: Account | None, action: str):
@@ -1252,7 +1274,11 @@ class TenantService:
if operator.id == member.id:
raise CannotOperateSelfError("Cannot operate self.")
ta_operator = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=operator.id).first()
ta_operator = db.session.scalar(
select(TenantAccountJoin)
.where(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.account_id == operator.id)
.limit(1)
)
if not ta_operator or ta_operator.role not in perms[action]:
raise NoPermissionError(f"No permission to {action} member.")
@@ -1270,7 +1296,11 @@ class TenantService:
TenantService.check_member_permission(tenant, operator, account, "remove")
ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first()
ta = db.session.scalar(
select(TenantAccountJoin)
.where(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.account_id == account.id)
.limit(1)
)
if not ta:
raise MemberNotInTenantError("Member not in tenant.")
@@ -1285,7 +1315,12 @@ class TenantService:
should_delete_account = False
if account.status == AccountStatus.PENDING:
# autoflush flushes ta deletion before this query, so 0 means no remaining joins
remaining_joins = db.session.query(TenantAccountJoin).filter_by(account_id=account_id).count()
remaining_joins = (
db.session.scalar(
select(func.count(TenantAccountJoin.id)).where(TenantAccountJoin.account_id == account_id)
)
or 0
)
if remaining_joins == 0:
db.session.delete(account)
should_delete_account = True
@@ -1320,8 +1355,10 @@ class TenantService:
"""Update member role"""
TenantService.check_member_permission(tenant, operator, member, "update")
target_member_join = (
db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=member.id).first()
target_member_join = db.session.scalar(
select(TenantAccountJoin)
.where(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.account_id == member.id)
.limit(1)
)
if not target_member_join:
@@ -1332,8 +1369,10 @@ class TenantService:
if new_role == "owner":
# Find the current owner and change their role to 'admin'
current_owner_join = (
db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, role="owner").first()
current_owner_join = db.session.scalar(
select(TenantAccountJoin)
.where(TenantAccountJoin.tenant_id == tenant.id, TenantAccountJoin.role == "owner")
.limit(1)
)
if current_owner_join:
current_owner_join.role = TenantAccountRole.ADMIN
@@ -1552,7 +1591,7 @@ class RegisterService:
@classmethod
def get_invitation_if_token_valid(
cls, workspace_id: str | None, email: str | None, token: str
) -> dict[str, Any] | None:
) -> InvitationDetailDict | None:
invitation_data = cls.get_invitation_by_token(token, workspace_id, email)
if not invitation_data:
return None
@@ -1614,7 +1653,7 @@ class RegisterService:
@classmethod
def get_invitation_with_case_fallback(
cls, workspace_id: str | None, email: str | None, token: str
) -> dict[str, Any] | None:
) -> InvitationDetailDict | None:
invitation = cls.get_invitation_if_token_valid(workspace_id, email, token)
if invitation or not email or email == email.lower():
return invitation

View File

@@ -32,22 +32,33 @@ class AdvancedPromptTemplateService:
def get_common_prompt(cls, app_mode: str, model_mode: str, has_context: str):
context_prompt = copy.deepcopy(CONTEXT)
if app_mode == AppMode.CHAT:
if model_mode == "completion":
return cls.get_completion_prompt(
copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt
)
elif model_mode == "chat":
return cls.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)
elif app_mode == AppMode.COMPLETION:
if model_mode == "completion":
return cls.get_completion_prompt(
copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt
)
elif model_mode == "chat":
return cls.get_chat_prompt(
copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt
)
match app_mode:
case AppMode.CHAT:
match model_mode:
case "completion":
return cls.get_completion_prompt(
copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt
)
case "chat":
return cls.get_chat_prompt(
copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt
)
case _:
pass
case AppMode.COMPLETION:
match model_mode:
case "completion":
return cls.get_completion_prompt(
copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt
)
case "chat":
return cls.get_chat_prompt(
copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt
)
case _:
pass
case _:
pass
# default return empty dict
return {}
@@ -73,25 +84,38 @@ class AdvancedPromptTemplateService:
def get_baichuan_prompt(cls, app_mode: str, model_mode: str, has_context: str):
baichuan_context_prompt = copy.deepcopy(BAICHUAN_CONTEXT)
if app_mode == AppMode.CHAT:
if model_mode == "completion":
return cls.get_completion_prompt(
copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt
)
elif model_mode == "chat":
return cls.get_chat_prompt(
copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt
)
elif app_mode == AppMode.COMPLETION:
if model_mode == "completion":
return cls.get_completion_prompt(
copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG),
has_context,
baichuan_context_prompt,
)
elif model_mode == "chat":
return cls.get_chat_prompt(
copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt
)
match app_mode:
case AppMode.CHAT:
match model_mode:
case "completion":
return cls.get_completion_prompt(
copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG),
has_context,
baichuan_context_prompt,
)
case "chat":
return cls.get_chat_prompt(
copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt
)
case _:
pass
case AppMode.COMPLETION:
match model_mode:
case "completion":
return cls.get_completion_prompt(
copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG),
has_context,
baichuan_context_prompt,
)
case "chat":
return cls.get_chat_prompt(
copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG),
has_context,
baichuan_context_prompt,
)
case _:
pass
case _:
pass
# default return empty dict
return {}

View File

@@ -6,7 +6,7 @@ import pandas as pd
logger = logging.getLogger(__name__)
from typing import TypedDict
from sqlalchemy import or_, select
from sqlalchemy import delete, or_, select, update
from werkzeug.datastructures import FileStorage
from werkzeug.exceptions import NotFound
@@ -51,10 +51,8 @@ class AppAnnotationService:
def up_insert_app_annotation_from_message(cls, args: dict, app_id: str) -> MessageAnnotation:
# get app info
current_user, current_tenant_id = current_account_with_tenant()
app = (
db.session.query(App)
.where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
.first()
app = db.session.scalar(
select(App).where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal").limit(1)
)
if not app:
@@ -66,7 +64,9 @@ class AppAnnotationService:
if args.get("message_id"):
message_id = str(args["message_id"])
message = db.session.query(Message).where(Message.id == message_id, Message.app_id == app.id).first()
message = db.session.scalar(
select(Message).where(Message.id == message_id, Message.app_id == app.id).limit(1)
)
if not message:
raise NotFound("Message Not Exists.")
@@ -95,7 +95,9 @@ class AppAnnotationService:
db.session.add(annotation)
db.session.commit()
annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()
annotation_setting = db.session.scalar(
select(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).limit(1)
)
assert current_tenant_id is not None
if annotation_setting:
add_annotation_to_index_task.delay(
@@ -151,10 +153,8 @@ class AppAnnotationService:
def get_annotation_list_by_app_id(cls, app_id: str, page: int, limit: int, keyword: str):
# get app info
_, current_tenant_id = current_account_with_tenant()
app = (
db.session.query(App)
.where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
.first()
app = db.session.scalar(
select(App).where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal").limit(1)
)
if not app:
@@ -193,20 +193,17 @@ class AppAnnotationService:
"""
# get app info
_, current_tenant_id = current_account_with_tenant()
app = (
db.session.query(App)
.where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
.first()
app = db.session.scalar(
select(App).where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal").limit(1)
)
if not app:
raise NotFound("App not found")
annotations = (
db.session.query(MessageAnnotation)
annotations = db.session.scalars(
select(MessageAnnotation)
.where(MessageAnnotation.app_id == app_id)
.order_by(MessageAnnotation.created_at.desc())
.all()
)
).all()
# Sanitize CSV-injectable fields to prevent formula injection
for annotation in annotations:
@@ -223,10 +220,8 @@ class AppAnnotationService:
def insert_app_annotation_directly(cls, args: dict, app_id: str) -> MessageAnnotation:
# get app info
current_user, current_tenant_id = current_account_with_tenant()
app = (
db.session.query(App)
.where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
.first()
app = db.session.scalar(
select(App).where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal").limit(1)
)
if not app:
@@ -242,7 +237,9 @@ class AppAnnotationService:
db.session.add(annotation)
db.session.commit()
# if annotation reply is enabled , add annotation to index
annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()
annotation_setting = db.session.scalar(
select(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).limit(1)
)
if annotation_setting:
add_annotation_to_index_task.delay(
annotation.id,
@@ -257,16 +254,14 @@ class AppAnnotationService:
def update_app_annotation_directly(cls, args: dict, app_id: str, annotation_id: str):
# get app info
_, current_tenant_id = current_account_with_tenant()
app = (
db.session.query(App)
.where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
.first()
app = db.session.scalar(
select(App).where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal").limit(1)
)
if not app:
raise NotFound("App not found")
annotation = db.session.query(MessageAnnotation).where(MessageAnnotation.id == annotation_id).first()
annotation = db.session.get(MessageAnnotation, annotation_id)
if not annotation:
raise NotFound("Annotation not found")
@@ -280,8 +275,8 @@ class AppAnnotationService:
db.session.commit()
# if annotation reply is enabled , add annotation to index
app_annotation_setting = (
db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()
app_annotation_setting = db.session.scalar(
select(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).limit(1)
)
if app_annotation_setting:
@@ -299,16 +294,14 @@ class AppAnnotationService:
def delete_app_annotation(cls, app_id: str, annotation_id: str):
# get app info
_, current_tenant_id = current_account_with_tenant()
app = (
db.session.query(App)
.where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
.first()
app = db.session.scalar(
select(App).where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal").limit(1)
)
if not app:
raise NotFound("App not found")
annotation = db.session.query(MessageAnnotation).where(MessageAnnotation.id == annotation_id).first()
annotation = db.session.get(MessageAnnotation, annotation_id)
if not annotation:
raise NotFound("Annotation not found")
@@ -324,8 +317,8 @@ class AppAnnotationService:
db.session.commit()
# if annotation reply is enabled , delete annotation index
app_annotation_setting = (
db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()
app_annotation_setting = db.session.scalar(
select(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).limit(1)
)
if app_annotation_setting:
@@ -337,22 +330,19 @@ class AppAnnotationService:
def delete_app_annotations_in_batch(cls, app_id: str, annotation_ids: list[str]):
# get app info
_, current_tenant_id = current_account_with_tenant()
app = (
db.session.query(App)
.where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
.first()
app = db.session.scalar(
select(App).where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal").limit(1)
)
if not app:
raise NotFound("App not found")
# Fetch annotations and their settings in a single query
annotations_to_delete = (
db.session.query(MessageAnnotation, AppAnnotationSetting)
annotations_to_delete = db.session.execute(
select(MessageAnnotation, AppAnnotationSetting)
.outerjoin(AppAnnotationSetting, MessageAnnotation.app_id == AppAnnotationSetting.app_id)
.where(MessageAnnotation.id.in_(annotation_ids))
.all()
)
).all()
if not annotations_to_delete:
return {"deleted_count": 0}
@@ -361,9 +351,9 @@ class AppAnnotationService:
annotation_ids_to_delete = [annotation.id for annotation, _ in annotations_to_delete]
# Step 2: Bulk delete hit histories in a single query
db.session.query(AppAnnotationHitHistory).where(
AppAnnotationHitHistory.annotation_id.in_(annotation_ids_to_delete)
).delete(synchronize_session=False)
db.session.execute(
delete(AppAnnotationHitHistory).where(AppAnnotationHitHistory.annotation_id.in_(annotation_ids_to_delete))
)
# Step 3: Trigger async tasks for search index deletion
for annotation, annotation_setting in annotations_to_delete:
@@ -373,11 +363,10 @@ class AppAnnotationService:
)
# Step 4: Bulk delete annotations in a single query
deleted_count = (
db.session.query(MessageAnnotation)
.where(MessageAnnotation.id.in_(annotation_ids_to_delete))
.delete(synchronize_session=False)
delete_result = db.session.execute(
delete(MessageAnnotation).where(MessageAnnotation.id.in_(annotation_ids_to_delete))
)
deleted_count = getattr(delete_result, "rowcount", 0)
db.session.commit()
return {"deleted_count": deleted_count}
@@ -398,10 +387,8 @@ class AppAnnotationService:
# get app info
current_user, current_tenant_id = current_account_with_tenant()
app = (
db.session.query(App)
.where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
.first()
app = db.session.scalar(
select(App).where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal").limit(1)
)
if not app:
@@ -522,16 +509,14 @@ class AppAnnotationService:
def get_annotation_hit_histories(cls, app_id: str, annotation_id: str, page, limit):
_, current_tenant_id = current_account_with_tenant()
# get app info
app = (
db.session.query(App)
.where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
.first()
app = db.session.scalar(
select(App).where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal").limit(1)
)
if not app:
raise NotFound("App not found")
annotation = db.session.query(MessageAnnotation).where(MessageAnnotation.id == annotation_id).first()
annotation = db.session.get(MessageAnnotation, annotation_id)
if not annotation:
raise NotFound("Annotation not found")
@@ -551,7 +536,7 @@ class AppAnnotationService:
@classmethod
def get_annotation_by_id(cls, annotation_id: str) -> MessageAnnotation | None:
annotation = db.session.query(MessageAnnotation).where(MessageAnnotation.id == annotation_id).first()
annotation = db.session.get(MessageAnnotation, annotation_id)
if not annotation:
return None
@@ -571,8 +556,10 @@ class AppAnnotationService:
score: float,
):
# add hit count to annotation
db.session.query(MessageAnnotation).where(MessageAnnotation.id == annotation_id).update(
{MessageAnnotation.hit_count: MessageAnnotation.hit_count + 1}, synchronize_session=False
db.session.execute(
update(MessageAnnotation)
.where(MessageAnnotation.id == annotation_id)
.values(hit_count=MessageAnnotation.hit_count + 1)
)
annotation_hit_history = AppAnnotationHitHistory(
@@ -593,16 +580,16 @@ class AppAnnotationService:
def get_app_annotation_setting_by_app_id(cls, app_id: str) -> AnnotationSettingDict | AnnotationSettingDisabledDict:
_, current_tenant_id = current_account_with_tenant()
# get app info
app = (
db.session.query(App)
.where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
.first()
app = db.session.scalar(
select(App).where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal").limit(1)
)
if not app:
raise NotFound("App not found")
annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()
annotation_setting = db.session.scalar(
select(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).limit(1)
)
if annotation_setting:
collection_binding_detail = annotation_setting.collection_binding_detail
if collection_binding_detail:
@@ -630,22 +617,20 @@ class AppAnnotationService:
) -> AnnotationSettingDict:
current_user, current_tenant_id = current_account_with_tenant()
# get app info
app = (
db.session.query(App)
.where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
.first()
app = db.session.scalar(
select(App).where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal").limit(1)
)
if not app:
raise NotFound("App not found")
annotation_setting = (
db.session.query(AppAnnotationSetting)
annotation_setting = db.session.scalar(
select(AppAnnotationSetting)
.where(
AppAnnotationSetting.app_id == app_id,
AppAnnotationSetting.id == annotation_setting_id,
)
.first()
.limit(1)
)
if not annotation_setting:
raise NotFound("App annotation not found")
@@ -678,26 +663,26 @@ class AppAnnotationService:
@classmethod
def clear_all_annotations(cls, app_id: str):
_, current_tenant_id = current_account_with_tenant()
app = (
db.session.query(App)
.where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal")
.first()
app = db.session.scalar(
select(App).where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal").limit(1)
)
if not app:
raise NotFound("App not found")
# if annotation reply is enabled, delete annotation index
app_annotation_setting = (
db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()
app_annotation_setting = db.session.scalar(
select(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).limit(1)
)
annotations_query = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_id)
for annotation in annotations_query.yield_per(100):
annotation_hit_histories_query = db.session.query(AppAnnotationHitHistory).where(
AppAnnotationHitHistory.annotation_id == annotation.id
)
for annotation_hit_history in annotation_hit_histories_query.yield_per(100):
annotations_iter = db.session.scalars(
select(MessageAnnotation).where(MessageAnnotation.app_id == app_id)
).yield_per(100)
for annotation in annotations_iter:
hit_histories_iter = db.session.scalars(
select(AppAnnotationHitHistory).where(AppAnnotationHitHistory.annotation_id == annotation.id)
).yield_per(100)
for annotation_hit_history in hit_histories_iter:
db.session.delete(annotation_hit_history)
# if annotation reply is enabled, delete annotation index

View File

@@ -116,139 +116,143 @@ class AppGenerateService:
request_id = RateLimit.gen_request_key()
try:
request_id = rate_limit.enter(request_id)
if app_model.mode == AppMode.COMPLETION:
return rate_limit.generate(
CompletionAppGenerator.convert_to_event_stream(
CompletionAppGenerator().generate(
app_model=app_model, user=user, args=args, invoke_from=invoke_from, streaming=streaming
),
),
request_id=request_id,
)
elif app_model.mode == AppMode.AGENT_CHAT or app_model.is_agent:
return rate_limit.generate(
AgentChatAppGenerator.convert_to_event_stream(
AgentChatAppGenerator().generate(
app_model=app_model, user=user, args=args, invoke_from=invoke_from, streaming=streaming
),
),
request_id,
)
elif app_model.mode == AppMode.CHAT:
return rate_limit.generate(
ChatAppGenerator.convert_to_event_stream(
ChatAppGenerator().generate(
app_model=app_model, user=user, args=args, invoke_from=invoke_from, streaming=streaming
),
),
request_id=request_id,
)
elif app_model.mode == AppMode.ADVANCED_CHAT:
workflow_id = args.get("workflow_id")
workflow = cls._get_workflow(app_model, invoke_from, workflow_id)
if streaming:
# Streaming mode: subscribe to SSE and enqueue the execution on first subscriber
with rate_limit_context(rate_limit, request_id):
payload = AppExecutionParams.new(
app_model=app_model,
workflow=workflow,
user=user,
args=args,
invoke_from=invoke_from,
streaming=True,
call_depth=0,
)
payload_json = payload.model_dump_json()
def on_subscribe():
workflow_based_app_execution_task.delay(payload_json)
on_subscribe = cls._build_streaming_task_on_subscribe(on_subscribe)
generator = AdvancedChatAppGenerator()
effective_mode = (
AppMode.AGENT_CHAT if app_model.is_agent and app_model.mode != AppMode.AGENT_CHAT else app_model.mode
)
match effective_mode:
case AppMode.COMPLETION:
return rate_limit.generate(
generator.convert_to_event_stream(
generator.retrieve_events(
AppMode.ADVANCED_CHAT,
payload.workflow_run_id,
on_subscribe=on_subscribe,
CompletionAppGenerator.convert_to_event_stream(
CompletionAppGenerator().generate(
app_model=app_model, user=user, args=args, invoke_from=invoke_from, streaming=streaming
),
),
request_id=request_id,
)
else:
# Blocking mode: run synchronously and return JSON instead of SSE
# Keep behaviour consistent with WORKFLOW blocking branch.
advanced_generator = AdvancedChatAppGenerator()
case AppMode.AGENT_CHAT:
return rate_limit.generate(
advanced_generator.convert_to_event_stream(
advanced_generator.generate(
AgentChatAppGenerator.convert_to_event_stream(
AgentChatAppGenerator().generate(
app_model=app_model, user=user, args=args, invoke_from=invoke_from, streaming=streaming
),
),
request_id,
)
case AppMode.CHAT:
return rate_limit.generate(
ChatAppGenerator.convert_to_event_stream(
ChatAppGenerator().generate(
app_model=app_model, user=user, args=args, invoke_from=invoke_from, streaming=streaming
),
),
request_id=request_id,
)
case AppMode.ADVANCED_CHAT:
workflow_id = args.get("workflow_id")
workflow = cls._get_workflow(app_model, invoke_from, workflow_id)
if streaming:
# Streaming mode: subscribe to SSE and enqueue the execution on first subscriber
with rate_limit_context(rate_limit, request_id):
payload = AppExecutionParams.new(
app_model=app_model,
workflow=workflow,
user=user,
args=args,
invoke_from=invoke_from,
workflow_run_id=str(uuid.uuid4()),
streaming=False,
streaming=True,
call_depth=0,
)
),
request_id=request_id,
)
elif app_model.mode == AppMode.WORKFLOW:
workflow_id = args.get("workflow_id")
workflow = cls._get_workflow(app_model, invoke_from, workflow_id)
if streaming:
with rate_limit_context(rate_limit, request_id):
payload = AppExecutionParams.new(
app_model=app_model,
workflow=workflow,
user=user,
args=args,
invoke_from=invoke_from,
streaming=True,
call_depth=0,
root_node_id=root_node_id,
workflow_run_id=str(uuid.uuid4()),
payload_json = payload.model_dump_json()
def on_subscribe():
workflow_based_app_execution_task.delay(payload_json)
on_subscribe = cls._build_streaming_task_on_subscribe(on_subscribe)
generator = AdvancedChatAppGenerator()
return rate_limit.generate(
generator.convert_to_event_stream(
generator.retrieve_events(
AppMode.ADVANCED_CHAT,
payload.workflow_run_id,
on_subscribe=on_subscribe,
),
),
request_id=request_id,
)
payload_json = payload.model_dump_json()
else:
# Blocking mode: run synchronously and return JSON instead of SSE
# Keep behaviour consistent with WORKFLOW blocking branch.
advanced_generator = AdvancedChatAppGenerator()
return rate_limit.generate(
advanced_generator.convert_to_event_stream(
advanced_generator.generate(
app_model=app_model,
workflow=workflow,
user=user,
args=args,
invoke_from=invoke_from,
workflow_run_id=str(uuid.uuid4()),
streaming=False,
)
),
request_id=request_id,
)
case AppMode.WORKFLOW:
workflow_id = args.get("workflow_id")
workflow = cls._get_workflow(app_model, invoke_from, workflow_id)
if streaming:
with rate_limit_context(rate_limit, request_id):
payload = AppExecutionParams.new(
app_model=app_model,
workflow=workflow,
user=user,
args=args,
invoke_from=invoke_from,
streaming=True,
call_depth=0,
root_node_id=root_node_id,
workflow_run_id=str(uuid.uuid4()),
)
payload_json = payload.model_dump_json()
def on_subscribe():
workflow_based_app_execution_task.delay(payload_json)
def on_subscribe():
workflow_based_app_execution_task.delay(payload_json)
on_subscribe = cls._build_streaming_task_on_subscribe(on_subscribe)
on_subscribe = cls._build_streaming_task_on_subscribe(on_subscribe)
return rate_limit.generate(
WorkflowAppGenerator.convert_to_event_stream(
MessageBasedAppGenerator.retrieve_events(
AppMode.WORKFLOW,
payload.workflow_run_id,
on_subscribe=on_subscribe,
),
),
request_id,
)
pause_config = PauseStateLayerConfig(
session_factory=session_factory.get_session_maker(),
state_owner_user_id=workflow.created_by,
)
return rate_limit.generate(
WorkflowAppGenerator.convert_to_event_stream(
MessageBasedAppGenerator.retrieve_events(
AppMode.WORKFLOW,
payload.workflow_run_id,
on_subscribe=on_subscribe,
WorkflowAppGenerator().generate(
app_model=app_model,
workflow=workflow,
user=user,
args=args,
invoke_from=invoke_from,
streaming=False,
root_node_id=root_node_id,
call_depth=0,
pause_state_config=pause_config,
),
),
request_id,
)
pause_config = PauseStateLayerConfig(
session_factory=session_factory.get_session_maker(),
state_owner_user_id=workflow.created_by,
)
return rate_limit.generate(
WorkflowAppGenerator.convert_to_event_stream(
WorkflowAppGenerator().generate(
app_model=app_model,
workflow=workflow,
user=user,
args=args,
invoke_from=invoke_from,
streaming=False,
root_node_id=root_node_id,
call_depth=0,
pause_state_config=pause_config,
),
),
request_id,
)
else:
raise ValueError(f"Invalid app mode {app_model.mode}")
case _:
raise ValueError(f"Invalid app mode {app_model.mode}")
except Exception:
quota_charge.refund()
rate_limit.exit(request_id)
@@ -280,43 +284,73 @@ class AppGenerateService:
@classmethod
def generate_single_iteration(cls, app_model: App, user: Account, node_id: str, args: Any, streaming: bool = True):
if app_model.mode == AppMode.ADVANCED_CHAT:
workflow = cls._get_workflow(app_model, InvokeFrom.DEBUGGER)
return AdvancedChatAppGenerator.convert_to_event_stream(
AdvancedChatAppGenerator().single_iteration_generate(
app_model=app_model, workflow=workflow, node_id=node_id, user=user, args=args, streaming=streaming
match app_model.mode:
case AppMode.COMPLETION | AppMode.CHAT | AppMode.AGENT_CHAT:
raise ValueError(f"Invalid app mode {app_model.mode}")
case AppMode.ADVANCED_CHAT:
workflow = cls._get_workflow(app_model, InvokeFrom.DEBUGGER)
return AdvancedChatAppGenerator.convert_to_event_stream(
AdvancedChatAppGenerator().single_iteration_generate(
app_model=app_model,
workflow=workflow,
node_id=node_id,
user=user,
args=args,
streaming=streaming,
)
)
)
elif app_model.mode == AppMode.WORKFLOW:
workflow = cls._get_workflow(app_model, InvokeFrom.DEBUGGER)
return AdvancedChatAppGenerator.convert_to_event_stream(
WorkflowAppGenerator().single_iteration_generate(
app_model=app_model, workflow=workflow, node_id=node_id, user=user, args=args, streaming=streaming
case AppMode.WORKFLOW:
workflow = cls._get_workflow(app_model, InvokeFrom.DEBUGGER)
return AdvancedChatAppGenerator.convert_to_event_stream(
WorkflowAppGenerator().single_iteration_generate(
app_model=app_model,
workflow=workflow,
node_id=node_id,
user=user,
args=args,
streaming=streaming,
)
)
)
else:
raise ValueError(f"Invalid app mode {app_model.mode}")
case AppMode.CHANNEL | AppMode.RAG_PIPELINE:
raise ValueError(f"Invalid app mode {app_model.mode}")
case _:
raise ValueError(f"Invalid app mode {app_model.mode}")
@classmethod
def generate_single_loop(
cls, app_model: App, user: Account, node_id: str, args: LoopNodeRunPayload, streaming: bool = True
):
if app_model.mode == AppMode.ADVANCED_CHAT:
workflow = cls._get_workflow(app_model, InvokeFrom.DEBUGGER)
return AdvancedChatAppGenerator.convert_to_event_stream(
AdvancedChatAppGenerator().single_loop_generate(
app_model=app_model, workflow=workflow, node_id=node_id, user=user, args=args, streaming=streaming
match app_model.mode:
case AppMode.COMPLETION | AppMode.CHAT | AppMode.AGENT_CHAT:
raise ValueError(f"Invalid app mode {app_model.mode}")
case AppMode.ADVANCED_CHAT:
workflow = cls._get_workflow(app_model, InvokeFrom.DEBUGGER)
return AdvancedChatAppGenerator.convert_to_event_stream(
AdvancedChatAppGenerator().single_loop_generate(
app_model=app_model,
workflow=workflow,
node_id=node_id,
user=user,
args=args,
streaming=streaming,
)
)
)
elif app_model.mode == AppMode.WORKFLOW:
workflow = cls._get_workflow(app_model, InvokeFrom.DEBUGGER)
return AdvancedChatAppGenerator.convert_to_event_stream(
WorkflowAppGenerator().single_loop_generate(
app_model=app_model, workflow=workflow, node_id=node_id, user=user, args=args, streaming=streaming
case AppMode.WORKFLOW:
workflow = cls._get_workflow(app_model, InvokeFrom.DEBUGGER)
return AdvancedChatAppGenerator.convert_to_event_stream(
WorkflowAppGenerator().single_loop_generate(
app_model=app_model,
workflow=workflow,
node_id=node_id,
user=user,
args=args,
streaming=streaming,
)
)
)
else:
raise ValueError(f"Invalid app mode {app_model.mode}")
case AppMode.CHANNEL | AppMode.RAG_PIPELINE:
raise ValueError(f"Invalid app mode {app_model.mode}")
case _:
raise ValueError(f"Invalid app mode {app_model.mode}")
@classmethod
def generate_more_like_this(

View File

@@ -7,11 +7,12 @@ from models.model import AppMode, AppModelConfigDict
class AppModelConfigService:
@classmethod
def validate_configuration(cls, tenant_id: str, config: dict, app_mode: AppMode) -> AppModelConfigDict:
if app_mode == AppMode.CHAT:
return ChatAppConfigManager.config_validate(tenant_id, config)
elif app_mode == AppMode.AGENT_CHAT:
return AgentChatAppConfigManager.config_validate(tenant_id, config)
elif app_mode == AppMode.COMPLETION:
return CompletionAppConfigManager.config_validate(tenant_id, config)
else:
raise ValueError(f"Invalid app mode: {app_mode}")
match app_mode:
case AppMode.CHAT:
return ChatAppConfigManager.config_validate(tenant_id, config)
case AppMode.AGENT_CHAT:
return AgentChatAppConfigManager.config_validate(tenant_id, config)
case AppMode.COMPLETION:
return CompletionAppConfigManager.config_validate(tenant_id, config)
case AppMode.WORKFLOW | AppMode.ADVANCED_CHAT | AppMode.CHANNEL | AppMode.RAG_PIPELINE:
raise ValueError(f"Invalid app mode: {app_mode}")

View File

@@ -1,6 +1,6 @@
import base64
from sqlalchemy import Engine
from sqlalchemy import Engine, select
from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import NotFound
@@ -22,8 +22,8 @@ class AttachmentService:
raise AssertionError("must be a sessionmaker or an Engine.")
def get_file_base64(self, file_id: str) -> str:
upload_file = (
self._session_maker(expire_on_commit=False).query(UploadFile).where(UploadFile.id == file_id).first()
upload_file = self._session_maker(expire_on_commit=False).scalar(
select(UploadFile).where(UploadFile.id == file_id).limit(1)
)
if not upload_file:
raise NotFound("File not found")

View File

@@ -7,14 +7,14 @@ import time
import uuid
from collections import Counter
from collections.abc import Sequence
from typing import Any, Literal, cast
from typing import Any, Literal, TypedDict, cast
import sqlalchemy as sa
from graphon.file import helpers as file_helpers
from graphon.model_runtime.entities.model_entities import ModelFeature, ModelType
from graphon.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel
from redis.exceptions import LockNotOwnedError
from sqlalchemy import exists, func, select
from sqlalchemy import delete, exists, func, select, update
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden, NotFound
@@ -107,6 +107,16 @@ from tasks.sync_website_document_indexing_task import sync_website_document_inde
logger = logging.getLogger(__name__)
class ProcessRulesDict(TypedDict):
mode: str
rules: dict[str, Any]
class AutoDisableLogsDict(TypedDict):
document_ids: list[str]
count: int
class DatasetService:
@staticmethod
def get_datasets(page, per_page, tenant_id=None, user=None, search=None, tag_ids=None, include_all=False):
@@ -114,9 +124,11 @@ class DatasetService:
if user:
# get permitted dataset ids
dataset_permission = (
db.session.query(DatasetPermission).filter_by(account_id=user.id, tenant_id=tenant_id).all()
)
dataset_permission = db.session.scalars(
select(DatasetPermission).where(
DatasetPermission.account_id == user.id, DatasetPermission.tenant_id == tenant_id
)
).all()
permitted_dataset_ids = {dp.dataset_id for dp in dataset_permission} if dataset_permission else None
if user.current_role == TenantAccountRole.DATASET_OPERATOR:
@@ -180,21 +192,20 @@ class DatasetService:
return datasets.items, datasets.total
@staticmethod
def get_process_rules(dataset_id):
def get_process_rules(dataset_id) -> ProcessRulesDict:
# get the latest process rule
dataset_process_rule = (
db.session.query(DatasetProcessRule)
dataset_process_rule = db.session.execute(
select(DatasetProcessRule)
.where(DatasetProcessRule.dataset_id == dataset_id)
.order_by(DatasetProcessRule.created_at.desc())
.limit(1)
.one_or_none()
)
).scalar_one_or_none()
if dataset_process_rule:
mode = dataset_process_rule.mode
rules = dataset_process_rule.rules_dict
rules = dataset_process_rule.rules_dict or {}
else:
mode = DocumentService.DEFAULT_RULES["mode"]
rules = DocumentService.DEFAULT_RULES["rules"]
mode = str(DocumentService.DEFAULT_RULES["mode"])
rules = dict(DocumentService.DEFAULT_RULES.get("rules") or {})
return {"mode": mode, "rules": rules}
@staticmethod
@@ -225,7 +236,7 @@ class DatasetService:
summary_index_setting: dict | None = None,
):
# check if dataset name already exists
if db.session.query(Dataset).filter_by(name=name, tenant_id=tenant_id).first():
if db.session.scalar(select(Dataset).where(Dataset.name == name, Dataset.tenant_id == tenant_id).limit(1)):
raise DatasetNameDuplicateError(f"Dataset with name {name} already exists.")
embedding_model = None
if indexing_technique == IndexTechniqueType.HIGH_QUALITY:
@@ -300,17 +311,17 @@ class DatasetService:
):
if rag_pipeline_dataset_create_entity.name:
# check if dataset name already exists
if (
db.session.query(Dataset)
.filter_by(name=rag_pipeline_dataset_create_entity.name, tenant_id=tenant_id)
.first()
if db.session.scalar(
select(Dataset)
.where(Dataset.name == rag_pipeline_dataset_create_entity.name, Dataset.tenant_id == tenant_id)
.limit(1)
):
raise DatasetNameDuplicateError(
f"Dataset with name {rag_pipeline_dataset_create_entity.name} already exists."
)
else:
# generate a random name as Untitled 1 2 3 ...
datasets = db.session.query(Dataset).filter_by(tenant_id=tenant_id).all()
datasets = db.session.scalars(select(Dataset).where(Dataset.tenant_id == tenant_id)).all()
names = [dataset.name for dataset in datasets]
rag_pipeline_dataset_create_entity.name = generate_incremental_name(
names,
@@ -344,7 +355,7 @@ class DatasetService:
@staticmethod
def get_dataset(dataset_id) -> Dataset | None:
dataset: Dataset | None = db.session.query(Dataset).filter_by(id=dataset_id).first()
dataset: Dataset | None = db.session.get(Dataset, dataset_id)
return dataset
@staticmethod
@@ -466,14 +477,14 @@ class DatasetService:
@staticmethod
def _has_dataset_same_name(tenant_id: str, dataset_id: str, name: str):
dataset = (
db.session.query(Dataset)
dataset = db.session.scalar(
select(Dataset)
.where(
Dataset.id != dataset_id,
Dataset.name == name,
Dataset.tenant_id == tenant_id,
)
.first()
.limit(1)
)
return dataset is not None
@@ -596,7 +607,7 @@ class DatasetService:
filtered_data["icon_info"] = data.get("icon_info")
# Update dataset in database
db.session.query(Dataset).filter_by(id=dataset.id).update(filtered_data)
db.session.execute(update(Dataset).where(Dataset.id == dataset.id).values(**filtered_data))
db.session.commit()
# Reload dataset to get updated values
@@ -631,7 +642,7 @@ class DatasetService:
if dataset.runtime_mode != DatasetRuntimeMode.RAG_PIPELINE:
return
pipeline = db.session.query(Pipeline).filter_by(id=dataset.pipeline_id).first()
pipeline = db.session.get(Pipeline, dataset.pipeline_id)
if not pipeline:
return
@@ -1138,8 +1149,10 @@ class DatasetService:
if dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM:
# For partial team permission, user needs explicit permission or be the creator
if dataset.created_by != user.id:
user_permission = (
db.session.query(DatasetPermission).filter_by(dataset_id=dataset.id, account_id=user.id).first()
user_permission = db.session.scalar(
select(DatasetPermission)
.where(DatasetPermission.dataset_id == dataset.id, DatasetPermission.account_id == user.id)
.limit(1)
)
if not user_permission:
logger.debug("User %s does not have permission to access dataset %s", user.id, dataset.id)
@@ -1161,7 +1174,9 @@ class DatasetService:
elif dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM:
if not any(
dp.dataset_id == dataset.id
for dp in db.session.query(DatasetPermission).filter_by(account_id=user.id).all()
for dp in db.session.scalars(
select(DatasetPermission).where(DatasetPermission.account_id == user.id)
).all()
):
raise NoPermissionError("You do not have permission to access this dataset.")
@@ -1175,12 +1190,11 @@ class DatasetService:
@staticmethod
def get_related_apps(dataset_id: str):
return (
db.session.query(AppDatasetJoin)
return db.session.scalars(
select(AppDatasetJoin)
.where(AppDatasetJoin.dataset_id == dataset_id)
.order_by(db.desc(AppDatasetJoin.created_at))
.all()
)
.order_by(AppDatasetJoin.created_at.desc())
).all()
@staticmethod
def update_dataset_api_status(dataset_id: str, status: bool):
@@ -1195,7 +1209,7 @@ class DatasetService:
db.session.commit()
@staticmethod
def get_dataset_auto_disable_logs(dataset_id: str):
def get_dataset_auto_disable_logs(dataset_id: str) -> AutoDisableLogsDict:
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
features = FeatureService.get_features(current_user.current_tenant_id)
@@ -1396,8 +1410,8 @@ class DocumentService:
@staticmethod
def get_document(dataset_id: str, document_id: str | None = None) -> Document | None:
if document_id:
document = (
db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
document = db.session.scalar(
select(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).limit(1)
)
return document
else:
@@ -1626,7 +1640,7 @@ class DocumentService:
@staticmethod
def get_document_by_id(document_id: str) -> Document | None:
document = db.session.query(Document).where(Document.id == document_id).first()
document = db.session.get(Document, document_id)
return document
@@ -1691,7 +1705,7 @@ class DocumentService:
@staticmethod
def get_document_file_detail(file_id: str):
file_detail = db.session.query(UploadFile).where(UploadFile.id == file_id).one_or_none()
file_detail = db.session.get(UploadFile, file_id)
return file_detail
@staticmethod
@@ -1765,9 +1779,11 @@ class DocumentService:
document.name = name
db.session.add(document)
if document.data_source_info_dict and "upload_file_id" in document.data_source_info_dict:
db.session.query(UploadFile).where(
UploadFile.id == document.data_source_info_dict["upload_file_id"]
).update({UploadFile.name: name})
db.session.execute(
update(UploadFile)
.where(UploadFile.id == document.data_source_info_dict["upload_file_id"])
.values(name=name)
)
db.session.commit()
@@ -1854,8 +1870,8 @@ class DocumentService:
@staticmethod
def get_documents_position(dataset_id):
document = (
db.session.query(Document).filter_by(dataset_id=dataset_id).order_by(Document.position.desc()).first()
document = db.session.scalar(
select(Document).where(Document.dataset_id == dataset_id).order_by(Document.position.desc()).limit(1)
)
if document:
return document.position + 1
@@ -2012,28 +2028,28 @@ class DocumentService:
if not knowledge_config.data_source.info_list.file_info_list:
raise ValueError("File source info is required")
upload_file_list = knowledge_config.data_source.info_list.file_info_list.file_ids
files = (
db.session.query(UploadFile)
.where(
UploadFile.tenant_id == dataset.tenant_id,
UploadFile.id.in_(upload_file_list),
)
.all()
files = list(
db.session.scalars(
select(UploadFile).where(
UploadFile.tenant_id == dataset.tenant_id,
UploadFile.id.in_(upload_file_list),
)
).all()
)
if len(files) != len(set(upload_file_list)):
raise FileNotExistsError("One or more files not found.")
file_names = [file.name for file in files]
db_documents = (
db.session.query(Document)
.where(
Document.dataset_id == dataset.id,
Document.tenant_id == current_user.current_tenant_id,
Document.data_source_type == DataSourceType.UPLOAD_FILE,
Document.enabled == True,
Document.name.in_(file_names),
)
.all()
db_documents = list(
db.session.scalars(
select(Document).where(
Document.dataset_id == dataset.id,
Document.tenant_id == current_user.current_tenant_id,
Document.data_source_type == DataSourceType.UPLOAD_FILE,
Document.enabled == True,
Document.name.in_(file_names),
)
).all()
)
documents_map = {document.name: document for document in db_documents}
for file in files:
@@ -2079,15 +2095,15 @@ class DocumentService:
raise ValueError("No notion info list found.")
exist_page_ids = []
exist_document = {}
documents = (
db.session.query(Document)
.filter_by(
dataset_id=dataset.id,
tenant_id=current_user.current_tenant_id,
data_source_type=DataSourceType.NOTION_IMPORT,
enabled=True,
)
.all()
documents = list(
db.session.scalars(
select(Document).where(
Document.dataset_id == dataset.id,
Document.tenant_id == current_user.current_tenant_id,
Document.data_source_type == DataSourceType.NOTION_IMPORT,
Document.enabled == True,
)
).all()
)
if documents:
for document in documents:
@@ -2518,14 +2534,15 @@ class DocumentService:
assert isinstance(current_user, Account)
documents_count = (
db.session.query(Document)
.where(
Document.completed_at.isnot(None),
Document.enabled == True,
Document.archived == False,
Document.tenant_id == current_user.current_tenant_id,
db.session.scalar(
select(func.count(Document.id)).where(
Document.completed_at.isnot(None),
Document.enabled == True,
Document.archived == False,
Document.tenant_id == current_user.current_tenant_id,
)
)
.count()
or 0
)
return documents_count
@@ -2575,10 +2592,10 @@ class DocumentService:
raise ValueError("No file info list found.")
upload_file_list = document_data.data_source.info_list.file_info_list.file_ids
for file_id in upload_file_list:
file = (
db.session.query(UploadFile)
file = db.session.scalar(
select(UploadFile)
.where(UploadFile.tenant_id == dataset.tenant_id, UploadFile.id == file_id)
.first()
.limit(1)
)
# raise error if file not found
@@ -2595,8 +2612,8 @@ class DocumentService:
notion_info_list = document_data.data_source.info_list.notion_info_list
for notion_info in notion_info_list:
workspace_id = notion_info.workspace_id
data_source_binding = (
db.session.query(DataSourceOauthBinding)
data_source_binding = db.session.scalar(
select(DataSourceOauthBinding)
.where(
sa.and_(
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
@@ -2605,7 +2622,7 @@ class DocumentService:
DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"',
)
)
.first()
.limit(1)
)
if not data_source_binding:
raise ValueError("Data source binding not found.")
@@ -2650,8 +2667,10 @@ class DocumentService:
db.session.commit()
# update document segment
db.session.query(DocumentSegment).filter_by(document_id=document.id).update(
{DocumentSegment.status: SegmentStatus.RE_SEGMENT}
db.session.execute(
update(DocumentSegment)
.where(DocumentSegment.document_id == document.id)
.values(status=SegmentStatus.RE_SEGMENT)
)
db.session.commit()
# trigger async task
@@ -3143,10 +3162,8 @@ class SegmentService:
lock_name = f"add_segment_lock_document_id_{document.id}"
try:
with redis_client.lock(lock_name, timeout=600):
max_position = (
db.session.query(func.max(DocumentSegment.position))
.where(DocumentSegment.document_id == document.id)
.scalar()
max_position = db.session.scalar(
select(func.max(DocumentSegment.position)).where(DocumentSegment.document_id == document.id)
)
segment_document = DocumentSegment(
tenant_id=current_user.current_tenant_id,
@@ -3198,7 +3215,7 @@ class SegmentService:
segment_document.status = SegmentStatus.ERROR
segment_document.error = str(e)
db.session.commit()
segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_document.id).first()
segment = db.session.get(DocumentSegment, segment_document.id)
return segment
except LockNotOwnedError:
pass
@@ -3221,10 +3238,8 @@ class SegmentService:
model_type=ModelType.TEXT_EMBEDDING,
model=dataset.embedding_model,
)
max_position = (
db.session.query(func.max(DocumentSegment.position))
.where(DocumentSegment.document_id == document.id)
.scalar()
max_position = db.session.scalar(
select(func.max(DocumentSegment.position)).where(DocumentSegment.document_id == document.id)
)
pre_segment_data_list = []
segment_data_list = []
@@ -3369,11 +3384,7 @@ class SegmentService:
else:
raise ValueError("The knowledge base index technique is not high quality!")
# get the process rule
processing_rule = (
db.session.query(DatasetProcessRule)
.where(DatasetProcessRule.id == document.dataset_process_rule_id)
.first()
)
processing_rule = db.session.get(DatasetProcessRule, document.dataset_process_rule_id)
if processing_rule:
VectorService.generate_child_chunks(
segment, document, dataset, embedding_model_instance, processing_rule, True
@@ -3391,13 +3402,13 @@ class SegmentService:
# Query existing summary from database
from models.dataset import DocumentSegmentSummary
existing_summary = (
db.session.query(DocumentSegmentSummary)
existing_summary = db.session.scalar(
select(DocumentSegmentSummary)
.where(
DocumentSegmentSummary.chunk_id == segment.id,
DocumentSegmentSummary.dataset_id == dataset.id,
)
.first()
.limit(1)
)
# Check if summary has changed
@@ -3473,11 +3484,7 @@ class SegmentService:
else:
raise ValueError("The knowledge base index technique is not high quality!")
# get the process rule
processing_rule = (
db.session.query(DatasetProcessRule)
.where(DatasetProcessRule.id == document.dataset_process_rule_id)
.first()
)
processing_rule = db.session.get(DatasetProcessRule, document.dataset_process_rule_id)
if processing_rule:
VectorService.generate_child_chunks(
segment, document, dataset, embedding_model_instance, processing_rule, True
@@ -3489,13 +3496,13 @@ class SegmentService:
if dataset.indexing_technique == IndexTechniqueType.HIGH_QUALITY:
from models.dataset import DocumentSegmentSummary
existing_summary = (
db.session.query(DocumentSegmentSummary)
existing_summary = db.session.scalar(
select(DocumentSegmentSummary)
.where(
DocumentSegmentSummary.chunk_id == segment.id,
DocumentSegmentSummary.dataset_id == dataset.id,
)
.first()
.limit(1)
)
if args.summary is None:
@@ -3561,7 +3568,7 @@ class SegmentService:
segment.status = SegmentStatus.ERROR
segment.error = str(e)
db.session.commit()
new_segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment.id).first()
new_segment = db.session.get(DocumentSegment, segment.id)
if not new_segment:
raise ValueError("new_segment is not found")
return new_segment
@@ -3581,15 +3588,14 @@ class SegmentService:
# Get child chunk IDs before parent segment is deleted
child_node_ids = []
if segment.index_node_id:
child_chunks = (
db.session.query(ChildChunk.index_node_id)
.where(
ChildChunk.segment_id == segment.id,
ChildChunk.dataset_id == dataset.id,
)
.all()
child_node_ids = list(
db.session.scalars(
select(ChildChunk.index_node_id).where(
ChildChunk.segment_id == segment.id,
ChildChunk.dataset_id == dataset.id,
)
).all()
)
child_node_ids = [chunk[0] for chunk in child_chunks if chunk[0]]
delete_segment_from_index_task.delay(
[segment.index_node_id], dataset.id, document.id, [segment.id], child_node_ids
@@ -3608,17 +3614,14 @@ class SegmentService:
# Check if segment_ids is not empty to avoid WHERE false condition
if not segment_ids or len(segment_ids) == 0:
return
segments_info = (
db.session.query(DocumentSegment)
.with_entities(DocumentSegment.index_node_id, DocumentSegment.id, DocumentSegment.word_count)
.where(
segments_info = db.session.execute(
select(DocumentSegment.index_node_id, DocumentSegment.id, DocumentSegment.word_count).where(
DocumentSegment.id.in_(segment_ids),
DocumentSegment.dataset_id == dataset.id,
DocumentSegment.document_id == document.id,
DocumentSegment.tenant_id == current_user.current_tenant_id,
)
.all()
)
).all()
if not segments_info:
return
@@ -3630,15 +3633,16 @@ class SegmentService:
# Get child chunk IDs before parent segments are deleted
child_node_ids = []
if index_node_ids:
child_chunks = (
db.session.query(ChildChunk.index_node_id)
.where(
ChildChunk.segment_id.in_(segment_db_ids),
ChildChunk.dataset_id == dataset.id,
)
.all()
)
child_node_ids = [chunk[0] for chunk in child_chunks if chunk[0]]
child_node_ids = [
nid
for nid in db.session.scalars(
select(ChildChunk.index_node_id).where(
ChildChunk.segment_id.in_(segment_db_ids),
ChildChunk.dataset_id == dataset.id,
)
).all()
if nid
]
# Start async cleanup with both parent and child node IDs
if index_node_ids or child_node_ids:
@@ -3654,7 +3658,7 @@ class SegmentService:
db.session.add(document)
# Delete database records
db.session.query(DocumentSegment).where(DocumentSegment.id.in_(segment_ids)).delete()
db.session.execute(delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids)))
db.session.commit()
@classmethod
@@ -3728,15 +3732,13 @@ class SegmentService:
with redis_client.lock(lock_name, timeout=20):
index_node_id = str(uuid.uuid4())
index_node_hash = helper.generate_text_hash(content)
max_position = (
db.session.query(func.max(ChildChunk.position))
.where(
max_position = db.session.scalar(
select(func.max(ChildChunk.position)).where(
ChildChunk.tenant_id == current_user.current_tenant_id,
ChildChunk.dataset_id == dataset.id,
ChildChunk.document_id == document.id,
ChildChunk.segment_id == segment.id,
)
.scalar()
)
child_chunk = ChildChunk(
tenant_id=current_user.current_tenant_id,
@@ -3896,10 +3898,8 @@ class SegmentService:
@classmethod
def get_child_chunk_by_id(cls, child_chunk_id: str, tenant_id: str) -> ChildChunk | None:
"""Get a child chunk by its ID."""
result = (
db.session.query(ChildChunk)
.where(ChildChunk.id == child_chunk_id, ChildChunk.tenant_id == tenant_id)
.first()
result = db.session.scalar(
select(ChildChunk).where(ChildChunk.id == child_chunk_id, ChildChunk.tenant_id == tenant_id).limit(1)
)
return result if isinstance(result, ChildChunk) else None
@@ -3934,10 +3934,10 @@ class SegmentService:
@classmethod
def get_segment_by_id(cls, segment_id: str, tenant_id: str) -> DocumentSegment | None:
"""Get a segment by its ID."""
result = (
db.session.query(DocumentSegment)
result = db.session.scalar(
select(DocumentSegment)
.where(DocumentSegment.id == segment_id, DocumentSegment.tenant_id == tenant_id)
.first()
.limit(1)
)
return result if isinstance(result, DocumentSegment) else None
@@ -3980,15 +3980,15 @@ class DatasetCollectionBindingService:
def get_dataset_collection_binding(
cls, provider_name: str, model_name: str, collection_type: str = "dataset"
) -> DatasetCollectionBinding:
dataset_collection_binding = (
db.session.query(DatasetCollectionBinding)
dataset_collection_binding = db.session.scalar(
select(DatasetCollectionBinding)
.where(
DatasetCollectionBinding.provider_name == provider_name,
DatasetCollectionBinding.model_name == model_name,
DatasetCollectionBinding.type == collection_type,
)
.order_by(DatasetCollectionBinding.created_at)
.first()
.limit(1)
)
if not dataset_collection_binding:
@@ -4006,13 +4006,13 @@ class DatasetCollectionBindingService:
def get_dataset_collection_binding_by_id_and_type(
cls, collection_binding_id: str, collection_type: str = "dataset"
) -> DatasetCollectionBinding:
dataset_collection_binding = (
db.session.query(DatasetCollectionBinding)
dataset_collection_binding = db.session.scalar(
select(DatasetCollectionBinding)
.where(
DatasetCollectionBinding.id == collection_binding_id, DatasetCollectionBinding.type == collection_type
)
.order_by(DatasetCollectionBinding.created_at)
.first()
.limit(1)
)
if not dataset_collection_binding:
raise ValueError("Dataset collection binding not found")
@@ -4034,7 +4034,7 @@ class DatasetPermissionService:
@classmethod
def update_partial_member_list(cls, tenant_id, dataset_id, user_list):
try:
db.session.query(DatasetPermission).where(DatasetPermission.dataset_id == dataset_id).delete()
db.session.execute(delete(DatasetPermission).where(DatasetPermission.dataset_id == dataset_id))
permissions = []
for user in user_list:
permission = DatasetPermission(
@@ -4070,7 +4070,7 @@ class DatasetPermissionService:
@classmethod
def clear_partial_member_list(cls, dataset_id):
try:
db.session.query(DatasetPermission).where(DatasetPermission.dataset_id == dataset_id).delete()
db.session.execute(delete(DatasetPermission).where(DatasetPermission.dataset_id == dataset_id))
db.session.commit()
except Exception as e:
db.session.rollback()

View File

@@ -4,6 +4,7 @@ from collections.abc import Mapping
from typing import Any
from graphon.model_runtime.entities.provider_entities import FormType
from sqlalchemy import func, select
from sqlalchemy.orm import Session
from configs import dify_config
@@ -367,16 +368,16 @@ class DatasourceProviderService:
check if tenant oauth params is enabled
"""
return (
db.session.query(DatasourceOauthTenantParamConfig)
.filter_by(
tenant_id=tenant_id,
provider=datasource_provider_id.provider_name,
plugin_id=datasource_provider_id.plugin_id,
enabled=True,
db.session.scalar(
select(func.count(DatasourceOauthTenantParamConfig.id)).where(
DatasourceOauthTenantParamConfig.tenant_id == tenant_id,
DatasourceOauthTenantParamConfig.provider == datasource_provider_id.provider_name,
DatasourceOauthTenantParamConfig.plugin_id == datasource_provider_id.plugin_id,
DatasourceOauthTenantParamConfig.enabled == True,
)
)
.count()
> 0
)
or 0
) > 0
def get_tenant_oauth_client(
self, tenant_id: str, datasource_provider_id: DatasourceProviderID, mask: bool = False
@@ -384,14 +385,14 @@ class DatasourceProviderService:
"""
get tenant oauth client
"""
tenant_oauth_client_params = (
db.session.query(DatasourceOauthTenantParamConfig)
.filter_by(
tenant_id=tenant_id,
provider=datasource_provider_id.provider_name,
plugin_id=datasource_provider_id.plugin_id,
tenant_oauth_client_params = db.session.scalar(
select(DatasourceOauthTenantParamConfig)
.where(
DatasourceOauthTenantParamConfig.tenant_id == tenant_id,
DatasourceOauthTenantParamConfig.provider == datasource_provider_id.provider_name,
DatasourceOauthTenantParamConfig.plugin_id == datasource_provider_id.plugin_id,
)
.first()
.limit(1)
)
if tenant_oauth_client_params:
encrypter, _ = self.get_oauth_encrypter(tenant_id, datasource_provider_id)
@@ -707,24 +708,27 @@ class DatasourceProviderService:
:return:
"""
# Get all provider configurations of the current workspace
datasource_providers: list[DatasourceProvider] = (
db.session.query(DatasourceProvider)
datasource_providers: list[DatasourceProvider] = list(
db.session.scalars(
select(DatasourceProvider).where(
DatasourceProvider.tenant_id == tenant_id,
DatasourceProvider.provider == provider,
DatasourceProvider.plugin_id == plugin_id,
)
).all()
)
if not datasource_providers:
return []
copy_credentials_list = []
default_provider = db.session.execute(
select(DatasourceProvider.id)
.where(
DatasourceProvider.tenant_id == tenant_id,
DatasourceProvider.provider == provider,
DatasourceProvider.plugin_id == plugin_id,
)
.all()
)
if not datasource_providers:
return []
copy_credentials_list = []
default_provider = (
db.session.query(DatasourceProvider.id)
.filter_by(tenant_id=tenant_id, provider=provider, plugin_id=plugin_id)
.order_by(DatasourceProvider.is_default.desc(), DatasourceProvider.created_at.asc())
.first()
)
).first()
default_provider_id = default_provider.id if default_provider else None
for datasource_provider in datasource_providers:
encrypted_credentials = datasource_provider.encrypted_credentials
@@ -880,14 +884,14 @@ class DatasourceProviderService:
:return:
"""
# Get all provider configurations of the current workspace
datasource_providers: list[DatasourceProvider] = (
db.session.query(DatasourceProvider)
.where(
DatasourceProvider.tenant_id == tenant_id,
DatasourceProvider.provider == provider,
DatasourceProvider.plugin_id == plugin_id,
)
.all()
datasource_providers: list[DatasourceProvider] = list(
db.session.scalars(
select(DatasourceProvider).where(
DatasourceProvider.tenant_id == tenant_id,
DatasourceProvider.provider == provider,
DatasourceProvider.plugin_id == plugin_id,
)
).all()
)
if not datasource_providers:
return []
@@ -987,10 +991,15 @@ class DatasourceProviderService:
:param plugin_id: plugin id
:return:
"""
datasource_provider = (
db.session.query(DatasourceProvider)
.filter_by(tenant_id=tenant_id, id=auth_id, provider=provider, plugin_id=plugin_id)
.first()
datasource_provider = db.session.scalar(
select(DatasourceProvider)
.where(
DatasourceProvider.tenant_id == tenant_id,
DatasourceProvider.id == auth_id,
DatasourceProvider.provider == provider,
DatasourceProvider.plugin_id == plugin_id,
)
.limit(1)
)
if datasource_provider:
db.session.delete(datasource_provider)

View File

@@ -1,7 +1,7 @@
import logging
from collections.abc import Mapping
from sqlalchemy import case
from sqlalchemy import case, select
from sqlalchemy.orm import Session
from core.app.entities.app_invoke_entities import InvokeFrom
@@ -25,14 +25,14 @@ class EndUserService:
"""
with Session(db.engine, expire_on_commit=False) as session:
return (
session.query(EndUser)
return session.scalar(
select(EndUser)
.where(
EndUser.id == end_user_id,
EndUser.tenant_id == tenant_id,
EndUser.app_id == app_id,
)
.first()
.limit(1)
)
@classmethod
@@ -57,8 +57,8 @@ class EndUserService:
with Session(db.engine, expire_on_commit=False) as session:
# Query with ORDER BY to prioritize exact type matches while maintaining backward compatibility
# This single query approach is more efficient than separate queries
end_user = (
session.query(EndUser)
end_user = session.scalar(
select(EndUser)
.where(
EndUser.tenant_id == tenant_id,
EndUser.app_id == app_id,
@@ -68,7 +68,7 @@ class EndUserService:
# Prioritize records with matching type (0 = match, 1 = no match)
case((EndUser.type == type, 0), else_=1)
)
.first()
.limit(1)
)
if end_user:
@@ -137,15 +137,15 @@ class EndUserService:
with Session(db.engine, expire_on_commit=False) as session:
# Fetch existing end users for all target apps in a single query
existing_end_users: list[EndUser] = (
session.query(EndUser)
.where(
EndUser.tenant_id == tenant_id,
EndUser.app_id.in_(unique_app_ids),
EndUser.session_id == user_id,
EndUser.type == type,
)
.all()
existing_end_users: list[EndUser] = list(
session.scalars(
select(EndUser).where(
EndUser.tenant_id == tenant_id,
EndUser.app_id.in_(unique_app_ids),
EndUser.session_id == user_id,
EndUser.type == type,
)
).all()
)
found_app_ids: set[str] = set()

View File

@@ -44,7 +44,7 @@ class HitTestingService:
dataset: Dataset,
query: str,
account: Account,
retrieval_model: Any, # FIXME drop this any
retrieval_model: dict | None,
external_retrieval_model: dict,
attachment_ids: list | None = None,
limit: int = 10,
@@ -54,6 +54,7 @@ class HitTestingService:
# get retrieval model , if the model is not setting , using default
if not retrieval_model:
retrieval_model = dataset.retrieval_model or default_retrieval_model
assert isinstance(retrieval_model, dict)
document_ids_filter = None
metadata_filtering_conditions = retrieval_model.get("metadata_filtering_conditions", {})
if metadata_filtering_conditions and query:

View File

@@ -5,7 +5,7 @@ import time
from collections.abc import Mapping, Sequence
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path
from typing import Any, TypedDict
from typing import TypedDict
from uuid import uuid4
import click
@@ -42,6 +42,16 @@ class _TenantPluginRecord(TypedDict):
_tenant_plugin_adapter: TypeAdapter[_TenantPluginRecord] = TypeAdapter(_TenantPluginRecord)
class ExtractedPluginsDict(TypedDict):
plugins: dict[str, str]
plugin_not_exist: list[str]
class PluginInstallResultDict(TypedDict):
success: list[str]
failed: list[str]
class PluginMigration:
@classmethod
def extract_plugins(cls, filepath: str, workers: int):
@@ -310,7 +320,7 @@ class PluginMigration:
Path(output_file).write_text(json.dumps(cls.extract_unique_plugins(extracted_plugins)))
@classmethod
def extract_unique_plugins(cls, extracted_plugins: str) -> Mapping[str, Any]:
def extract_unique_plugins(cls, extracted_plugins: str) -> ExtractedPluginsDict:
plugins: dict[str, str] = {}
plugin_ids = []
plugin_not_exist = []
@@ -524,7 +534,7 @@ class PluginMigration:
@classmethod
def handle_plugin_instance_install(
cls, tenant_id: str, plugin_identifiers_map: Mapping[str, str]
) -> Mapping[str, Any]:
) -> PluginInstallResultDict:
"""
Install plugins for a tenant.
"""

View File

@@ -1,6 +1,7 @@
from collections.abc import Mapping, Sequence
from typing import Any, Literal
from sqlalchemy import select
from sqlalchemy.orm import Session
from core.plugin.entities.parameters import PluginParameterOption
@@ -56,24 +57,24 @@ class PluginParameterService:
# fetch credentials from db
with Session(db.engine) as session:
if credential_id:
db_record = (
session.query(BuiltinToolProvider)
db_record = session.scalar(
select(BuiltinToolProvider)
.where(
BuiltinToolProvider.tenant_id == tenant_id,
BuiltinToolProvider.provider == provider,
BuiltinToolProvider.id == credential_id,
)
.first()
.limit(1)
)
else:
db_record = (
session.query(BuiltinToolProvider)
db_record = session.scalar(
select(BuiltinToolProvider)
.where(
BuiltinToolProvider.tenant_id == tenant_id,
BuiltinToolProvider.provider == provider,
)
.order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc())
.first()
.limit(1)
)
if db_record is None:

View File

@@ -3,7 +3,7 @@ import logging
import random
import time
from collections.abc import Sequence
from typing import TYPE_CHECKING, cast
from typing import TYPE_CHECKING, TypedDict, cast
import sqlalchemy as sa
from sqlalchemy import delete, select, tuple_
@@ -158,6 +158,13 @@ class MessagesCleanupMetrics:
self._record(self._job_duration_seconds, job_duration_seconds, attributes)
class MessagesCleanStatsDict(TypedDict):
batches: int
total_messages: int
filtered_messages: int
total_deleted: int
class MessagesCleanService:
"""
Service for cleaning expired messages based on retention policies.
@@ -299,7 +306,7 @@ class MessagesCleanService:
task_label=task_label,
)
def run(self) -> dict[str, int]:
def run(self) -> MessagesCleanStatsDict:
"""
Execute the message cleanup operation.
@@ -319,7 +326,7 @@ class MessagesCleanService:
job_duration_seconds=time.monotonic() - run_start,
)
def _clean_messages_by_time_range(self) -> dict[str, int]:
def _clean_messages_by_time_range(self) -> MessagesCleanStatsDict:
"""
Clean messages within a time range using cursor-based pagination.
@@ -334,7 +341,7 @@ class MessagesCleanService:
Returns:
Dict with statistics: batches, filtered_messages, total_deleted
"""
stats = {
stats: MessagesCleanStatsDict = {
"batches": 0,
"total_messages": 0,
"filtered_messages": 0,

View File

@@ -24,7 +24,7 @@ import zipfile
from collections.abc import Sequence
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass, field
from typing import Any
from typing import Any, TypedDict
import click
from graphon.enums import WorkflowType
@@ -49,6 +49,23 @@ from services.retention.workflow_run.constants import ARCHIVE_BUNDLE_NAME, ARCHI
logger = logging.getLogger(__name__)
class TableStatsManifestEntry(TypedDict):
row_count: int
checksum: str
size_bytes: int
class ArchiveManifestDict(TypedDict):
schema_version: str
workflow_run_id: str
tenant_id: str
app_id: str
workflow_id: str
created_at: str
archived_at: str
tables: dict[str, TableStatsManifestEntry]
@dataclass
class TableStats:
"""Statistics for a single archived table."""
@@ -472,25 +489,26 @@ class WorkflowRunArchiver:
self,
run: WorkflowRun,
table_stats: list[TableStats],
) -> dict[str, Any]:
) -> ArchiveManifestDict:
"""Generate a manifest for the archived workflow run."""
return {
"schema_version": ARCHIVE_SCHEMA_VERSION,
"workflow_run_id": run.id,
"tenant_id": run.tenant_id,
"app_id": run.app_id,
"workflow_id": run.workflow_id,
"created_at": run.created_at.isoformat(),
"archived_at": datetime.datetime.now(datetime.UTC).isoformat(),
"tables": {
stat.table_name: {
"row_count": stat.row_count,
"checksum": stat.checksum,
"size_bytes": stat.size_bytes,
}
for stat in table_stats
},
tables: dict[str, TableStatsManifestEntry] = {
stat.table_name: {
"row_count": stat.row_count,
"checksum": stat.checksum,
"size_bytes": stat.size_bytes,
}
for stat in table_stats
}
return ArchiveManifestDict(
schema_version=ARCHIVE_SCHEMA_VERSION,
workflow_run_id=run.id,
tenant_id=run.tenant_id,
app_id=run.app_id,
workflow_id=run.workflow_id,
created_at=run.created_at.isoformat(),
archived_at=datetime.datetime.now(datetime.UTC).isoformat(),
tables=tables,
)
def _build_archive_bundle(self, manifest_data: bytes, table_payloads: dict[str, bytes]) -> bytes:
buffer = io.BytesIO()

View File

@@ -3,7 +3,7 @@ import logging
import random
import time
from collections.abc import Iterable, Sequence
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, TypedDict
import click
from sqlalchemy.orm import Session, sessionmaker
@@ -12,7 +12,7 @@ from configs import dify_config
from enums.cloud_plan import CloudPlan
from extensions.ext_database import db
from models.workflow import WorkflowRun
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
from repositories.api_workflow_run_repository import APIWorkflowRunRepository, RunsWithRelatedCountsDict
from repositories.factory import DifyAPIRepositoryFactory
from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository
from services.billing_service import BillingService, SubscriptionPlan
@@ -24,6 +24,15 @@ if TYPE_CHECKING:
from opentelemetry.metrics import Counter, Histogram
class RelatedCountsDict(TypedDict):
node_executions: int
offloads: int
app_logs: int
trigger_logs: int
pauses: int
pause_reasons: int
class WorkflowRunCleanupMetrics:
"""
Records low-cardinality OpenTelemetry metrics for workflow run cleanup jobs.
@@ -173,6 +182,9 @@ class WorkflowRunCleanupMetrics:
self._record(self._job_duration_seconds, job_duration_seconds, attributes)
_RELATED_RECORD_KEYS = ("node_executions", "offloads", "app_logs", "trigger_logs", "pauses", "pause_reasons")
class WorkflowRunCleanup:
def __init__(
self,
@@ -230,7 +242,7 @@ class WorkflowRunCleanup:
total_runs_deleted = 0
total_runs_targeted = 0
related_totals = self._empty_related_counts() if self.dry_run else None
related_totals: RelatedCountsDict | None = self._empty_related_counts() if self.dry_run else None
batch_index = 0
last_seen: tuple[datetime.datetime, str] | None = None
status = "success"
@@ -312,8 +324,7 @@ class WorkflowRunCleanup:
int((time.monotonic() - count_start) * 1000),
)
if related_totals is not None:
for key in related_totals:
related_totals[key] += batch_counts.get(key, 0)
self._accumulate_related_counts(related_totals, batch_counts)
sample_ids = ", ".join(run.id for run in free_runs[:5])
click.echo(
click.style(
@@ -332,7 +343,10 @@ class WorkflowRunCleanup:
targeted_runs=len(free_runs),
skipped_runs=paid_or_skipped,
deleted_runs=0,
related_counts={key: batch_counts.get(key, 0) for key in self._empty_related_counts()},
related_counts={
k: batch_counts[k] # type: ignore[literal-required]
for k in _RELATED_RECORD_KEYS
},
related_action="would_delete",
batch_duration_seconds=time.monotonic() - batch_start,
)
@@ -372,7 +386,10 @@ class WorkflowRunCleanup:
targeted_runs=len(free_runs),
skipped_runs=paid_or_skipped,
deleted_runs=counts["runs"],
related_counts={key: counts.get(key, 0) for key in self._empty_related_counts()},
related_counts={
k: counts[k] # type: ignore[literal-required]
for k in _RELATED_RECORD_KEYS
},
related_action="deleted",
batch_duration_seconds=time.monotonic() - batch_start,
)
@@ -506,7 +523,7 @@ class WorkflowRunCleanup:
return trigger_repo.count_by_run_ids(run_ids)
@staticmethod
def _empty_related_counts() -> dict[str, int]:
def _empty_related_counts() -> RelatedCountsDict:
return {
"node_executions": 0,
"offloads": 0,
@@ -517,7 +534,7 @@ class WorkflowRunCleanup:
}
@staticmethod
def _format_related_counts(counts: dict[str, int]) -> str:
def _format_related_counts(counts: RelatedCountsDict) -> str:
return (
f"node_executions {counts['node_executions']}, "
f"offloads {counts['offloads']}, "
@@ -527,6 +544,15 @@ class WorkflowRunCleanup:
f"pause_reasons {counts['pause_reasons']}"
)
@staticmethod
def _accumulate_related_counts(totals: RelatedCountsDict, batch: RunsWithRelatedCountsDict) -> None:
totals["node_executions"] += batch.get("node_executions", 0)
totals["offloads"] += batch.get("offloads", 0)
totals["app_logs"] += batch.get("app_logs", 0)
totals["trigger_logs"] += batch.get("trigger_logs", 0)
totals["pauses"] += batch.get("pauses", 0)
totals["pause_reasons"] += batch.get("pause_reasons", 0)
def _count_node_executions(self, session: Session, runs: Sequence[WorkflowRun]) -> tuple[int, int]:
run_ids = [run.id for run in runs]
repo = DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository(

View File

@@ -14,7 +14,7 @@ from sqlalchemy.orm import Session, sessionmaker
from extensions.ext_database import db
from models.workflow import WorkflowRun
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
from repositories.api_workflow_run_repository import APIWorkflowRunRepository, RunsWithRelatedCountsDict
from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository
@@ -23,7 +23,17 @@ class DeleteResult:
run_id: str
tenant_id: str
success: bool
deleted_counts: dict[str, int] = field(default_factory=dict)
deleted_counts: RunsWithRelatedCountsDict = field(
default_factory=lambda: { # type: ignore[assignment]
"runs": 0,
"node_executions": 0,
"offloads": 0,
"app_logs": 0,
"trigger_logs": 0,
"pauses": 0,
"pause_reasons": 0,
}
)
error: str | None = None
elapsed_time: float = 0.0

View File

@@ -4,7 +4,7 @@ import logging
import time
import uuid
from datetime import UTC, datetime
from typing import Any
from typing import TypedDict, cast
from graphon.model_runtime.entities.llm_entities import LLMUsage
from graphon.model_runtime.entities.model_entities import ModelType
@@ -25,6 +25,22 @@ from models.enums import SummaryStatus
logger = logging.getLogger(__name__)
class SummaryEntryDict(TypedDict):
segment_id: str
segment_position: int
status: str
summary_preview: str | None
error: str | None
created_at: int | None
updated_at: int | None
class DocumentSummaryStatusDetailDict(TypedDict):
total_segments: int
summary_status: dict[str, int]
summaries: list[SummaryEntryDict]
class SummaryIndexService:
"""Service for generating and managing summary indexes."""
@@ -1352,7 +1368,7 @@ class SummaryIndexService:
def get_document_summary_status_detail(
document_id: str,
dataset_id: str,
) -> dict[str, Any]:
) -> DocumentSummaryStatusDetailDict:
"""
Get detailed summary status for a document.
@@ -1403,7 +1419,7 @@ class SummaryIndexService:
SummaryStatus.NOT_STARTED: 0,
}
summary_list = []
summary_list: list[SummaryEntryDict] = []
for segment in segments:
summary = summary_map.get(segment.id)
if summary:
@@ -1438,8 +1454,8 @@ class SummaryIndexService:
}
)
return {
"total_segments": total_segments,
"summary_status": status_counts,
"summaries": summary_list,
}
return DocumentSummaryStatusDetailDict(
total_segments=total_segments,
summary_status=cast(dict[str, int], status_counts),
summaries=summary_list,
)

View File

@@ -2,6 +2,7 @@ import uuid
import sqlalchemy as sa
from flask_login import current_user
from pydantic import BaseModel, Field
from sqlalchemy import func, select
from werkzeug.exceptions import NotFound
@@ -11,6 +12,28 @@ from models.enums import TagType
from models.model import App, Tag, TagBinding
class SaveTagPayload(BaseModel):
name: str = Field(min_length=1, max_length=50)
type: TagType
class UpdateTagPayload(BaseModel):
name: str = Field(min_length=1, max_length=50)
type: TagType
class TagBindingCreatePayload(BaseModel):
tag_ids: list[str]
target_id: str
type: TagType
class TagBindingDeletePayload(BaseModel):
tag_id: str
target_id: str
type: TagType
class TagService:
@staticmethod
def get_tags(tag_type: str, current_tenant_id: str, keyword: str | None = None):
@@ -78,12 +101,12 @@ class TagService:
return tags or []
@staticmethod
def save_tags(args: dict) -> Tag:
if TagService.get_tag_by_tag_name(args["type"], current_user.current_tenant_id, args["name"]):
def save_tags(payload: SaveTagPayload) -> Tag:
if TagService.get_tag_by_tag_name(payload.type, current_user.current_tenant_id, payload.name):
raise ValueError("Tag name already exists")
tag = Tag(
name=args["name"],
type=TagType(args["type"]),
name=payload.name,
type=TagType(payload.type),
created_by=current_user.id,
tenant_id=current_user.current_tenant_id,
)
@@ -93,13 +116,24 @@ class TagService:
return tag
@staticmethod
def update_tags(args: dict, tag_id: str) -> Tag:
if TagService.get_tag_by_tag_name(args.get("type", ""), current_user.current_tenant_id, args.get("name", "")):
raise ValueError("Tag name already exists")
def update_tags(payload: UpdateTagPayload, tag_id: str) -> Tag:
tag = db.session.scalar(select(Tag).where(Tag.id == tag_id).limit(1))
if not tag:
raise NotFound("Tag not found")
tag.name = args["name"]
if payload.name != tag.name:
existing = db.session.scalar(
select(Tag)
.where(
Tag.name == payload.name,
Tag.tenant_id == current_user.current_tenant_id,
Tag.type == tag.type,
Tag.id != tag_id,
)
.limit(1)
)
if existing:
raise ValueError("Tag name already exists")
tag.name = payload.name
db.session.commit()
return tag
@@ -122,21 +156,19 @@ class TagService:
db.session.commit()
@staticmethod
def save_tag_binding(args):
# check if target exists
TagService.check_target_exists(args["type"], args["target_id"])
# save tag binding
for tag_id in args["tag_ids"]:
def save_tag_binding(payload: TagBindingCreatePayload):
TagService.check_target_exists(payload.type, payload.target_id)
for tag_id in payload.tag_ids:
tag_binding = db.session.scalar(
select(TagBinding)
.where(TagBinding.tag_id == tag_id, TagBinding.target_id == args["target_id"])
.where(TagBinding.tag_id == tag_id, TagBinding.target_id == payload.target_id)
.limit(1)
)
if tag_binding:
continue
new_tag_binding = TagBinding(
tag_id=tag_id,
target_id=args["target_id"],
target_id=payload.target_id,
tenant_id=current_user.current_tenant_id,
created_by=current_user.id,
)
@@ -144,17 +176,15 @@ class TagService:
db.session.commit()
@staticmethod
def delete_tag_binding(args):
# check if target exists
TagService.check_target_exists(args["type"], args["target_id"])
# delete tag binding
tag_bindings = db.session.scalar(
def delete_tag_binding(payload: TagBindingDeletePayload):
TagService.check_target_exists(payload.type, payload.target_id)
tag_binding = db.session.scalar(
select(TagBinding)
.where(TagBinding.target_id == args["target_id"], TagBinding.tag_id == args["tag_id"])
.where(TagBinding.target_id == payload.target_id, TagBinding.tag_id == payload.tag_id)
.limit(1)
)
if tag_bindings:
db.session.delete(tag_bindings)
if tag_binding:
db.session.delete(tag_binding)
db.session.commit()
@staticmethod

View File

@@ -285,7 +285,7 @@ class MCPToolManageService:
# Batch query all users to avoid N+1 problem
user_ids = {provider.user_id for provider in mcp_providers}
users = self._session.query(Account).where(Account.id.in_(user_ids)).all()
users = self._session.scalars(select(Account).where(Account.id.in_(user_ids))).all()
user_name_map = {user.id: user.name for user in users}
return [

View File

@@ -3,7 +3,7 @@ from __future__ import annotations
import datetime
import json
from dataclasses import dataclass
from typing import Any
from typing import Any, NotRequired, TypedDict, cast
import httpx
from flask_login import current_user
@@ -126,6 +126,15 @@ class WebsiteCrawlStatusApiRequest:
return cls(provider=provider, job_id=job_id)
class CrawlStatusDict(TypedDict):
status: str
job_id: str
total: int
current: int
data: list[Any]
time_consuming: NotRequired[str | float]
class WebsiteService:
"""Service class for website crawling operations using different providers."""
@@ -261,13 +270,13 @@ class WebsiteService:
return {"status": "active", "job_id": response.json().get("data", {}).get("taskId")}
@classmethod
def get_crawl_status(cls, job_id: str, provider: str) -> dict[str, Any]:
def get_crawl_status(cls, job_id: str, provider: str) -> CrawlStatusDict:
"""Get crawl status using string parameters."""
api_request = WebsiteCrawlStatusApiRequest(provider=provider, job_id=job_id)
return cls.get_crawl_status_typed(api_request)
@classmethod
def get_crawl_status_typed(cls, api_request: WebsiteCrawlStatusApiRequest) -> dict[str, Any]:
def get_crawl_status_typed(cls, api_request: WebsiteCrawlStatusApiRequest) -> CrawlStatusDict:
"""Get crawl status using typed request."""
api_key, config = cls._get_credentials_and_config(current_user.current_tenant_id, api_request.provider)
@@ -281,10 +290,10 @@ class WebsiteService:
raise ValueError("Invalid provider")
@classmethod
def _get_firecrawl_status(cls, job_id: str, api_key: str, config: dict) -> dict[str, Any]:
def _get_firecrawl_status(cls, job_id: str, api_key: str, config: dict) -> CrawlStatusDict:
firecrawl_app = FirecrawlApp(api_key=api_key, base_url=config.get("base_url"))
result: CrawlStatusResponse = firecrawl_app.check_crawl_status(job_id)
crawl_status_data: dict[str, Any] = {
crawl_status_data: CrawlStatusDict = {
"status": result["status"],
"job_id": job_id,
"total": result["total"] or 0,
@@ -302,18 +311,18 @@ class WebsiteService:
return crawl_status_data
@classmethod
def _get_watercrawl_status(cls, job_id: str, api_key: str, config: dict[str, Any]) -> dict[str, Any]:
return dict(WaterCrawlProvider(api_key, config.get("base_url")).get_crawl_status(job_id))
def _get_watercrawl_status(cls, job_id: str, api_key: str, config: dict[str, Any]) -> CrawlStatusDict:
return cast(CrawlStatusDict, dict(WaterCrawlProvider(api_key, config.get("base_url")).get_crawl_status(job_id)))
@classmethod
def _get_jinareader_status(cls, job_id: str, api_key: str) -> dict[str, Any]:
def _get_jinareader_status(cls, job_id: str, api_key: str) -> CrawlStatusDict:
response = _adaptive_http_client.post(
"https://adaptivecrawlstatus-kir3wx7b3a-uc.a.run.app",
headers={"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"},
json={"taskId": job_id},
)
data = response.json().get("data", {})
crawl_status_data = {
crawl_status_data: CrawlStatusDict = {
"status": data.get("status", "active"),
"job_id": job_id,
"total": len(data.get("urls", [])),

View File

@@ -170,34 +170,38 @@ class WorkflowConverter:
graph = self._append_node(graph, llm_node)
if new_app_mode == AppMode.WORKFLOW:
# convert to end node by app mode
end_node = self._convert_to_end_node()
graph = self._append_node(graph, end_node)
else:
answer_node = self._convert_to_answer_node()
graph = self._append_node(graph, answer_node)
app_model_config_dict = app_config.app_model_config_dict
# features
if new_app_mode == AppMode.ADVANCED_CHAT:
features = {
"opening_statement": app_model_config_dict.get("opening_statement"),
"suggested_questions": app_model_config_dict.get("suggested_questions"),
"suggested_questions_after_answer": app_model_config_dict.get("suggested_questions_after_answer"),
"speech_to_text": app_model_config_dict.get("speech_to_text"),
"text_to_speech": app_model_config_dict.get("text_to_speech"),
"file_upload": app_model_config_dict.get("file_upload"),
"sensitive_word_avoidance": app_model_config_dict.get("sensitive_word_avoidance"),
"retriever_resource": app_model_config_dict.get("retriever_resource"),
}
else:
features = {
"text_to_speech": app_model_config_dict.get("text_to_speech"),
"file_upload": app_model_config_dict.get("file_upload"),
"sensitive_word_avoidance": app_model_config_dict.get("sensitive_word_avoidance"),
}
match new_app_mode:
case AppMode.WORKFLOW:
end_node = self._convert_to_end_node()
graph = self._append_node(graph, end_node)
features = {
"text_to_speech": app_model_config_dict.get("text_to_speech"),
"file_upload": app_model_config_dict.get("file_upload"),
"sensitive_word_avoidance": app_model_config_dict.get("sensitive_word_avoidance"),
}
case AppMode.ADVANCED_CHAT:
answer_node = self._convert_to_answer_node()
graph = self._append_node(graph, answer_node)
features = {
"opening_statement": app_model_config_dict.get("opening_statement"),
"suggested_questions": app_model_config_dict.get("suggested_questions"),
"suggested_questions_after_answer": app_model_config_dict.get("suggested_questions_after_answer"),
"speech_to_text": app_model_config_dict.get("speech_to_text"),
"text_to_speech": app_model_config_dict.get("text_to_speech"),
"file_upload": app_model_config_dict.get("file_upload"),
"sensitive_word_avoidance": app_model_config_dict.get("sensitive_word_avoidance"),
"retriever_resource": app_model_config_dict.get("retriever_resource"),
}
case _:
answer_node = self._convert_to_answer_node()
graph = self._append_node(graph, answer_node)
features = {
"text_to_speech": app_model_config_dict.get("text_to_speech"),
"file_upload": app_model_config_dict.get("file_upload"),
"sensitive_word_avoidance": app_model_config_dict.get("sensitive_word_avoidance"),
}
# create workflow record
workflow = Workflow(
@@ -220,19 +224,23 @@ class WorkflowConverter:
def _convert_to_app_config(self, app_model: App, app_model_config: AppModelConfig) -> EasyUIBasedAppConfig:
app_mode_enum = AppMode.value_of(app_model.mode)
app_config: EasyUIBasedAppConfig
if app_mode_enum == AppMode.AGENT_CHAT or app_model.is_agent:
app_model.mode = AppMode.AGENT_CHAT
app_config = AgentChatAppConfigManager.get_app_config(
app_model=app_model, app_model_config=app_model_config
)
elif app_mode_enum == AppMode.CHAT:
app_config = ChatAppConfigManager.get_app_config(app_model=app_model, app_model_config=app_model_config)
elif app_mode_enum == AppMode.COMPLETION:
app_config = CompletionAppConfigManager.get_app_config(
app_model=app_model, app_model_config=app_model_config
)
else:
raise ValueError("Invalid app mode")
effective_mode = (
AppMode.AGENT_CHAT if app_model.is_agent and app_mode_enum != AppMode.AGENT_CHAT else app_mode_enum
)
match effective_mode:
case AppMode.AGENT_CHAT:
app_model.mode = AppMode.AGENT_CHAT
app_config = AgentChatAppConfigManager.get_app_config(
app_model=app_model, app_model_config=app_model_config
)
case AppMode.CHAT:
app_config = ChatAppConfigManager.get_app_config(app_model=app_model, app_model_config=app_model_config)
case AppMode.COMPLETION:
app_config = CompletionAppConfigManager.get_app_config(
app_model=app_model, app_model_config=app_model_config
)
case _:
raise ValueError("Invalid app mode")
return app_config

View File

@@ -1417,16 +1417,17 @@ class WorkflowService:
self._validate_human_input_node_data(node_data)
def validate_features_structure(self, app_model: App, features: dict):
if app_model.mode == AppMode.ADVANCED_CHAT:
return AdvancedChatAppConfigManager.config_validate(
tenant_id=app_model.tenant_id, config=features, only_structure_validate=True
)
elif app_model.mode == AppMode.WORKFLOW:
return WorkflowAppConfigManager.config_validate(
tenant_id=app_model.tenant_id, config=features, only_structure_validate=True
)
else:
raise ValueError(f"Invalid app mode: {app_model.mode}")
match app_model.mode:
case AppMode.ADVANCED_CHAT:
return AdvancedChatAppConfigManager.config_validate(
tenant_id=app_model.tenant_id, config=features, only_structure_validate=True
)
case AppMode.WORKFLOW:
return WorkflowAppConfigManager.config_validate(
tenant_id=app_model.tenant_id, config=features, only_structure_validate=True
)
case _:
raise ValueError(f"Invalid app mode: {app_model.mode}")
def _validate_human_input_node_data(self, node_data: dict) -> None:
"""

View File

@@ -3,6 +3,7 @@ import time
import click
from celery import shared_task
from sqlalchemy import select
from werkzeug.exceptions import NotFound
from core.db.session_factory import session_factory
@@ -35,7 +36,9 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id:
with session_factory.create_session() as session:
# get app info
app = session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
app = session.scalar(
select(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").limit(1)
)
if app:
try:
@@ -53,8 +56,8 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id:
)
documents.append(document)
# if annotation reply is enabled , batch add annotations' index
app_annotation_setting = (
session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()
app_annotation_setting = session.scalar(
select(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).limit(1)
)
if app_annotation_setting:

View File

@@ -24,14 +24,16 @@ def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str):
start_at = time.perf_counter()
# get app info
with session_factory.create_session() as session:
app = session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
app = session.scalar(
select(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").limit(1)
)
annotations_exists = session.scalar(select(exists().where(MessageAnnotation.app_id == app_id)))
if not app:
logger.info(click.style(f"App not found: {app_id}", fg="red"))
return
app_annotation_setting = (
session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()
app_annotation_setting = session.scalar(
select(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).limit(1)
)
if not app_annotation_setting:

View File

@@ -36,7 +36,9 @@ def enable_annotation_reply_task(
start_at = time.perf_counter()
# get app info
with session_factory.create_session() as session:
app = session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
app = session.scalar(
select(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").limit(1)
)
if not app:
logger.info(click.style(f"App not found: {app_id}", fg="red"))
@@ -51,8 +53,8 @@ def enable_annotation_reply_task(
dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding(
embedding_provider_name, embedding_model_name, CollectionBindingType.ANNOTATION
)
annotation_setting = (
session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()
annotation_setting = session.scalar(
select(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).limit(1)
)
if annotation_setting:
if dataset_collection_binding.id != annotation_setting.collection_binding_id:

View File

@@ -3,6 +3,7 @@ import time
import click
from celery import shared_task
from sqlalchemy import select
from core.db.session_factory import session_factory
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
@@ -24,7 +25,7 @@ def disable_segment_from_index_task(segment_id: str):
start_at = time.perf_counter()
with session_factory.create_session() as session:
segment = session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first()
segment = session.scalar(select(DocumentSegment).where(DocumentSegment.id == segment_id).limit(1))
if not segment:
logger.info(click.style(f"Segment not found: {segment_id}", fg="red"))
return

View File

@@ -3,6 +3,7 @@ import time
import click
from celery import shared_task
from sqlalchemy import select
from core.db.session_factory import session_factory
from core.rag.index_processor.constant.doc_type import DocType
@@ -29,7 +30,7 @@ def enable_segment_to_index_task(segment_id: str):
start_at = time.perf_counter()
with session_factory.create_session() as session:
segment = session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first()
segment = session.scalar(select(DocumentSegment).where(DocumentSegment.id == segment_id).limit(1))
if not segment:
logger.info(click.style(f"Segment not found: {segment_id}", fg="red"))
return

View File

@@ -3,6 +3,7 @@ import time
import click
from celery import shared_task
from sqlalchemy import select
from core.db.session_factory import session_factory
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
@@ -24,7 +25,9 @@ def recover_document_indexing_task(dataset_id: str, document_id: str):
start_at = time.perf_counter()
with session_factory.create_session() as session:
document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
document = session.scalar(
select(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).limit(1)
)
if not document:
logger.info(click.style(f"Document not found: {document_id}", fg="red"))

View File

@@ -29,7 +29,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
start_at = time.perf_counter()
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
dataset = session.scalar(select(Dataset).where(Dataset.id == dataset_id).limit(1))
if dataset is None:
raise ValueError("Dataset not found")
@@ -45,8 +45,8 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
"your subscription."
)
except Exception as e:
document = (
session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
document = session.scalar(
select(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).limit(1)
)
if document:
document.indexing_status = IndexingStatus.ERROR
@@ -58,7 +58,9 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
return
logger.info(click.style(f"Start sync website document: {document_id}", fg="green"))
document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
document = session.scalar(
select(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).limit(1)
)
if not document:
logger.info(click.style(f"Document not found: {document_id}", fg="yellow"))
return

View File

@@ -4,6 +4,7 @@ from collections.abc import Mapping
from typing import Any
from celery import shared_task
from sqlalchemy import select
from sqlalchemy.orm import Session
from configs import dify_config
@@ -22,7 +23,11 @@ def _now_ts() -> int:
def _load_subscription(session: Session, tenant_id: str, subscription_id: str) -> TriggerSubscription | None:
return session.query(TriggerSubscription).filter_by(tenant_id=tenant_id, id=subscription_id).first()
return session.scalar(
select(TriggerSubscription)
.where(TriggerSubscription.tenant_id == tenant_id, TriggerSubscription.id == subscription_id)
.limit(1)
)
def _refresh_oauth_if_expired(tenant_id: str, subscription: TriggerSubscription, now: int) -> None:

View File

@@ -555,7 +555,7 @@ class TestWorkflowTriggerEndpoints:
trigger = MagicMock()
session = MagicMock()
session.query.return_value.where.return_value.first.return_value = trigger
session.scalar.return_value = trigger
class DummySessionCtx:
def __enter__(self):

View File

@@ -444,7 +444,7 @@ class TestMCPAppApi:
)
session = MagicMock()
session.query().where().first.side_effect = [server, app]
session.scalar.side_effect = [server, app]
result_server, result_app = api._get_mcp_server_and_app("server-1", session)

View File

@@ -970,8 +970,10 @@ class TestDatasetTagBindingApiPost:
result = api.post(_=None)
assert result == ("", 204)
from services.tag_service import TagBindingCreatePayload
mock_tag_svc.save_tag_binding.assert_called_once_with(
{"tag_ids": ["tag-1"], "target_id": "ds-1", "type": "knowledge"}
TagBindingCreatePayload(tag_ids=["tag-1"], target_id="ds-1", type="knowledge")
)
@patch("controllers.service_api.dataset.dataset.current_user")
@@ -1019,8 +1021,10 @@ class TestDatasetTagUnbindingApiPost:
result = api.post(_=None)
assert result == ("", 204)
from services.tag_service import TagBindingDeletePayload
mock_tag_svc.delete_tag_binding.assert_called_once_with(
{"tag_id": "tag-1", "target_id": "ds-1", "type": "knowledge"}
TagBindingDeletePayload(tag_id="tag-1", target_id="ds-1", type="knowledge")
)
@patch("controllers.service_api.dataset.dataset.current_user")

View File

@@ -12,7 +12,13 @@ from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.dataset import Dataset
from models.enums import DataSourceType, TagType
from models.model import App, Tag, TagBinding
from services.tag_service import TagService
from services.tag_service import (
SaveTagPayload,
TagBindingCreatePayload,
TagBindingDeletePayload,
TagService,
UpdateTagPayload,
)
class TestTagService:
@@ -685,7 +691,7 @@ class TestTagService:
db_session_with_containers, mock_external_service_dependencies
)
tag_args = {"name": "test_tag_name", "type": "knowledge"}
tag_args = SaveTagPayload(name="test_tag_name", type="knowledge")
# Act: Execute the method under test
result = TagService.save_tags(tag_args)
@@ -725,7 +731,7 @@ class TestTagService:
)
# Create first tag
tag_args = {"name": "duplicate_tag", "type": "app"}
tag_args = SaveTagPayload(name="duplicate_tag", type="app")
TagService.save_tags(tag_args)
# Act & Assert: Verify proper error handling
@@ -749,11 +755,11 @@ class TestTagService:
)
# Create a tag to update
tag_args = {"name": "original_name", "type": "knowledge"}
tag_args = SaveTagPayload(name="original_name", type="knowledge")
tag = TagService.save_tags(tag_args)
# Update args
update_args = {"name": "updated_name", "type": "knowledge"}
update_args = UpdateTagPayload(name="updated_name", type="knowledge")
# Act: Execute the method under test
result = TagService.update_tags(update_args, tag.id)
@@ -793,7 +799,7 @@ class TestTagService:
non_existent_tag_id = str(uuid.uuid4())
update_args = {"name": "updated_name", "type": "knowledge"}
update_args = UpdateTagPayload(name="updated_name", type="knowledge")
# Act & Assert: Verify proper error handling
with pytest.raises(NotFound) as exc_info:
@@ -817,14 +823,14 @@ class TestTagService:
)
# Create two tags
tag1_args = {"name": "first_tag", "type": "app"}
tag1_args = SaveTagPayload(name="first_tag", type="app")
tag1 = TagService.save_tags(tag1_args)
tag2_args = {"name": "second_tag", "type": "app"}
tag2_args = SaveTagPayload(name="second_tag", type="app")
tag2 = TagService.save_tags(tag2_args)
# Try to update second tag with first tag's name
update_args = {"name": "first_tag", "type": "app"}
update_args = UpdateTagPayload(name="first_tag", type="app")
# Act & Assert: Verify proper error handling
with pytest.raises(ValueError) as exc_info:
@@ -988,8 +994,10 @@ class TestTagService:
dataset = self._create_test_dataset(db_session_with_containers, mock_external_service_dependencies, tenant.id)
# Act: Execute the method under test
binding_args = {"type": "knowledge", "target_id": dataset.id, "tag_ids": [tag.id for tag in tags]}
TagService.save_tag_binding(binding_args)
binding_payload = TagBindingCreatePayload(
type="knowledge", target_id=dataset.id, tag_ids=[tag.id for tag in tags]
)
TagService.save_tag_binding(binding_payload)
# Assert: Verify the expected outcomes
@@ -1030,11 +1038,11 @@ class TestTagService:
app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, tenant.id)
# Create first binding
binding_args = {"type": "app", "target_id": app.id, "tag_ids": [tag.id]}
TagService.save_tag_binding(binding_args)
binding_payload = TagBindingCreatePayload(type="app", target_id=app.id, tag_ids=[tag.id])
TagService.save_tag_binding(binding_payload)
# Act: Try to create duplicate binding
TagService.save_tag_binding(binding_args)
TagService.save_tag_binding(binding_payload)
# Assert: Verify the expected outcomes
@@ -1071,11 +1079,10 @@ class TestTagService:
non_existent_target_id = str(uuid.uuid4())
# Act & Assert: Verify proper error handling
binding_args = {"type": "invalid_type", "target_id": non_existent_target_id, "tag_ids": [tag.id]}
from pydantic import ValidationError
with pytest.raises(NotFound) as exc_info:
TagService.save_tag_binding(binding_args)
assert "Invalid binding type" in str(exc_info.value)
with pytest.raises(ValidationError):
TagBindingCreatePayload(type="invalid_type", target_id=non_existent_target_id, tag_ids=[tag.id])
def test_delete_tag_binding_success(self, db_session_with_containers: Session, mock_external_service_dependencies):
"""
@@ -1113,8 +1120,8 @@ class TestTagService:
assert binding_before is not None
# Act: Execute the method under test
delete_args = {"type": "knowledge", "target_id": dataset.id, "tag_id": tag.id}
TagService.delete_tag_binding(delete_args)
delete_payload = TagBindingDeletePayload(type="knowledge", target_id=dataset.id, tag_id=tag.id)
TagService.delete_tag_binding(delete_payload)
# Assert: Verify the expected outcomes
# Verify tag binding was deleted
@@ -1149,8 +1156,8 @@ class TestTagService:
app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, tenant.id)
# Act: Try to delete non-existent binding
delete_args = {"type": "app", "target_id": app.id, "tag_id": tag.id}
TagService.delete_tag_binding(delete_args)
delete_payload = TagBindingDeletePayload(type="app", target_id=app.id, tag_id=tag.id)
TagService.delete_tag_binding(delete_payload)
# Assert: Verify the expected outcomes
# No error should be raised, and database state should remain unchanged

View File

@@ -505,13 +505,7 @@ class TestEasyUiBasedGenerateTaskPipeline:
def __exit__(self, exc_type, exc, tb):
return False
def query(self, *args, **kwargs):
return self
def where(self, *args, **kwargs):
return self
def first(self):
def scalar(self, *args, **kwargs):
return agent_thought
monkeypatch.setattr(
@@ -1182,13 +1176,7 @@ class TestEasyUiBasedGenerateTaskPipeline:
def __exit__(self, exc_type, exc, tb):
return False
def query(self, *args, **kwargs):
return self
def where(self, *args, **kwargs):
return self
def first(self):
def scalar(self, *args, **kwargs):
return None
monkeypatch.setattr("core.app.task_pipeline.easy_ui_based_generate_task_pipeline.Session", _Session)

View File

@@ -632,16 +632,6 @@ def test_get_upload_file_by_id_builds_file(mocker):
source_url="http://x",
)
class _Q:
def __init__(self, row):
self._row = row
def where(self, *_args, **_kwargs):
return self
def first(self):
return self._row
class _S:
def __init__(self, row):
self._row = row
@@ -652,8 +642,8 @@ def test_get_upload_file_by_id_builds_file(mocker):
def __exit__(self, *exc):
return False
def query(self, *_):
return _Q(self._row)
def scalar(self, *_args, **_kwargs):
return self._row
mocker.patch("core.datasource.datasource_manager.session_factory.create_session", return_value=_S(fake_row))
@@ -665,13 +655,6 @@ def test_get_upload_file_by_id_builds_file(mocker):
def test_get_upload_file_by_id_raises_when_missing(mocker):
class _Q:
def where(self, *_args, **_kwargs):
return self
def first(self):
return None
class _S:
def __enter__(self):
return self
@@ -679,8 +662,8 @@ def test_get_upload_file_by_id_raises_when_missing(mocker):
def __exit__(self, *exc):
return False
def query(self, *_):
return _Q()
def scalar(self, *_args, **_kwargs):
return None
mocker.patch("core.datasource.datasource_manager.session_factory.create_session", return_value=_S())

View File

@@ -346,13 +346,13 @@ class TestLLMGenerator:
def test_instruction_modify_workflow_app_not_found(self):
with patch("extensions.ext_database.db.session") as mock_session:
mock_session.return_value.query.return_value.where.return_value.first.return_value = None
mock_session.return_value.scalar.return_value = None
with pytest.raises(ValueError, match="App not found."):
LLMGenerator.instruction_modify_workflow("t", "f", "n", "c", "i", MagicMock(), "o", MagicMock())
def test_instruction_modify_workflow_no_workflow(self):
with patch("extensions.ext_database.db.session") as mock_session:
mock_session.return_value.query.return_value.where.return_value.first.return_value = MagicMock()
mock_session.return_value.scalar.return_value = MagicMock()
workflow_service = MagicMock()
workflow_service.get_draft_workflow.return_value = None
with pytest.raises(ValueError, match="Workflow not found for the given app model."):
@@ -360,7 +360,7 @@ class TestLLMGenerator:
def test_instruction_modify_workflow_success(self, mock_model_instance, model_config_entity):
with patch("extensions.ext_database.db.session") as mock_session:
mock_session.return_value.query.return_value.where.return_value.first.return_value = MagicMock()
mock_session.return_value.scalar.return_value = MagicMock()
workflow = MagicMock()
workflow.graph_dict = {"graph": {"nodes": [{"id": "node_id", "data": {"type": "llm"}}]}}

View File

@@ -407,8 +407,7 @@ class TestTencentDataTrace:
mock_db.engine = "engine"
with patch("core.ops.tencent_trace.tencent_trace.Session") as mock_session_ctx:
session = mock_session_ctx.return_value.__enter__.return_value
session.scalar.side_effect = [app, account]
session.query.return_value.filter_by.return_value.first.return_value = tenant_join
session.scalar.side_effect = [app, account, tenant_join]
with patch(
"core.ops.tencent_trace.tencent_trace.SQLAlchemyWorkflowNodeExecutionRepository"

View File

@@ -76,10 +76,7 @@ def test_get_service_account_with_tenant_tenant_not_found(mock_db_session):
mock_account = MagicMock(spec=Account)
mock_account.id = "creator_id"
mock_db_session.scalar.side_effect = [mock_app, mock_account]
# session.query(TenantAccountJoin).filter_by(...).first() returns None
mock_db_session.query.return_value.filter_by.return_value.first.return_value = None
mock_db_session.scalar.side_effect = [mock_app, mock_account, None]
config = MagicMock(spec=BaseTracingConfig)
instance = ConcreteTraceInstance(config)
@@ -97,11 +94,10 @@ def test_get_service_account_with_tenant_success(mock_db_session):
mock_account.id = "creator_id"
mock_account.set_tenant_id = MagicMock()
mock_db_session.scalar.side_effect = [mock_app, mock_account]
mock_tenant_join = MagicMock(spec=TenantAccountJoin)
mock_tenant_join.tenant_id = "tenant_id"
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_tenant_join
mock_db_session.scalar.side_effect = [mock_app, mock_account, mock_tenant_join]
config = MagicMock(spec=BaseTracingConfig)
instance = ConcreteTraceInstance(config)

View File

@@ -14,9 +14,9 @@ class TestCelerySSLConfiguration:
dify_config = DifyConfig(CELERY_BROKER_URL="redis://localhost:6379/0")
with patch("extensions.ext_celery.dify_config", dify_config):
from extensions.ext_celery import _get_celery_ssl_options
from extensions.ext_celery import get_celery_ssl_options
result = _get_celery_ssl_options()
result = get_celery_ssl_options()
assert result is None
def test_get_celery_ssl_options_when_broker_not_redis(self):
@@ -25,9 +25,9 @@ class TestCelerySSLConfiguration:
mock_config.CELERY_BROKER_URL = "amqp://localhost:5672"
with patch("extensions.ext_celery.dify_config", mock_config):
from extensions.ext_celery import _get_celery_ssl_options
from extensions.ext_celery import get_celery_ssl_options
result = _get_celery_ssl_options()
result = get_celery_ssl_options()
assert result is None
def test_get_celery_ssl_options_with_cert_none(self):
@@ -40,9 +40,9 @@ class TestCelerySSLConfiguration:
mock_config.REDIS_SSL_KEYFILE = None
with patch("extensions.ext_celery.dify_config", mock_config):
from extensions.ext_celery import _get_celery_ssl_options
from extensions.ext_celery import get_celery_ssl_options
result = _get_celery_ssl_options()
result = get_celery_ssl_options()
assert result is not None
assert result["ssl_cert_reqs"] == ssl.CERT_NONE
assert result["ssl_ca_certs"] is None
@@ -59,9 +59,9 @@ class TestCelerySSLConfiguration:
mock_config.REDIS_SSL_KEYFILE = "/path/to/client.key"
with patch("extensions.ext_celery.dify_config", mock_config):
from extensions.ext_celery import _get_celery_ssl_options
from extensions.ext_celery import get_celery_ssl_options
result = _get_celery_ssl_options()
result = get_celery_ssl_options()
assert result is not None
assert result["ssl_cert_reqs"] == ssl.CERT_REQUIRED
assert result["ssl_ca_certs"] == "/path/to/ca.crt"
@@ -78,9 +78,9 @@ class TestCelerySSLConfiguration:
mock_config.REDIS_SSL_KEYFILE = None
with patch("extensions.ext_celery.dify_config", mock_config):
from extensions.ext_celery import _get_celery_ssl_options
from extensions.ext_celery import get_celery_ssl_options
result = _get_celery_ssl_options()
result = get_celery_ssl_options()
assert result is not None
assert result["ssl_cert_reqs"] == ssl.CERT_OPTIONAL
assert result["ssl_ca_certs"] == "/path/to/ca.crt"
@@ -95,9 +95,9 @@ class TestCelerySSLConfiguration:
mock_config.REDIS_SSL_KEYFILE = None
with patch("extensions.ext_celery.dify_config", mock_config):
from extensions.ext_celery import _get_celery_ssl_options
from extensions.ext_celery import get_celery_ssl_options
result = _get_celery_ssl_options()
result = get_celery_ssl_options()
assert result is not None
assert result["ssl_cert_reqs"] == ssl.CERT_NONE # Should default to CERT_NONE

View File

@@ -556,12 +556,8 @@ class TestTenantService:
# Setup test data
mock_account = TestAccountAssociatedDataFactory.create_account_mock()
# Setup smart database query mock - no existing tenant joins
query_results = {
("TenantAccountJoin", "account_id", "user-123"): None,
("TenantAccountJoin", "tenant_id", "tenant-456"): None, # For has_roles check
}
ServiceDbTestHelper.setup_db_query_filter_by_mock(mock_db_dependencies["db"], query_results)
# Mock scalar to return None (no existing tenant joins)
mock_db_dependencies["db"].session.scalar.return_value = None
# Setup external service mocks
mock_external_service_dependencies[
@@ -650,9 +646,8 @@ class TestTenantService:
mock_tenant.id = "tenant-456"
mock_account = TestAccountAssociatedDataFactory.create_account_mock()
# Setup smart database query mock - no existing member
query_results = {("TenantAccountJoin", "tenant_id", "tenant-456"): None}
ServiceDbTestHelper.setup_db_query_filter_by_mock(mock_db_dependencies["db"], query_results)
# Mock scalar to return None (no existing member)
mock_db_dependencies["db"].session.scalar.return_value = None
# Mock database operations
mock_db_dependencies["db"].session.add = MagicMock()
@@ -693,16 +688,8 @@ class TestTenantService:
tenant_id="tenant-456", account_id="operator-123", role="owner"
)
query_mock_permission = MagicMock()
query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join
query_mock_ta = MagicMock()
query_mock_ta.filter_by.return_value.first.return_value = mock_ta
query_mock_count = MagicMock()
query_mock_count.filter_by.return_value.count.return_value = 0
mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta, query_mock_count]
# scalar calls: permission check, ta lookup, remaining count
mock_db.session.scalar.side_effect = [mock_operator_join, mock_ta, 0]
with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
mock_sync.return_value = True
@@ -741,17 +728,8 @@ class TestTenantService:
tenant_id="tenant-456", account_id="operator-123", role="owner"
)
query_mock_permission = MagicMock()
query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join
query_mock_ta = MagicMock()
query_mock_ta.filter_by.return_value.first.return_value = mock_ta
# Remaining join count = 1 (still in another workspace)
query_mock_count = MagicMock()
query_mock_count.filter_by.return_value.count.return_value = 1
mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta, query_mock_count]
# scalar calls: permission check, ta lookup, remaining count = 1
mock_db.session.scalar.side_effect = [mock_operator_join, mock_ta, 1]
with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
mock_sync.return_value = True
@@ -781,13 +759,8 @@ class TestTenantService:
tenant_id="tenant-456", account_id="operator-123", role="owner"
)
query_mock_permission = MagicMock()
query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join
query_mock_ta = MagicMock()
query_mock_ta.filter_by.return_value.first.return_value = mock_ta
mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta]
# scalar calls: permission check, ta lookup (no count needed for active member)
mock_db.session.scalar.side_effect = [mock_operator_join, mock_ta]
with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
mock_sync.return_value = True
@@ -810,13 +783,8 @@ class TestTenantService:
# Mock the complex query in switch_tenant method
with patch("services.account_service.db") as mock_db:
# Mock the join query that returns the tenant_account_join
mock_query = MagicMock()
mock_where = MagicMock()
mock_where.first.return_value = mock_tenant_join
mock_query.where.return_value = mock_where
mock_query.join.return_value = mock_query
mock_db.session.query.return_value = mock_query
# Mock scalar for the join query
mock_db.session.scalar.return_value = mock_tenant_join
# Execute test
TenantService.switch_tenant(mock_account, "tenant-456")
@@ -851,20 +819,8 @@ class TestTenantService:
# Mock the database queries in update_member_role method
with patch("services.account_service.db") as mock_db:
# Mock the first query for operator permission check
mock_query1 = MagicMock()
mock_filter1 = MagicMock()
mock_filter1.first.return_value = mock_operator_join
mock_query1.filter_by.return_value = mock_filter1
# Mock the second query for target member
mock_query2 = MagicMock()
mock_filter2 = MagicMock()
mock_filter2.first.return_value = mock_target_join
mock_query2.filter_by.return_value = mock_filter2
# Make the query method return different mocks for different calls
mock_db.session.query.side_effect = [mock_query1, mock_query2]
# scalar calls: permission check, target member lookup
mock_db.session.scalar.side_effect = [mock_operator_join, mock_target_join]
# Execute test
TenantService.update_member_role(mock_tenant, mock_member, "admin", mock_operator)
@@ -886,9 +842,7 @@ class TestTenantService:
tenant_id="tenant-456", account_id="operator-123", role="owner"
)
# Setup smart database query mock
query_results = {("TenantAccountJoin", "tenant_id", "tenant-456"): mock_operator_join}
ServiceDbTestHelper.setup_db_query_filter_by_mock(mock_db_dependencies["db"], query_results)
mock_db_dependencies["db"].session.scalar.return_value = mock_operator_join
# Execute test - should not raise exception
TenantService.check_member_permission(mock_tenant, mock_operator, mock_member, "add")

View File

@@ -79,10 +79,7 @@ class TestAppAnnotationServiceUpInsert:
patch("services.annotation_service.current_account_with_tenant", return_value=(current_user, tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = None
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = None
# Act & Assert
with pytest.raises(NotFound):
@@ -100,10 +97,7 @@ class TestAppAnnotationServiceUpInsert:
patch("services.annotation_service.current_account_with_tenant", return_value=(current_user, tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
# Act & Assert
with pytest.raises(ValueError):
@@ -121,15 +115,7 @@ class TestAppAnnotationServiceUpInsert:
patch("services.annotation_service.current_account_with_tenant", return_value=(current_user, tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
message_query = MagicMock()
message_query.where.return_value = message_query
message_query.first.return_value = None
mock_db.session.query.side_effect = [app_query, message_query]
mock_db.session.scalar.side_effect = [app, None]
# Act & Assert
with pytest.raises(NotFound):
@@ -152,19 +138,7 @@ class TestAppAnnotationServiceUpInsert:
patch("services.annotation_service.db") as mock_db,
patch("services.annotation_service.add_annotation_to_index_task") as mock_task,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
message_query = MagicMock()
message_query.where.return_value = message_query
message_query.first.return_value = message
setting_query = MagicMock()
setting_query.where.return_value = setting_query
setting_query.first.return_value = setting
mock_db.session.query.side_effect = [app_query, message_query, setting_query]
mock_db.session.scalar.side_effect = [app, message, setting]
# Act
result = AppAnnotationService.up_insert_app_annotation_from_message(args, app.id)
@@ -202,19 +176,7 @@ class TestAppAnnotationServiceUpInsert:
patch("services.annotation_service.MessageAnnotation", return_value=annotation_instance) as mock_cls,
patch("services.annotation_service.add_annotation_to_index_task") as mock_task,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
message_query = MagicMock()
message_query.where.return_value = message_query
message_query.first.return_value = message
setting_query = MagicMock()
setting_query.where.return_value = setting_query
setting_query.first.return_value = None
mock_db.session.query.side_effect = [app_query, message_query, setting_query]
mock_db.session.scalar.side_effect = [app, message, None]
# Act
result = AppAnnotationService.up_insert_app_annotation_from_message(args, app.id)
@@ -245,10 +207,7 @@ class TestAppAnnotationServiceUpInsert:
patch("services.annotation_service.current_account_with_tenant", return_value=(current_user, tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
# Act & Assert
with pytest.raises(ValueError):
@@ -270,15 +229,7 @@ class TestAppAnnotationServiceUpInsert:
patch("services.annotation_service.MessageAnnotation", return_value=annotation_instance) as mock_cls,
patch("services.annotation_service.add_annotation_to_index_task") as mock_task,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
setting_query = MagicMock()
setting_query.where.return_value = setting_query
setting_query.first.return_value = setting
mock_db.session.query.side_effect = [app_query, setting_query]
mock_db.session.scalar.side_effect = [app, setting]
# Act
result = AppAnnotationService.up_insert_app_annotation_from_message(args, app.id)
@@ -406,10 +357,7 @@ class TestAppAnnotationServiceListAndExport:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = None
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = None
# Act & Assert
with pytest.raises(NotFound):
@@ -427,10 +375,7 @@ class TestAppAnnotationServiceListAndExport:
patch("services.annotation_service.db") as mock_db,
patch("libs.helper.escape_like_pattern", return_value="safe"),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
mock_db.paginate.return_value = pagination
# Act
@@ -451,10 +396,7 @@ class TestAppAnnotationServiceListAndExport:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
mock_db.paginate.return_value = pagination
# Act
@@ -481,16 +423,8 @@ class TestAppAnnotationServiceListAndExport:
patch("services.annotation_service.db") as mock_db,
patch("services.annotation_service.CSVSanitizer.sanitize_value", side_effect=lambda v: f"safe:{v}"),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
annotation_query = MagicMock()
annotation_query.where.return_value = annotation_query
annotation_query.order_by.return_value = annotation_query
annotation_query.all.return_value = [annotation1, annotation2]
mock_db.session.query.side_effect = [app_query, annotation_query]
mock_db.session.scalar.return_value = app
mock_db.session.scalars.return_value.all.return_value = [annotation1, annotation2]
# Act
result = AppAnnotationService.export_annotation_list_by_app_id(app.id)
@@ -511,10 +445,7 @@ class TestAppAnnotationServiceListAndExport:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = None
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = None
# Act & Assert
with pytest.raises(NotFound):
@@ -534,10 +465,7 @@ class TestAppAnnotationServiceDirectManipulation:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = None
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = None
# Act & Assert
with pytest.raises(NotFound):
@@ -554,10 +482,7 @@ class TestAppAnnotationServiceDirectManipulation:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
# Act & Assert
with pytest.raises(ValueError):
@@ -579,15 +504,7 @@ class TestAppAnnotationServiceDirectManipulation:
patch("services.annotation_service.MessageAnnotation", return_value=annotation_instance) as mock_cls,
patch("services.annotation_service.add_annotation_to_index_task") as mock_task,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
setting_query = MagicMock()
setting_query.where.return_value = setting_query
setting_query.first.return_value = setting
mock_db.session.query.side_effect = [app_query, setting_query]
mock_db.session.scalar.side_effect = [app, setting]
# Act
result = AppAnnotationService.insert_app_annotation_directly(args, app.id)
@@ -621,15 +538,8 @@ class TestAppAnnotationServiceDirectManipulation:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
annotation_query = MagicMock()
annotation_query.where.return_value = annotation_query
annotation_query.first.return_value = None
mock_db.session.query.side_effect = [app_query, annotation_query]
mock_db.session.scalar.return_value = app
mock_db.session.get.return_value = None
# Act & Assert
with pytest.raises(NotFound):
@@ -645,10 +555,7 @@ class TestAppAnnotationServiceDirectManipulation:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = None
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = None
# Act & Assert
with pytest.raises(NotFound):
@@ -666,15 +573,8 @@ class TestAppAnnotationServiceDirectManipulation:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
annotation_query = MagicMock()
annotation_query.where.return_value = annotation_query
annotation_query.first.return_value = annotation
mock_db.session.query.side_effect = [app_query, annotation_query]
mock_db.session.scalar.return_value = app
mock_db.session.get.return_value = annotation
# Act & Assert
with pytest.raises(ValueError):
@@ -695,19 +595,8 @@ class TestAppAnnotationServiceDirectManipulation:
patch("services.annotation_service.db") as mock_db,
patch("services.annotation_service.update_annotation_to_index_task") as mock_task,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
annotation_query = MagicMock()
annotation_query.where.return_value = annotation_query
annotation_query.first.return_value = annotation
setting_query = MagicMock()
setting_query.where.return_value = setting_query
setting_query.first.return_value = setting
mock_db.session.query.side_effect = [app_query, annotation_query, setting_query]
mock_db.session.scalar.side_effect = [app, setting]
mock_db.session.get.return_value = annotation
# Act
result = AppAnnotationService.update_app_annotation_directly(args, app.id, annotation.id)
@@ -740,22 +629,11 @@ class TestAppAnnotationServiceDirectManipulation:
patch("services.annotation_service.db") as mock_db,
patch("services.annotation_service.delete_annotation_index_task") as mock_task,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
annotation_query = MagicMock()
annotation_query.where.return_value = annotation_query
annotation_query.first.return_value = annotation
setting_query = MagicMock()
setting_query.where.return_value = setting_query
setting_query.first.return_value = setting
mock_db.session.scalar.side_effect = [app, setting]
mock_db.session.get.return_value = annotation
scalars_result = MagicMock()
scalars_result.all.return_value = [history1, history2]
mock_db.session.query.side_effect = [app_query, annotation_query, setting_query]
mock_db.session.scalars.return_value = scalars_result
# Act
@@ -782,10 +660,7 @@ class TestAppAnnotationServiceDirectManipulation:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = None
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = None
# Act & Assert
with pytest.raises(NotFound):
@@ -801,15 +676,8 @@ class TestAppAnnotationServiceDirectManipulation:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
annotation_query = MagicMock()
annotation_query.where.return_value = annotation_query
annotation_query.first.return_value = None
mock_db.session.query.side_effect = [app_query, annotation_query]
mock_db.session.scalar.return_value = app
mock_db.session.get.return_value = None
# Act & Assert
with pytest.raises(NotFound):
@@ -825,16 +693,8 @@ class TestAppAnnotationServiceDirectManipulation:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
annotations_query = MagicMock()
annotations_query.outerjoin.return_value = annotations_query
annotations_query.where.return_value = annotations_query
annotations_query.all.return_value = []
mock_db.session.query.side_effect = [app_query, annotations_query]
mock_db.session.scalar.return_value = app
mock_db.session.execute.return_value.all.return_value = []
# Act
result = AppAnnotationService.delete_app_annotations_in_batch(app.id, ["ann-1"])
@@ -851,10 +711,7 @@ class TestAppAnnotationServiceDirectManipulation:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = None
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = None
# Act & Assert
with pytest.raises(NotFound):
@@ -874,24 +731,14 @@ class TestAppAnnotationServiceDirectManipulation:
patch("services.annotation_service.db") as mock_db,
patch("services.annotation_service.delete_annotation_index_task") as mock_task,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.scalar.return_value = app
annotations_query = MagicMock()
annotations_query.outerjoin.return_value = annotations_query
annotations_query.where.return_value = annotations_query
annotations_query.all.return_value = [(annotation1, setting), (annotation2, None)]
hit_history_query = MagicMock()
hit_history_query.where.return_value = hit_history_query
hit_history_query.delete.return_value = None
delete_query = MagicMock()
delete_query.where.return_value = delete_query
delete_query.delete.return_value = 2
mock_db.session.query.side_effect = [app_query, annotations_query, hit_history_query, delete_query]
# First execute().all() for multi-column query, subsequent execute() calls for deletes
execute_result_multi = MagicMock()
execute_result_multi.all.return_value = [(annotation1, setting), (annotation2, None)]
execute_result_delete = MagicMock()
execute_result_delete.rowcount = 2
mock_db.session.execute.side_effect = [execute_result_multi, MagicMock(), execute_result_delete]
# Act
result = AppAnnotationService.delete_app_annotations_in_batch(app.id, ["ann-1", "ann-2"])
@@ -915,10 +762,7 @@ class TestAppAnnotationServiceBatchImport:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = None
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = None
# Act & Assert
with pytest.raises(NotFound):
@@ -941,10 +785,7 @@ class TestAppAnnotationServiceBatchImport:
new=SimpleNamespace(ANNOTATION_IMPORT_MAX_RECORDS=5, ANNOTATION_IMPORT_MIN_RECORDS=1),
),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
# Act
result = AppAnnotationService.batch_import_app_annotations(app.id, file)
@@ -968,10 +809,7 @@ class TestAppAnnotationServiceBatchImport:
new=SimpleNamespace(ANNOTATION_IMPORT_MAX_RECORDS=5, ANNOTATION_IMPORT_MIN_RECORDS=1),
),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
# Act
result = AppAnnotationService.batch_import_app_annotations(app.id, file)
@@ -999,10 +837,7 @@ class TestAppAnnotationServiceBatchImport:
new=SimpleNamespace(ANNOTATION_IMPORT_MAX_RECORDS=5, ANNOTATION_IMPORT_MIN_RECORDS=2),
),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
# Act
result = AppAnnotationService.batch_import_app_annotations(app.id, file)
@@ -1028,10 +863,7 @@ class TestAppAnnotationServiceBatchImport:
new=SimpleNamespace(ANNOTATION_IMPORT_MAX_RECORDS=1, ANNOTATION_IMPORT_MIN_RECORDS=1),
),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
# Act
result = AppAnnotationService.batch_import_app_annotations(app.id, file)
@@ -1061,10 +893,7 @@ class TestAppAnnotationServiceBatchImport:
new=SimpleNamespace(ANNOTATION_IMPORT_MAX_RECORDS=5, ANNOTATION_IMPORT_MIN_RECORDS=1),
),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
# Act
result = AppAnnotationService.batch_import_app_annotations(app.id, file)
@@ -1090,10 +919,7 @@ class TestAppAnnotationServiceBatchImport:
new=SimpleNamespace(ANNOTATION_IMPORT_MAX_RECORDS=5, ANNOTATION_IMPORT_MIN_RECORDS=1),
),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
# Act
result = AppAnnotationService.batch_import_app_annotations(app.id, file)
@@ -1119,10 +945,7 @@ class TestAppAnnotationServiceBatchImport:
new=SimpleNamespace(ANNOTATION_IMPORT_MAX_RECORDS=5, ANNOTATION_IMPORT_MIN_RECORDS=1),
),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
# Act
result = AppAnnotationService.batch_import_app_annotations(app.id, file)
@@ -1148,10 +971,7 @@ class TestAppAnnotationServiceBatchImport:
new=SimpleNamespace(ANNOTATION_IMPORT_MAX_RECORDS=5, ANNOTATION_IMPORT_MIN_RECORDS=1),
),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
# Act
result = AppAnnotationService.batch_import_app_annotations(app.id, file)
@@ -1182,10 +1002,7 @@ class TestAppAnnotationServiceBatchImport:
new=SimpleNamespace(ANNOTATION_IMPORT_MAX_RECORDS=5, ANNOTATION_IMPORT_MIN_RECORDS=1),
),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
# Act
result = AppAnnotationService.batch_import_app_annotations(app.id, file)
@@ -1218,10 +1035,7 @@ class TestAppAnnotationServiceBatchImport:
new=SimpleNamespace(ANNOTATION_IMPORT_MAX_RECORDS=5, ANNOTATION_IMPORT_MIN_RECORDS=1),
),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
# Act
result = AppAnnotationService.batch_import_app_annotations(app.id, file)
@@ -1257,10 +1071,7 @@ class TestAppAnnotationServiceBatchImport:
new=SimpleNamespace(ANNOTATION_IMPORT_MAX_RECORDS=5, ANNOTATION_IMPORT_MIN_RECORDS=1),
),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = app
mock_redis.zadd.side_effect = RuntimeError("boom")
mock_redis.zrem.side_effect = RuntimeError("cleanup-failed")
@@ -1285,10 +1096,7 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = None
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = None
# Act & Assert
with pytest.raises(NotFound):
@@ -1306,15 +1114,8 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
annotation_query = MagicMock()
annotation_query.where.return_value = annotation_query
annotation_query.first.return_value = annotation
mock_db.session.query.side_effect = [app_query, annotation_query]
mock_db.session.scalar.return_value = app
mock_db.session.get.return_value = annotation
mock_db.paginate.return_value = pagination
# Act
@@ -1334,15 +1135,8 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
annotation_query = MagicMock()
annotation_query.where.return_value = annotation_query
annotation_query.first.return_value = None
mock_db.session.query.side_effect = [app_query, annotation_query]
mock_db.session.scalar.return_value = app
mock_db.session.get.return_value = None
# Act & Assert
with pytest.raises(NotFound):
@@ -1352,10 +1146,7 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
"""Test get_annotation_by_id returns None when not found."""
# Arrange
with patch("services.annotation_service.db") as mock_db:
query = MagicMock()
query.where.return_value = query
query.first.return_value = None
mock_db.session.query.return_value = query
mock_db.session.get.return_value = None
# Act
result = AppAnnotationService.get_annotation_by_id("ann-1")
@@ -1368,10 +1159,7 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
# Arrange
annotation = _make_annotation("ann-1")
with patch("services.annotation_service.db") as mock_db:
query = MagicMock()
query.where.return_value = query
query.first.return_value = annotation
mock_db.session.query.return_value = query
mock_db.session.get.return_value = annotation
# Act
result = AppAnnotationService.get_annotation_by_id("ann-1")
@@ -1386,10 +1174,6 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
patch("services.annotation_service.db") as mock_db,
patch("services.annotation_service.AppAnnotationHitHistory") as mock_history_cls,
):
query = MagicMock()
query.where.return_value = query
mock_db.session.query.return_value = query
# Act
AppAnnotationService.add_annotation_history(
annotation_id="ann-1",
@@ -1404,7 +1188,7 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
)
# Assert
query.update.assert_called_once()
mock_db.session.execute.assert_called_once()
mock_history_cls.assert_called_once()
mock_db.session.add.assert_called_once()
mock_db.session.commit.assert_called_once()
@@ -1420,15 +1204,7 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
setting_query = MagicMock()
setting_query.where.return_value = setting_query
setting_query.first.return_value = setting
mock_db.session.query.side_effect = [app_query, setting_query]
mock_db.session.scalar.side_effect = [app, setting]
# Act
result = AppAnnotationService.get_app_annotation_setting_by_app_id(app.id)
@@ -1448,10 +1224,7 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = None
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = None
# Act & Assert
with pytest.raises(NotFound):
@@ -1468,15 +1241,7 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
setting_query = MagicMock()
setting_query.where.return_value = setting_query
setting_query.first.return_value = setting
mock_db.session.query.side_effect = [app_query, setting_query]
mock_db.session.scalar.side_effect = [app, setting]
# Act
result = AppAnnotationService.get_app_annotation_setting_by_app_id(app.id)
@@ -1495,15 +1260,7 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
setting_query = MagicMock()
setting_query.where.return_value = setting_query
setting_query.first.return_value = None
mock_db.session.query.side_effect = [app_query, setting_query]
mock_db.session.scalar.side_effect = [app, None]
# Act
result = AppAnnotationService.get_app_annotation_setting_by_app_id(app.id)
@@ -1525,15 +1282,7 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
patch("services.annotation_service.db") as mock_db,
patch("services.annotation_service.naive_utc_now", return_value="now"),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
setting_query = MagicMock()
setting_query.where.return_value = setting_query
setting_query.first.return_value = setting
mock_db.session.query.side_effect = [app_query, setting_query]
mock_db.session.scalar.side_effect = [app, setting]
# Act
result = AppAnnotationService.update_app_annotation_setting(app.id, setting.id, args)
@@ -1560,15 +1309,7 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
patch("services.annotation_service.db") as mock_db,
patch("services.annotation_service.naive_utc_now", return_value="now"),
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
setting_query = MagicMock()
setting_query.where.return_value = setting_query
setting_query.first.return_value = setting
mock_db.session.query.side_effect = [app_query, setting_query]
mock_db.session.scalar.side_effect = [app, setting]
# Act
result = AppAnnotationService.update_app_annotation_setting(app.id, setting.id, args)
@@ -1587,10 +1328,7 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = None
mock_db.session.query.return_value = app_query
mock_db.session.scalar.return_value = None
# Act & Assert
with pytest.raises(NotFound):
@@ -1606,15 +1344,7 @@ class TestAppAnnotationServiceHitHistoryAndSettings:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
app_query = MagicMock()
app_query.where.return_value = app_query
app_query.first.return_value = app
setting_query = MagicMock()
setting_query.where.return_value = setting_query
setting_query.first.return_value = None
mock_db.session.query.side_effect = [app_query, setting_query]
mock_db.session.scalar.side_effect = [app, None]
# Act & Assert
with pytest.raises(NotFound):
@@ -1634,25 +1364,21 @@ class TestAppAnnotationServiceClearAll:
annotation2 = _make_annotation("ann-2")
history = MagicMock(spec=AppAnnotationHitHistory)
def query_side_effect(*args: object, **kwargs: object) -> MagicMock:
query = MagicMock()
query.where.return_value = query
if App in args:
query.first.return_value = app
elif AppAnnotationSetting in args:
query.first.return_value = setting
elif MessageAnnotation in args:
query.yield_per.return_value = [annotation1, annotation2]
elif AppAnnotationHitHistory in args:
query.yield_per.return_value = [history]
return query
with (
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
patch("services.annotation_service.delete_annotation_index_task") as mock_task,
):
mock_db.session.query.side_effect = query_side_effect
# scalar calls: app lookup, annotation_setting lookup
mock_db.session.scalar.side_effect = [app, setting]
# scalars calls: first for annotations iteration, then for each annotation's hit histories
annotations_scalars = MagicMock()
annotations_scalars.yield_per.return_value = [annotation1, annotation2]
histories_scalars_1 = MagicMock()
histories_scalars_1.yield_per.return_value = [history]
histories_scalars_2 = MagicMock()
histories_scalars_2.yield_per.return_value = []
mock_db.session.scalars.side_effect = [annotations_scalars, histories_scalars_1, histories_scalars_2]
# Act
result = AppAnnotationService.clear_all_annotations(app.id)
@@ -1675,10 +1401,7 @@ class TestAppAnnotationServiceClearAll:
patch("services.annotation_service.current_account_with_tenant", return_value=(_make_user(), tenant_id)),
patch("services.annotation_service.db") as mock_db,
):
query = MagicMock()
query.where.return_value = query
query.first.return_value = None
mock_db.session.query.return_value = query
mock_db.session.scalar.return_value = None
# Act & Assert
with pytest.raises(NotFound):

View File

@@ -62,7 +62,7 @@ class TestDatasetServiceQueries:
self, mock_dataset_query_dependencies
):
user = DatasetServiceUnitDataFactory.create_user_mock(role=TenantAccountRole.DATASET_OPERATOR)
mock_dataset_query_dependencies["db"].session.query.return_value.filter_by.return_value.all.return_value = []
mock_dataset_query_dependencies["db"].session.scalars.return_value.all.return_value = []
items, total = DatasetService.get_datasets(page=1, per_page=20, tenant_id="tenant-1", user=user)
@@ -108,9 +108,7 @@ class TestDatasetServiceQueries:
dataset_process_rule.rules_dict = {"delimiter": "\n"}
with patch("services.dataset_service.db") as mock_db:
(
mock_db.session.query.return_value.where.return_value.order_by.return_value.limit.return_value.one_or_none.return_value
) = dataset_process_rule
(mock_db.session.execute.return_value.scalar_one_or_none.return_value) = dataset_process_rule
result = DatasetService.get_process_rules("dataset-1")
@@ -118,9 +116,7 @@ class TestDatasetServiceQueries:
def test_get_process_rules_falls_back_to_default_rules_when_missing(self):
with patch("services.dataset_service.db") as mock_db:
(
mock_db.session.query.return_value.where.return_value.order_by.return_value.limit.return_value.one_or_none.return_value
) = None
(mock_db.session.execute.return_value.scalar_one_or_none.return_value) = None
result = DatasetService.get_process_rules("dataset-1")
@@ -151,7 +147,7 @@ class TestDatasetServiceQueries:
dataset = DatasetServiceUnitDataFactory.create_dataset_mock()
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.filter_by.return_value.first.return_value = dataset
mock_db.session.get.return_value = dataset
result = DatasetService.get_dataset(dataset.id)
@@ -308,7 +304,7 @@ class TestDatasetServiceCreationAndUpdate:
account = SimpleNamespace(id="user-1")
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.filter_by.return_value.first.return_value = object()
mock_db.session.scalar.return_value = object()
with pytest.raises(DatasetNameDuplicateError, match="Dataset with name Dataset already exists"):
DatasetService.create_empty_dataset("tenant-1", "Dataset", None, "economy", account)
@@ -319,6 +315,7 @@ class TestDatasetServiceCreationAndUpdate:
with (
patch("services.dataset_service.db") as mock_db,
patch("services.dataset_service.select"),
patch(
"services.dataset_service.Dataset",
side_effect=lambda **kwargs: SimpleNamespace(id="dataset-1", **kwargs),
@@ -326,7 +323,7 @@ class TestDatasetServiceCreationAndUpdate:
patch("services.dataset_service.ModelManager") as model_manager_cls,
patch.object(DatasetService, "check_embedding_model_setting") as check_embedding,
):
mock_db.session.query.return_value.filter_by.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
model_manager_cls.for_tenant.return_value.get_default_model_instance.return_value = default_embedding_model
dataset = DatasetService.create_empty_dataset(
@@ -355,6 +352,7 @@ class TestDatasetServiceCreationAndUpdate:
with (
patch("services.dataset_service.db") as mock_db,
patch("services.dataset_service.select"),
patch(
"services.dataset_service.Dataset",
side_effect=lambda **kwargs: SimpleNamespace(id="dataset-1", **kwargs),
@@ -368,7 +366,7 @@ class TestDatasetServiceCreationAndUpdate:
patch.object(DatasetService, "check_embedding_model_setting") as check_embedding,
patch.object(DatasetService, "check_reranking_model_setting") as check_reranking,
):
mock_db.session.query.return_value.filter_by.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model
dataset = DatasetService.create_empty_dataset(
@@ -412,7 +410,7 @@ class TestDatasetServiceCreationAndUpdate:
)
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.filter_by.return_value.first.return_value = object()
mock_db.session.scalar.return_value = object()
with pytest.raises(DatasetNameDuplicateError, match="Existing Dataset already exists"):
DatasetService.create_empty_rag_pipeline_dataset("tenant-1", entity)
@@ -435,12 +433,13 @@ class TestDatasetServiceCreationAndUpdate:
with (
patch("services.dataset_service.db") as mock_db,
patch("services.dataset_service.select"),
patch("services.dataset_service.current_user", SimpleNamespace(id="user-1")),
patch("services.dataset_service.generate_incremental_name", return_value="Untitled 2") as generate_name,
patch("services.dataset_service.Pipeline", side_effect=pipeline_factory),
patch("services.dataset_service.Dataset", side_effect=dataset_factory),
):
mock_db.session.query.return_value.filter_by.return_value.all.return_value = [
mock_db.session.scalars.return_value.all.return_value = [
SimpleNamespace(name="Untitled"),
SimpleNamespace(name="Untitled 1"),
]
@@ -465,7 +464,7 @@ class TestDatasetServiceCreationAndUpdate:
patch("services.dataset_service.db") as mock_db,
patch("services.dataset_service.current_user", SimpleNamespace(id=None)),
):
mock_db.session.query.return_value.filter_by.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
with pytest.raises(ValueError, match="Current user or current user id not found"):
DatasetService.create_empty_rag_pipeline_dataset("tenant-1", entity)
@@ -520,7 +519,7 @@ class TestDatasetServiceCreationAndUpdate:
def test_has_dataset_same_name_returns_true_when_query_matches(self):
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.where.return_value.first.return_value = object()
mock_db.session.scalar.return_value = object()
result = DatasetService._has_dataset_same_name("tenant-1", "dataset-1", "Dataset")
@@ -630,7 +629,7 @@ class TestDatasetServiceCreationAndUpdate:
result = DatasetService._update_internal_dataset(dataset, update_payload.copy(), user)
assert result is dataset
updated_values = mock_db.session.query.return_value.filter_by.return_value.update.call_args.args[0]
updated_values = mock_db.session.execute.call_args.args[0].compile().params
assert updated_values["name"] == "Updated Dataset"
assert updated_values["description"] is None
assert updated_values["retrieval_model"] == {"top_k": 4}
@@ -658,13 +657,13 @@ class TestDatasetServiceCreationAndUpdate:
with patch("services.dataset_service.db") as mock_db:
DatasetService._update_pipeline_knowledge_base_node_data(dataset, "user-1")
mock_db.session.query.assert_not_called()
mock_db.session.get.assert_not_called()
def test_update_pipeline_knowledge_base_node_data_returns_when_pipeline_is_missing(self):
dataset = SimpleNamespace(runtime_mode="rag_pipeline", pipeline_id="pipeline-1")
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.filter_by.return_value.first.return_value = None
mock_db.session.get.return_value = None
DatasetService._update_pipeline_knowledge_base_node_data(dataset, "user-1")
@@ -703,7 +702,7 @@ class TestDatasetServiceCreationAndUpdate:
patch("services.dataset_service.RagPipelineService", return_value=rag_pipeline_service),
patch("services.dataset_service.Workflow.new", return_value=new_workflow) as workflow_new,
):
mock_db.session.query.return_value.filter_by.return_value.first.return_value = pipeline
mock_db.session.get.return_value = pipeline
DatasetService._update_pipeline_knowledge_base_node_data(dataset, "user-1")
@@ -725,7 +724,7 @@ class TestDatasetServiceCreationAndUpdate:
patch("services.dataset_service.db") as mock_db,
patch("services.dataset_service.RagPipelineService", return_value=rag_pipeline_service),
):
mock_db.session.query.return_value.filter_by.return_value.first.return_value = pipeline
mock_db.session.get.return_value = pipeline
with pytest.raises(RuntimeError, match="boom"):
DatasetService._update_pipeline_knowledge_base_node_data(dataset, "user-1")
@@ -1364,7 +1363,7 @@ class TestDatasetServicePermissionsAndLifecycle:
)
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.filter_by.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
with pytest.raises(NoPermissionError, match="do not have permission"):
DatasetService.check_dataset_permission(dataset, user)
@@ -1382,7 +1381,7 @@ class TestDatasetServicePermissionsAndLifecycle:
with patch("services.dataset_service.db") as mock_db:
DatasetService.check_dataset_permission(dataset, user)
mock_db.session.query.assert_not_called()
mock_db.session.scalar.assert_not_called()
def test_check_dataset_permission_allows_partial_team_member_with_binding(self):
dataset = DatasetServiceUnitDataFactory.create_dataset_mock(
@@ -1395,7 +1394,7 @@ class TestDatasetServicePermissionsAndLifecycle:
)
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.filter_by.return_value.first.return_value = object()
mock_db.session.scalar.return_value = object()
DatasetService.check_dataset_permission(dataset, user)
@@ -1427,7 +1426,7 @@ class TestDatasetServicePermissionsAndLifecycle:
)
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.filter_by.return_value.all.return_value = []
mock_db.session.scalars.return_value.all.return_value = []
with pytest.raises(NoPermissionError, match="do not have permission"):
DatasetService.check_dataset_operator_permission(user=user, dataset=dataset)
@@ -1446,9 +1445,7 @@ class TestDatasetServicePermissionsAndLifecycle:
def test_get_related_apps_returns_ordered_query_results(self):
with patch("services.dataset_service.db") as mock_db:
mock_db.desc.side_effect = lambda column: column
mock_db.session.query.return_value.where.return_value.order_by.return_value.all.return_value = [
"relation-1"
]
mock_db.session.scalars.return_value.all.return_value = ["relation-1"]
result = DatasetService.get_related_apps("dataset-1")
@@ -1610,7 +1607,7 @@ class TestDatasetCollectionBindingService:
binding = SimpleNamespace(id="binding-1")
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.where.return_value.order_by.return_value.first.return_value = binding
mock_db.session.scalar.return_value = binding
result = DatasetCollectionBindingService.get_dataset_collection_binding("provider", "model")
@@ -1622,10 +1619,11 @@ class TestDatasetCollectionBindingService:
with (
patch("services.dataset_service.db") as mock_db,
patch("services.dataset_service.select"),
patch("services.dataset_service.DatasetCollectionBinding", return_value=created_binding) as binding_cls,
patch.object(Dataset, "gen_collection_name_by_id", return_value="generated-collection"),
):
mock_db.session.query.return_value.where.return_value.order_by.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
result = DatasetCollectionBindingService.get_dataset_collection_binding("provider", "model", "dataset")
@@ -1641,7 +1639,7 @@ class TestDatasetCollectionBindingService:
def test_get_dataset_collection_binding_by_id_and_type_raises_when_missing(self):
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.where.return_value.order_by.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
with pytest.raises(ValueError, match="Dataset collection binding not found"):
DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type("binding-1")
@@ -1650,7 +1648,7 @@ class TestDatasetCollectionBindingService:
binding = SimpleNamespace(id="binding-1")
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.where.return_value.order_by.return_value.first.return_value = binding
mock_db.session.scalar.return_value = binding
result = DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type("binding-1")
@@ -1676,7 +1674,7 @@ class TestDatasetPermissionService:
[{"user_id": "user-1"}, {"user_id": "user-2"}],
)
mock_db.session.query.return_value.where.return_value.delete.assert_called_once()
mock_db.session.execute.assert_called()
mock_db.session.add_all.assert_called_once()
mock_db.session.commit.assert_called_once()
@@ -1747,12 +1745,12 @@ class TestDatasetPermissionService:
with patch("services.dataset_service.db") as mock_db:
DatasetPermissionService.clear_partial_member_list("dataset-1")
mock_db.session.query.return_value.where.return_value.delete.assert_called_once()
mock_db.session.execute.assert_called()
mock_db.session.commit.assert_called_once()
def test_clear_partial_member_list_rolls_back_on_exception(self):
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.where.return_value.delete.side_effect = RuntimeError("boom")
mock_db.session.execute.side_effect = RuntimeError("boom")
with pytest.raises(RuntimeError, match="boom"):
DatasetPermissionService.clear_partial_member_list("dataset-1")

View File

@@ -90,13 +90,13 @@ class TestDocumentServiceQueryAndDownloadHelpers:
result = DocumentService.get_document("dataset-1", None)
assert result is None
mock_db.session.query.assert_not_called()
mock_db.session.scalar.assert_not_called()
def test_get_document_queries_by_dataset_and_document_id(self):
document = DatasetServiceUnitDataFactory.create_document_mock()
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.where.return_value.first.return_value = document
mock_db.session.scalar.return_value = document
result = DocumentService.get_document("dataset-1", "doc-1")
@@ -435,7 +435,7 @@ class TestDocumentServiceQueryAndDownloadHelpers:
upload_file = DatasetServiceUnitDataFactory.create_upload_file_mock()
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.where.return_value.one_or_none.return_value = upload_file
mock_db.session.get.return_value = upload_file
result = DocumentService.get_document_file_detail(upload_file.id)
@@ -570,7 +570,7 @@ class TestDocumentServiceMutations:
assert document.name == "New Name"
assert document.doc_metadata[BuiltInField.document_name] == "New Name"
mock_db.session.add.assert_called_once_with(document)
mock_db.session.query.return_value.where.return_value.update.assert_called_once()
mock_db.session.execute.assert_called()
mock_db.session.commit.assert_called_once()
def test_recover_document_raises_when_document_is_not_paused(self):
@@ -624,9 +624,7 @@ class TestDocumentServiceMutations:
document = DatasetServiceUnitDataFactory.create_document_mock(position=7)
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.filter_by.return_value.order_by.return_value.first.return_value = (
document
)
mock_db.session.scalar.return_value = document
result = DocumentService.get_documents_position("dataset-1")
@@ -634,7 +632,7 @@ class TestDocumentServiceMutations:
def test_get_documents_position_defaults_to_one_when_dataset_is_empty(self):
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.filter_by.return_value.order_by.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
result = DocumentService.get_documents_position("dataset-1")
@@ -869,11 +867,7 @@ class TestDocumentServiceUpdateDocumentWithDatasetId:
patch("services.dataset_service.naive_utc_now", return_value="now"),
patch("services.dataset_service.document_indexing_update_task") as update_task,
):
upload_query = MagicMock()
upload_query.where.return_value.first.return_value = SimpleNamespace(id="file-1", name="upload.txt")
segment_query = MagicMock()
segment_query.filter_by.return_value.update.return_value = 3
mock_db.session.query.side_effect = [upload_query, segment_query]
mock_db.session.scalar.return_value = SimpleNamespace(id="file-1", name="upload.txt")
result = DocumentService.update_document_with_dataset_id(dataset, document_data, account_context)
@@ -892,7 +886,7 @@ class TestDocumentServiceUpdateDocumentWithDatasetId:
assert document.created_from == "web"
assert document.doc_form == IndexStructureType.QA_INDEX
assert mock_db.session.commit.call_count == 3
segment_query.filter_by.return_value.update.assert_called_once()
mock_db.session.execute.assert_called()
update_task.delay.assert_called_once_with(document.dataset_id, document.id)
def test_update_document_with_dataset_id_notion_import_requires_binding(self, account_context):
@@ -920,9 +914,7 @@ class TestDocumentServiceUpdateDocumentWithDatasetId:
patch.object(DatasetService, "check_dataset_model_setting"),
patch("services.dataset_service.db") as mock_db,
):
binding_query = MagicMock()
binding_query.where.return_value.first.return_value = None
mock_db.session.query.return_value = binding_query
mock_db.session.scalar.return_value = None
with pytest.raises(ValueError, match="Data source binding not found"):
DocumentService.update_document_with_dataset_id(dataset, document_data, account_context)
@@ -954,10 +946,6 @@ class TestDocumentServiceUpdateDocumentWithDatasetId:
patch("services.dataset_service.naive_utc_now", return_value="now"),
patch("services.dataset_service.document_indexing_update_task") as update_task,
):
segment_query = MagicMock()
segment_query.filter_by.return_value.update.return_value = 2
mock_db.session.query.return_value = segment_query
result = DocumentService.update_document_with_dataset_id(dataset, document_data, account_context)
assert result is document
@@ -968,7 +956,7 @@ class TestDocumentServiceUpdateDocumentWithDatasetId:
)
assert document.name == ""
assert document.doc_form == IndexStructureType.PARENT_CHILD_INDEX
segment_query.filter_by.return_value.update.assert_called_once()
mock_db.session.execute.assert_called()
update_task.delay.assert_called_once_with("dataset-1", "doc-1")
@@ -1218,11 +1206,10 @@ class TestDocumentServiceSaveDocumentWithDatasetId:
patch("services.dataset_service.secrets.randbelow", return_value=23),
):
mock_redis.lock.return_value = _make_lock_context()
upload_query = MagicMock()
upload_query.where.return_value.all.return_value = [upload_file_a, upload_file_b]
existing_documents_query = MagicMock()
existing_documents_query.where.return_value.all.return_value = [duplicate_document]
mock_db.session.query.side_effect = [upload_query, existing_documents_query]
mock_db.session.scalars.return_value.all.side_effect = [
[upload_file_a, upload_file_b],
[duplicate_document],
]
documents, batch = DocumentService.save_document_with_dataset_id(
dataset,
@@ -1302,9 +1289,7 @@ class TestDocumentServiceSaveDocumentWithDatasetId:
patch("services.dataset_service.DocumentIndexingTaskProxy") as document_proxy_cls,
):
mock_redis.lock.return_value = _make_lock_context()
notion_documents_query = MagicMock()
notion_documents_query.filter_by.return_value.all.return_value = [existing_keep, existing_remove]
mock_db.session.query.return_value = notion_documents_query
mock_db.session.scalars.return_value.all.return_value = [existing_keep, existing_remove]
documents, _ = DocumentService.save_document_with_dataset_id(
dataset,
@@ -1474,12 +1459,11 @@ class TestDocumentServiceTenantAndUpdateEdges:
def test_get_tenant_documents_count_returns_query_count(self, account_context):
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.where.return_value.count.return_value = 12
mock_db.session.scalar.return_value = 12
result = DocumentService.get_tenant_documents_count()
assert result == 12
mock_db.session.query.return_value.where.return_value.count.assert_called_once()
def test_update_document_with_dataset_id_uses_automatic_process_rule_payload(self, account_context):
dataset = SimpleNamespace(id="dataset-1", tenant_id="tenant-1")
@@ -1514,11 +1498,7 @@ class TestDocumentServiceTenantAndUpdateEdges:
):
process_rule_cls.AUTOMATIC_RULES = DatasetProcessRule.AUTOMATIC_RULES
process_rule_cls.return_value = created_process_rule
upload_query = MagicMock()
upload_query.where.return_value.first.return_value = SimpleNamespace(id="file-1", name="upload.txt")
segment_query = MagicMock()
segment_query.filter_by.return_value.update.return_value = 1
mock_db.session.query.side_effect = [upload_query, segment_query]
mock_db.session.scalar.return_value = SimpleNamespace(id="file-1", name="upload.txt")
result = DocumentService.update_document_with_dataset_id(dataset, document_data, account_context)
@@ -1567,7 +1547,7 @@ class TestDocumentServiceTenantAndUpdateEdges:
patch.object(DatasetService, "check_dataset_model_setting"),
patch("services.dataset_service.db") as mock_db,
):
mock_db.session.query.return_value.where.return_value.first.return_value = None
mock_db.session.scalar.return_value = None
with pytest.raises(FileNotExistsError):
DocumentService.update_document_with_dataset_id(dataset, document_data, account_context)
@@ -1618,11 +1598,7 @@ class TestDocumentServiceTenantAndUpdateEdges:
patch("services.dataset_service.naive_utc_now", return_value="now"),
patch("services.dataset_service.document_indexing_update_task") as update_task,
):
binding_query = MagicMock()
binding_query.where.return_value.first.return_value = SimpleNamespace(id="binding-1")
segment_query = MagicMock()
segment_query.filter_by.return_value.update.return_value = 1
mock_db.session.query.side_effect = [binding_query, segment_query]
mock_db.session.scalar.return_value = SimpleNamespace(id="binding-1")
result = DocumentService.update_document_with_dataset_id(dataset, document_data, account_context)
@@ -1914,11 +1890,7 @@ class TestDocumentServiceSaveDocumentAdditionalBranches:
):
mock_redis.lock.return_value = _make_lock_context()
process_rule_cls.return_value = created_process_rule
upload_query = MagicMock()
upload_query.where.return_value.all.return_value = [SimpleNamespace(id="file-1", name="file.txt")]
existing_documents_query = MagicMock()
existing_documents_query.where.return_value.all.return_value = []
mock_db.session.query.side_effect = [upload_query, existing_documents_query]
mock_db.session.scalars.return_value.all.side_effect = [[SimpleNamespace(id="file-1", name="file.txt")], []]
documents, batch = DocumentService.save_document_with_dataset_id(dataset, knowledge_config, account_context)
@@ -1958,11 +1930,7 @@ class TestDocumentServiceSaveDocumentAdditionalBranches:
mock_redis.lock.return_value = _make_lock_context()
process_rule_cls.AUTOMATIC_RULES = DatasetProcessRule.AUTOMATIC_RULES
process_rule_cls.return_value = created_process_rule
upload_query = MagicMock()
upload_query.where.return_value.all.return_value = [SimpleNamespace(id="file-1", name="file.txt")]
existing_documents_query = MagicMock()
existing_documents_query.where.return_value.all.return_value = []
mock_db.session.query.side_effect = [upload_query, existing_documents_query]
mock_db.session.scalars.return_value.all.side_effect = [[SimpleNamespace(id="file-1", name="file.txt")], []]
DocumentService.save_document_with_dataset_id(dataset, knowledge_config, account_context)
@@ -1996,11 +1964,7 @@ class TestDocumentServiceSaveDocumentAdditionalBranches:
mock_redis.lock.return_value = _make_lock_context()
process_rule_cls.AUTOMATIC_RULES = DatasetProcessRule.AUTOMATIC_RULES
process_rule_cls.return_value = created_process_rule
upload_query = MagicMock()
upload_query.where.return_value.all.return_value = [SimpleNamespace(id="file-1", name="file.txt")]
existing_documents_query = MagicMock()
existing_documents_query.where.return_value.all.return_value = []
mock_db.session.query.side_effect = [upload_query, existing_documents_query]
mock_db.session.scalars.return_value.all.side_effect = [[SimpleNamespace(id="file-1", name="file.txt")], []]
DocumentService.save_document_with_dataset_id(dataset, knowledge_config, account_context)
@@ -2024,9 +1988,7 @@ class TestDocumentServiceSaveDocumentAdditionalBranches:
patch("services.dataset_service.secrets.randbelow", return_value=23),
):
mock_redis.lock.return_value = _make_lock_context()
upload_query = MagicMock()
upload_query.where.return_value.all.return_value = [SimpleNamespace(id="file-1", name="file.txt")]
mock_db.session.query.return_value = upload_query
mock_db.session.scalars.return_value.all.return_value = [SimpleNamespace(id="file-1", name="file.txt")]
with pytest.raises(FileNotExistsError, match="One or more files not found"):
DocumentService.save_document_with_dataset_id(dataset, knowledge_config, account_context)

View File

@@ -49,7 +49,7 @@ class TestSegmentServiceChildChunks:
patch("services.dataset_service.VectorService") as vector_service,
):
mock_redis.lock.return_value = _make_lock_context()
mock_db.session.query.return_value.where.return_value.scalar.return_value = 2
mock_db.session.scalar.return_value = 2
child_chunk = SegmentService.create_child_chunk("child content", segment, document, dataset)
@@ -75,7 +75,7 @@ class TestSegmentServiceChildChunks:
patch("services.dataset_service.VectorService") as vector_service,
):
mock_redis.lock.return_value = _make_lock_context()
mock_db.session.query.return_value.where.return_value.scalar.return_value = None
mock_db.session.scalar.return_value = None
vector_service.create_child_chunk_vector.side_effect = RuntimeError("vector failed")
with pytest.raises(ChildChunkIndexingError, match="vector failed"):
@@ -247,13 +247,13 @@ class TestSegmentServiceQueries:
child_chunk = _make_child_chunk()
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.where.return_value.first.return_value = child_chunk
mock_db.session.scalar.return_value = child_chunk
result = SegmentService.get_child_chunk_by_id("child-a", "tenant-1")
assert result is child_chunk
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.where.return_value.first.return_value = SimpleNamespace()
mock_db.session.scalar.return_value = SimpleNamespace()
result = SegmentService.get_child_chunk_by_id("child-a", "tenant-1")
assert result is None
@@ -295,13 +295,13 @@ class TestSegmentServiceQueries:
)
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.where.return_value.first.return_value = segment
mock_db.session.scalar.return_value = segment
result = SegmentService.get_segment_by_id("segment-1", "tenant-1")
assert result is segment
with patch("services.dataset_service.db") as mock_db:
mock_db.session.query.return_value.where.return_value.first.return_value = SimpleNamespace()
mock_db.session.scalar.return_value = SimpleNamespace()
result = SegmentService.get_segment_by_id("segment-1", "tenant-1")
assert result is None
@@ -401,11 +401,8 @@ class TestSegmentServiceMutations:
):
mock_redis.lock.return_value = _make_lock_context()
max_position_query = MagicMock()
max_position_query.where.return_value.scalar.return_value = 2
refresh_query = MagicMock()
refresh_query.where.return_value.first.return_value = refreshed_segment
mock_db.session.query.side_effect = [max_position_query, refresh_query]
mock_db.session.scalar.return_value = 2
mock_db.session.get.return_value = refreshed_segment
def add_side_effect(obj):
if obj.__class__.__name__ == "DocumentSegment" and getattr(obj, "id", None) is None:
@@ -461,7 +458,7 @@ class TestSegmentServiceMutations:
):
mock_redis.lock.return_value = _make_lock_context()
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model
mock_db.session.query.return_value.where.return_value.scalar.return_value = 1
mock_db.session.scalar.return_value = 1
vector_service.create_segments_vector.side_effect = RuntimeError("vector failed")
result = SegmentService.multi_create_segment(segments, document, dataset)
@@ -538,7 +535,7 @@ class TestSegmentServiceMutations:
patch("services.dataset_service.VectorService") as vector_service,
):
mock_redis.get.return_value = None
mock_db.session.query.return_value.where.return_value.first.return_value = refreshed_segment
mock_db.session.get.return_value = refreshed_segment
result = SegmentService.update_segment(args, segment, document, dataset)
@@ -574,13 +571,10 @@ class TestSegmentServiceMutations:
mock_redis.get.return_value = None
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model_instance
processing_rule_query = MagicMock()
processing_rule_query.where.return_value.first.return_value = processing_rule
summary_query = MagicMock()
summary_query.where.return_value.first.return_value = existing_summary
refreshed_query = MagicMock()
refreshed_query.where.return_value.first.return_value = refreshed_segment
mock_db.session.query.side_effect = [processing_rule_query, summary_query, refreshed_query]
# get calls: processing_rule, then refreshed_segment
mock_db.session.get.side_effect = [processing_rule, refreshed_segment]
# scalar call: existing_summary
mock_db.session.scalar.return_value = existing_summary
result = SegmentService.update_segment(args, segment, document, dataset)
@@ -621,11 +615,8 @@ class TestSegmentServiceMutations:
mock_redis.get.return_value = None
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model
summary_query = MagicMock()
summary_query.where.return_value.first.return_value = existing_summary
refreshed_query = MagicMock()
refreshed_query.where.return_value.first.return_value = refreshed_segment
mock_db.session.query.side_effect = [summary_query, refreshed_query]
mock_db.session.scalar.return_value = existing_summary
mock_db.session.get.return_value = refreshed_segment
result = SegmentService.update_segment(args, segment, document, dataset)
@@ -664,11 +655,8 @@ class TestSegmentServiceMutations:
mock_redis.get.return_value = None
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model
summary_query = MagicMock()
summary_query.where.return_value.first.return_value = existing_summary
refreshed_query = MagicMock()
refreshed_query.where.return_value.first.return_value = refreshed_segment
mock_db.session.query.side_effect = [summary_query, refreshed_query]
mock_db.session.scalar.return_value = existing_summary
mock_db.session.get.return_value = refreshed_segment
result = SegmentService.update_segment(args, segment, document, dataset)
@@ -688,7 +676,7 @@ class TestSegmentServiceMutations:
patch("services.dataset_service.delete_segment_from_index_task") as delete_task,
):
mock_redis.get.return_value = None
mock_db.session.query.return_value.where.return_value.all.return_value = [("child-1",), ("child-2",)]
mock_db.session.scalars.return_value.all.return_value = ["child-1", "child-2"]
SegmentService.delete_segment(segment, document, dataset)
@@ -727,15 +715,15 @@ class TestSegmentServiceMutations:
patch("services.dataset_service.delete_segment_from_index_task") as delete_task,
):
segments_query = MagicMock()
segments_query.with_entities.return_value.where.return_value.all.return_value = [
# execute().all() for segments_info (multi-column)
execute_result = MagicMock()
execute_result.all.return_value = [
("node-1", "segment-1", 2),
("node-2", "segment-2", 5),
]
child_query = MagicMock()
child_query.where.return_value.all.return_value = [("child-1",)]
delete_query = MagicMock()
delete_query.where.return_value.delete.return_value = 2
mock_db.session.query.side_effect = [segments_query, child_query, delete_query]
mock_db.session.execute.return_value = execute_result
# scalars() for child_node_ids
mock_db.session.scalars.return_value.all.return_value = ["child-1"]
SegmentService.delete_segments(["segment-1", "segment-2"], document, dataset)
@@ -748,7 +736,6 @@ class TestSegmentServiceMutations:
["segment-1", "segment-2"],
["child-1"],
)
delete_query.where.return_value.delete.assert_called_once()
mock_db.session.commit.assert_called_once()
def test_update_segments_status_enables_only_segments_without_indexing_cache(self):
@@ -868,7 +855,7 @@ class TestSegmentServiceAdditionalRegenerationBranches:
patch("services.dataset_service.VectorService") as vector_service,
):
mock_redis.get.return_value = None
mock_db.session.query.return_value.where.return_value.first.return_value = refreshed_segment
mock_db.session.get.return_value = refreshed_segment
result = SegmentService.update_segment(
SegmentUpdateArgs(content="question", answer="new answer"),
@@ -902,11 +889,8 @@ class TestSegmentServiceAdditionalRegenerationBranches:
):
mock_redis.get.return_value = None
model_manager_cls.for_tenant.return_value.get_model_instance.return_value = embedding_model
summary_query = MagicMock()
summary_query.where.return_value.first.return_value = None
refreshed_query = MagicMock()
refreshed_query.where.return_value.first.return_value = refreshed_segment
mock_db.session.query.side_effect = [summary_query, refreshed_query]
mock_db.session.scalar.return_value = None
mock_db.session.get.return_value = refreshed_segment
result = SegmentService.update_segment(
SegmentUpdateArgs(content="new question", answer="new answer", keywords=["kw-1"]),
@@ -951,13 +935,10 @@ class TestSegmentServiceAdditionalRegenerationBranches:
model_manager_cls.for_tenant.return_value.get_default_model_instance.return_value = embedding_model_instance
update_summary.side_effect = RuntimeError("summary failed")
processing_rule_query = MagicMock()
processing_rule_query.where.return_value.first.return_value = processing_rule
summary_query = MagicMock()
summary_query.where.return_value.first.return_value = existing_summary
refreshed_query = MagicMock()
refreshed_query.where.return_value.first.return_value = refreshed_segment
mock_db.session.query.side_effect = [processing_rule_query, summary_query, refreshed_query]
# get calls: processing_rule, then refreshed_segment
mock_db.session.get.side_effect = [processing_rule, refreshed_segment]
# scalar call: existing_summary
mock_db.session.scalar.return_value = existing_summary
result = SegmentService.update_segment(
SegmentUpdateArgs(content="new parent content", regenerate_child_chunks=True, summary="new summary"),
@@ -1000,7 +981,7 @@ class TestSegmentServiceAdditionalRegenerationBranches:
patch("services.dataset_service.VectorService") as vector_service,
):
mock_redis.get.return_value = None
mock_db.session.query.return_value.where.return_value.first.return_value = refreshed_segment
mock_db.session.get.return_value = refreshed_segment
result = SegmentService.update_segment(
SegmentUpdateArgs(content="same content", regenerate_child_chunks=True),

View File

@@ -57,6 +57,10 @@ class TestDatasourceProviderService:
q.count.return_value = 0
q.delete.return_value = 1
# Default values for select()-style calls (tests override per-case)
sess.scalar.return_value = None
sess.scalars.return_value.all.return_value = []
mock_cls.return_value.__enter__.return_value = sess
mock_cls.return_value.no_autoflush.__enter__.return_value = sess
@@ -183,11 +187,11 @@ class TestDatasourceProviderService:
# -----------------------------------------------------------------------
def test_should_return_true_when_tenant_oauth_params_enabled(self, service, mock_db_session):
mock_db_session.query().count.return_value = 1
mock_db_session.scalar.return_value = 1
assert service.is_tenant_oauth_params_enabled("t1", make_id()) is True
def test_should_return_false_when_tenant_oauth_params_disabled(self, service, mock_db_session):
mock_db_session.query().count.return_value = 0
mock_db_session.scalar.return_value = 0
assert service.is_tenant_oauth_params_enabled("t1", make_id()) is False
# -----------------------------------------------------------------------
@@ -401,7 +405,7 @@ class TestDatasourceProviderService:
def test_should_return_masked_credentials_when_mask_is_true(self, service, mock_db_session):
tenant_params = MagicMock()
tenant_params.client_params = {"k": "v"}
mock_db_session.query().first.return_value = tenant_params
mock_db_session.scalar.return_value = tenant_params
with patch.object(service, "get_oauth_encrypter", return_value=(self._enc, None)):
result = service.get_tenant_oauth_client("t1", make_id(), mask=True)
assert result == {"k": "mask"}
@@ -409,13 +413,13 @@ class TestDatasourceProviderService:
def test_should_return_decrypted_credentials_when_mask_is_false(self, service, mock_db_session):
tenant_params = MagicMock()
tenant_params.client_params = {"k": "v"}
mock_db_session.query().first.return_value = tenant_params
mock_db_session.scalar.return_value = tenant_params
with patch.object(service, "get_oauth_encrypter", return_value=(self._enc, None)):
result = service.get_tenant_oauth_client("t1", make_id(), mask=False)
assert result == {"k": "dec"}
def test_should_return_none_when_no_tenant_oauth_config_exists(self, service, mock_db_session):
mock_db_session.query().first.return_value = None
mock_db_session.scalar.return_value = None
assert service.get_tenant_oauth_client("t1", make_id()) is None
# -----------------------------------------------------------------------
@@ -616,7 +620,7 @@ class TestDatasourceProviderService:
# -----------------------------------------------------------------------
def test_should_return_empty_list_when_no_credentials_stored(self, service, mock_db_session):
mock_db_session.query().all.return_value = []
mock_db_session.scalars.return_value.all.return_value = []
assert service.list_datasource_credentials("t1", "prov", "org/plug") == []
def test_should_return_masked_credentials_list_when_credentials_exist(self, service, mock_db_session):
@@ -624,7 +628,7 @@ class TestDatasourceProviderService:
p.auth_type = "api_key"
p.encrypted_credentials = {"sk": "v"}
p.is_default = False
mock_db_session.query().all.return_value = [p]
mock_db_session.scalars.return_value.all.return_value = [p]
with patch.object(service, "extract_secret_variables", return_value=["sk"]):
result = service.list_datasource_credentials("t1", "prov", "org/plug")
assert len(result) == 1
@@ -676,14 +680,14 @@ class TestDatasourceProviderService:
# -----------------------------------------------------------------------
def test_should_return_empty_list_when_no_real_credentials_exist(self, service, mock_db_session):
mock_db_session.query().all.return_value = []
mock_db_session.scalars.return_value.all.return_value = []
assert service.get_real_datasource_credentials("t1", "prov", "org/plug") == []
def test_should_return_decrypted_credential_list_when_credentials_exist(self, service, mock_db_session):
p = MagicMock(spec=DatasourceProvider)
p.auth_type = "api_key"
p.encrypted_credentials = {"sk": "v"}
mock_db_session.query().all.return_value = [p]
mock_db_session.scalars.return_value.all.return_value = [p]
with patch.object(service, "extract_secret_variables", return_value=["sk"]):
result = service.get_real_datasource_credentials("t1", "prov", "org/plug")
assert len(result) == 1
@@ -751,13 +755,13 @@ class TestDatasourceProviderService:
def test_should_delete_provider_and_commit_when_found(self, service, mock_db_session):
p = MagicMock(spec=DatasourceProvider)
mock_db_session.query().first.return_value = p
mock_db_session.scalar.return_value = p
service.remove_datasource_credentials("t1", "id", "prov", "org/plug")
mock_db_session.delete.assert_called_once_with(p)
mock_db_session.commit.assert_called_once()
def test_should_do_nothing_when_credential_not_found_on_remove(self, service, mock_db_session):
"""No error raised; no delete called when record doesn't exist (lines 994 branch)."""
mock_db_session.query().first.return_value = None
mock_db_session.scalar.return_value = None
service.remove_datasource_credentials("t1", "id", "prov", "org/plug")
mock_db_session.delete.assert_not_called()

389
api/uv.lock generated
View File

@@ -143,16 +143,16 @@ sdist = { url = "https://files.pythonhosted.org/packages/ab/98/d7111245f17935bf7
[[package]]
name = "alibabacloud-gpdb20160503"
version = "5.1.0"
version = "5.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "alibabacloud-credentials" },
{ name = "alibabacloud-tea-openapi" },
{ name = "darabonba-core" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b3/36/69333c7fb7fb5267f338371b14fdd8dbdd503717c97bbc7a6419d155ab4c/alibabacloud_gpdb20160503-5.1.0.tar.gz", hash = "sha256:086ec6d5e39b64f54d0e44bb3fd4fde1a4822a53eb9f6ff7464dff7d19b07b63", size = 295641, upload-time = "2026-03-19T10:09:02.444Z" }
sdist = { url = "https://files.pythonhosted.org/packages/95/ba/606601479707f90138be38493b7b4d8457da10bbc58e84cd000108468a44/alibabacloud_gpdb20160503-5.2.0.tar.gz", hash = "sha256:d8f41bfcdc189f9d0283a87df2c3fa26a27617bc2d604652c7763bf9dd3ba22d", size = 299202, upload-time = "2026-04-02T19:27:25.639Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/68/7f/a91a2f9ad97c92fa9a6981587ea0ff789240cea05b17b17b7c244e5bac64/alibabacloud_gpdb20160503-5.1.0-py3-none-any.whl", hash = "sha256:580e4579285a54c7f04570782e0f60423a1997568684187fe88e4110acfb640e", size = 848784, upload-time = "2026-03-19T10:09:00.72Z" },
{ url = "https://files.pythonhosted.org/packages/8f/a3/eee56773d22b8ee4039f2a4754bcf957631302d2e59e5b110cdd768e25ac/alibabacloud_gpdb20160503-5.2.0-py3-none-any.whl", hash = "sha256:b2bad9d2f7e0247985120c25f6cd42e75447fb9157dff817f64eae1734abcbd7", size = 857108, upload-time = "2026-04-02T19:27:24.446Z" },
]
[[package]]
@@ -425,19 +425,19 @@ wheels = [
[[package]]
name = "basedpyright"
version = "1.38.4"
version = "1.39.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "nodejs-wheel-binaries" },
]
sdist = { url = "https://files.pythonhosted.org/packages/08/b4/26cb812eaf8ab56909c792c005fe1690706aef6f21d61107639e46e9c54c/basedpyright-1.38.4.tar.gz", hash = "sha256:8e7d4f37ffb6106621e06b9355025009cdf5b48f71c592432dd2dd304bf55e70", size = 25354730, upload-time = "2026-03-25T13:50:44.353Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ac/f4/4a77cc1ffb3dab7391642cde30163961d8ee973e9e6b6740c7d15aa3d3ba/basedpyright-1.39.0.tar.gz", hash = "sha256:6666f51c378c7ac45877c4c1c7041ee0b5b83d755ebc82f898f47b6fafe0cc4f", size = 25357403, upload-time = "2026-04-01T12:27:41.92Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/62/0b/3f95fd47def42479e61077523d3752086d5c12009192a7f1c9fd5507e687/basedpyright-1.38.4-py3-none-any.whl", hash = "sha256:90aa067cf3e8a3c17ad5836a72b9e1f046bc72a4ad57d928473d9368c9cd07a2", size = 12352258, upload-time = "2026-03-25T13:50:41.059Z" },
{ url = "https://files.pythonhosted.org/packages/97/47/08145d1bcc3083ed20059bdecbde404bd767f91b91e2764ec01cffec9f4b/basedpyright-1.39.0-py3-none-any.whl", hash = "sha256:91b8ad50bc85ee4a985b928f9368c35c99eee5a56c44e99b2442fa12ecc3d670", size = 12353868, upload-time = "2026-04-01T12:27:38.495Z" },
]
[[package]]
name = "bce-python-sdk"
version = "0.9.67"
version = "0.9.68"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "crc32c" },
@@ -445,9 +445,9 @@ dependencies = [
{ name = "pycryptodome" },
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b5/b9/5140cc02832fe3a7394c52949796d43f8c1f635aa016100f857f504e0348/bce_python_sdk-0.9.67.tar.gz", hash = "sha256:2c673d757c5c8952f1be6611da4ab77a63ecabaa3ff22b11531f46845ac99e58", size = 295251, upload-time = "2026-03-24T14:10:07.086Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ca/7c/8b4d9128e571f898f9f177dc9f41e31692d8ddb08a963b0c576f219d1304/bce_python_sdk-0.9.68.tar.gz", hash = "sha256:adf182868ed25e53cc3c1573dad9a2b1e9b72ed1ffd0d3ef326f5fa93da7cfa6", size = 296349, upload-time = "2026-03-30T02:57:32.948Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d4/a9/a58a63e2756e5d01901595af58c673f68de7621f28d71007479e00f45a6c/bce_python_sdk-0.9.67-py3-none-any.whl", hash = "sha256:3054879d098a92ceeb4b9ac1e64d2c658120a5a10e8e630f22410564b2170bf0", size = 410854, upload-time = "2026-03-24T14:09:54.29Z" },
{ url = "https://files.pythonhosted.org/packages/fa/4e/eaaba9264667d675c3de76485dc511f0f233c31bada8752411f7fc5170be/bce_python_sdk-0.9.68-py3-none-any.whl", hash = "sha256:fcb484db4a54aa2c4675834c10bc6c37d42929fd138faaf6c01f933d8fa927ed", size = 411932, upload-time = "2026-03-30T02:57:27.847Z" },
]
[[package]]
@@ -551,29 +551,29 @@ wheels = [
[[package]]
name = "boto3"
version = "1.42.78"
version = "1.42.83"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "botocore" },
{ name = "jmespath" },
{ name = "s3transfer" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a8/2b/ebdad075934cf6bb78bf81fe31d83339bcd804ad6c856f7341376cbc88b6/boto3-1.42.78.tar.gz", hash = "sha256:cef2ebdb9be5c0e96822f8d3941ac4b816c90a5737a7ffb901d664c808964b63", size = 112789, upload-time = "2026-03-27T19:28:07.58Z" }
sdist = { url = "https://files.pythonhosted.org/packages/9f/87/1ed88eaa1e814841a37e71fee74c2b74341d14b791c0c6038b7ba914bea1/boto3-1.42.83.tar.gz", hash = "sha256:cc5621e603982cb3145b7f6c9970e02e297a1a0eb94637cc7f7b69d3017640ee", size = 112719, upload-time = "2026-04-03T19:34:21.254Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/57/bb/1f6dade1f1e86858bef7bd332bc8106c445f2dbabec7b32ab5d7d118c9b6/boto3-1.42.78-py3-none-any.whl", hash = "sha256:480a34a077484a5ca60124dfd150ba3ea6517fc89963a679e45b30c6db614d26", size = 140556, upload-time = "2026-03-27T19:28:06.125Z" },
{ url = "https://files.pythonhosted.org/packages/c1/b1/8a066bc8f02937d49783c0b3948ab951d8284e6fde436cab9f359dbd4d93/boto3-1.42.83-py3-none-any.whl", hash = "sha256:544846fdb10585bb7837e409868e8e04c6b372fa04479ba1597ce82cf1242076", size = 140555, upload-time = "2026-04-03T19:34:17.935Z" },
]
[[package]]
name = "boto3-stubs"
version = "1.42.78"
version = "1.42.83"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "botocore-stubs" },
{ name = "types-s3transfer" },
]
sdist = { url = "https://files.pythonhosted.org/packages/03/16/4bdb3c1f69bf7b97dd8b22fe5b007e9da67ba3f00ed10e47146f5fd9d0ff/boto3_stubs-1.42.78.tar.gz", hash = "sha256:423335b8ce9a935e404054978589cdb98d9fa1d4bd46073d6821bf1c3fad8ca7", size = 101602, upload-time = "2026-03-27T19:35:51.149Z" }
sdist = { url = "https://files.pythonhosted.org/packages/2d/fe/6c43a048074d8567db38befe51bf0b770e8456aa2b91ce8fe6758f29ec3d/boto3_stubs-1.42.83.tar.gz", hash = "sha256:1ecbd88f4ae35764b9ea3579ca1e851b67ea0a73a442cb406de277fc1478daeb", size = 102188, upload-time = "2026-04-03T19:54:20.613Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/22/d5/bdedd4951c795899ac5a1f0b88d81b9e2c6333cb87457f2edd11ef3b7b7b/boto3_stubs-1.42.78-py3-none-any.whl", hash = "sha256:6ed07e734174751da8d01031d9ede8d81a88e4338d9e6b00ce7a6bc870075372", size = 70161, upload-time = "2026-03-27T19:35:46.336Z" },
{ url = "https://files.pythonhosted.org/packages/9c/4d/eee0444fd466ebe69fdb61cc1f24b97d8e21e9e545865f7c1d846294a413/boto3_stubs-1.42.83-py3-none-any.whl", hash = "sha256:06185ca5f11a1edc880286f5f33779a2b08be356bf270bf1ec128d0819782a20", size = 70448, upload-time = "2026-04-03T19:54:16.315Z" },
]
[package.optional-dependencies]
@@ -583,16 +583,16 @@ bedrock-runtime = [
[[package]]
name = "botocore"
version = "1.42.78"
version = "1.42.83"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jmespath" },
{ name = "python-dateutil" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/67/8e/cdb34c8ca71216d214e049ada2148ee08bcda12b1ac72af3a720dea300ff/botocore-1.42.78.tar.gz", hash = "sha256:61cbd49728e23f68cfd945406ab40044d49abed143362f7ffa4a4f4bd4311791", size = 15023592, upload-time = "2026-03-27T19:27:57.122Z" }
sdist = { url = "https://files.pythonhosted.org/packages/4e/01/b46a3f8b6e9362258f78f1890db1a96d4ed73214d6a36420dc158dcfd221/botocore-1.42.83.tar.gz", hash = "sha256:34bc8cb64b17ac17f8901f073fe4fc9572a5cac9393a37b2b3ea372a83b87f4a", size = 15140337, upload-time = "2026-04-03T19:34:08.779Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/54/72/94bba1a375d45c685b00e051b56142359547837086a83861d76f6aec26f4/botocore-1.42.78-py3-none-any.whl", hash = "sha256:038ab63c7f898e8b5db58cb6a45e4da56c31dd984e7e995839a3540c735564ea", size = 14701729, upload-time = "2026-03-27T19:27:54.05Z" },
{ url = "https://files.pythonhosted.org/packages/a3/97/0d6f50822dc8c1df7f3eadb0bc6822fc0f98f02287c4efc7c7c88fde129a/botocore-1.42.83-py3-none-any.whl", hash = "sha256:ec0c3ecb3772936ed22a3bdda09883b34858933f71004686d460d829bab39d8e", size = 14818388, upload-time = "2026-04-03T19:34:03.333Z" },
]
[[package]]
@@ -927,7 +927,7 @@ wheels = [
[[package]]
name = "clickhouse-connect"
version = "0.15.0"
version = "0.15.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
@@ -936,16 +936,16 @@ dependencies = [
{ name = "urllib3" },
{ name = "zstandard" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ec/59/c0b0a2c2e4c204e5baeca4917a95cc95add651da3cec86ec464a8e54cfa0/clickhouse_connect-0.15.0.tar.gz", hash = "sha256:529fcf072df335d18ae16339d99389190f4bd543067dcdc174541c7a9c622ef5", size = 126344, upload-time = "2026-03-26T18:34:52.316Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a5/b1/a17eb4409e2741286ccdac06b6ea15db178cdf1f0ed997bbf9ad3448f78e/clickhouse_connect-0.15.1.tar.gz", hash = "sha256:f2aaf5fc0bb3098c24f0d8ca7e4ecbe605a26957481dfca2c8cef9d1fad7b7ca", size = 126840, upload-time = "2026-03-30T18:58:31.113Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f5/be/86e149c60822caed29e4435acac4fc73e20fddfb0b56ea6452bc7a08ab10/clickhouse_connect-0.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d51f49694e9007564bfd8dac51a1f9e60b94d6c93a07eb4027113a2e62bbb384", size = 286680, upload-time = "2026-03-26T18:33:30.219Z" },
{ url = "https://files.pythonhosted.org/packages/aa/65/c38cc5028afa2ccd9e8ff65611434063c0c5c1b6edadc507dbbc80a09bfd/clickhouse_connect-0.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a48fbad9ebc2b6d1cd01d1f9b5d6740081f1c84f1aacc9f91651be949f6b6ed", size = 277579, upload-time = "2026-03-26T18:33:31.474Z" },
{ url = "https://files.pythonhosted.org/packages/0a/ef/c8b2ef597fefd04e8b7c017c991552162cb89b7cb73bfdd6225b1c79e2fe/clickhouse_connect-0.15.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36e1ae470b94cc56d270461c8626c8fd4dac16e6c1ffa8477f21c012462e22cf", size = 1121630, upload-time = "2026-03-26T18:33:32.983Z" },
{ url = "https://files.pythonhosted.org/packages/de/f7/1b71819e825d44582c014a489618170b03ccdac3c9b710dfd56445f1c017/clickhouse_connect-0.15.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fa97f0ae8eb069a451d8577342dffeef5dc308a0eac7dba1809008c761e720c7", size = 1137988, upload-time = "2026-03-26T18:33:34.585Z" },
{ url = "https://files.pythonhosted.org/packages/7f/1f/41002b8d5ff146dc2835dc6b6f690bc361bd9a94b6195872abcb922f3788/clickhouse_connect-0.15.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b5b3baf70009174a4df9c8356c96d03e1c2dbf0d8b29f1b3270a641a59399b61", size = 1101376, upload-time = "2026-03-26T18:33:36.258Z" },
{ url = "https://files.pythonhosted.org/packages/2c/8a/bd090dab73fc9c47efcaaeb152a77610b9d233cd88ea73cf4535f9bac2a6/clickhouse_connect-0.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:af3fba93fd2efa8f856f3a88a6a710e06005fa48b6b6b0f116d462a4021957e2", size = 1133211, upload-time = "2026-03-26T18:33:38.003Z" },
{ url = "https://files.pythonhosted.org/packages/f1/8d/cf4eee7225bdee85a9b8a88c5bfff42ce48f37ee9277930ac8bc76f47126/clickhouse_connect-0.15.0-cp312-cp312-win32.whl", hash = "sha256:86ca76f8acaf7f3f6530e3e4139e174d54c4674910c69f4277d1b9cdf7c1cc98", size = 256767, upload-time = "2026-03-26T18:33:39.55Z" },
{ url = "https://files.pythonhosted.org/packages/26/6e/f5a2cb1e4624dfd77c1e226239360a9e3690db8056a0027bda2ab87d0085/clickhouse_connect-0.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a471d9a9cf06f0a4e90784547b6a2acb066b0d8642dfea9866960c4bdde6959", size = 275404, upload-time = "2026-03-26T18:33:40.885Z" },
{ url = "https://files.pythonhosted.org/packages/d1/b6/d0881ac34617b13ad555a4749aae042e0242bedbf8a258373719089885cd/clickhouse_connect-0.15.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0bef871fb9803ae82b4dc1f797b6e784de0a4dec351591191a0c1a6008548284", size = 287187, upload-time = "2026-03-30T18:57:18.962Z" },
{ url = "https://files.pythonhosted.org/packages/d6/6e/27823c38e54247ea22d96b3f4fde32831a10e5203761c0e2893bc2fc587f/clickhouse_connect-0.15.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:df93fa024d6ed46dbc3182b6202180be4cf2bbe9c331dcb21f85963b1b3fd1e5", size = 278086, upload-time = "2026-03-30T18:57:20.104Z" },
{ url = "https://files.pythonhosted.org/packages/6a/88/f1096e8b4f08e628674490e5d186c7bf09174bbbc5fefa530e28e6b39da3/clickhouse_connect-0.15.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6e98c0cf53db3b24dc0ff9f522fcf13205b1d191c632567d1744fbd4671741f", size = 1122144, upload-time = "2026-03-30T18:57:21.205Z" },
{ url = "https://files.pythonhosted.org/packages/af/e5/027f8b94b54a39dcdf9b314a7cd66cb882d8ba166efc584908997c6d5acb/clickhouse_connect-0.15.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4bf70933ab860bd2f0a872db624603706bed400c915c7aeef382956cf8ebbdf3", size = 1138503, upload-time = "2026-03-30T18:57:22.554Z" },
{ url = "https://files.pythonhosted.org/packages/cb/46/a830bcb46f0081630a88cb932c29804553728645c17fd1cff874fe71b1ba/clickhouse_connect-0.15.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:60aa8c9c775d22db324260265f4c656f803fbc71de9193ef83cf8d8d0ef6ab9a", size = 1101890, upload-time = "2026-03-30T18:57:23.788Z" },
{ url = "https://files.pythonhosted.org/packages/4c/05/91cf7cc817ff91bc96f1e2afc84346b42e88831c9c0a7fd56e78907b5320/clickhouse_connect-0.15.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5462bad97d97919a4ed230e2ef28d0b76bec0354a343218647830aac7744a43b", size = 1133723, upload-time = "2026-03-30T18:57:25.105Z" },
{ url = "https://files.pythonhosted.org/packages/d7/b0/e7a71b96b7bc1df6bbacf9fa71f0cc3b8f195f58386535b72aa92304b1fb/clickhouse_connect-0.15.1-cp312-cp312-win32.whl", hash = "sha256:e1a157205efd47884c22bfe061fc6f8c9aea844929ee755c47b446093805d21a", size = 257279, upload-time = "2026-03-30T18:57:26.288Z" },
{ url = "https://files.pythonhosted.org/packages/b9/03/0ef116ef0efc6861d6e9674419709b9873603f330f95853220a145748576/clickhouse_connect-0.15.1-cp312-cp312-win_amd64.whl", hash = "sha256:5de299ada0f7eb9090bb5a6304d8d78163d4d9cc8eb04d8f552bfb82bafb61d5", size = 275916, upload-time = "2026-03-30T18:57:27.372Z" },
]
[[package]]
@@ -1051,16 +1051,19 @@ wheels = [
[[package]]
name = "couchbase"
version = "4.5.0"
version = "4.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/73/2f/8f92e743a91c2f4e2ebad0bcfc31ef386c817c64415d89bf44e64dde227a/couchbase-4.5.0.tar.gz", hash = "sha256:fb74386ea5e807ae12cfa294fa6740fe6be3ecaf3bb9ce4fb9ea73706ed05982", size = 6562752, upload-time = "2025-09-30T01:27:37.423Z" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8d/be/1e6974158348dfa634ebbc32b76448f84945e15494852e0cea85607825b5/couchbase-4.6.0.tar.gz", hash = "sha256:61229d6112597f35f6aca687c255e12f495bde9051cd36063b4fddd532ab8f7f", size = 6697937, upload-time = "2026-03-31T23:29:50.602Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2f/bc/3b00403edd8b188a93f48b8231dbf7faf7b40d318d3e73bb0e68c4965bbd/couchbase-4.5.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:be1ac2bf7cbccf28eebd7fa8b1d7199fbe84c96b0f7f2c0d69963b1d6ce53985", size = 5128307, upload-time = "2025-09-30T01:25:53.615Z" },
{ url = "https://files.pythonhosted.org/packages/7f/52/2ccfa8c8650cc341813713a47eeeb8ad13a25e25b0f4747d224106602a24/couchbase-4.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:035c394d38297c484bd57fc92b27f6a571a36ab5675b4ec873fd15bf65e8f28e", size = 4326149, upload-time = "2025-09-30T01:25:57.524Z" },
{ url = "https://files.pythonhosted.org/packages/32/80/fe3f074f321474c824ec67b97c5c4aa99047d45c777bb29353f9397c6604/couchbase-4.5.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:117685f6827abbc332e151625b0a9890c2fafe0d3c3d9e564b903d5c411abe5d", size = 5184623, upload-time = "2025-09-30T01:26:02.166Z" },
{ url = "https://files.pythonhosted.org/packages/f3/e5/86381f49e4cf1c6db23c397b6a32b532cd4df7b9975b0cd2da3db2ffe269/couchbase-4.5.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:632a918f81a7373832991b79b6ab429e56ef4ff68dfb3517af03f0e2be7e3e4f", size = 5446579, upload-time = "2025-09-30T01:26:09.39Z" },
{ url = "https://files.pythonhosted.org/packages/c8/85/a68d04233a279e419062ceb1c6866b61852c016d1854cd09cde7f00bc53c/couchbase-4.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:67fc0fd1a4535b5be093f834116a70fb6609085399e6b63539241b919da737b7", size = 6104619, upload-time = "2025-09-30T01:26:15.525Z" },
{ url = "https://files.pythonhosted.org/packages/56/8c/0511bac5dd2d998aeabcfba6a2804ecd9eb3d83f9d21cc3293a56fbc70a8/couchbase-4.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:02199b4528f3106c231c00aaf85b7cc6723accbc654b903bb2027f78a04d12f4", size = 4274424, upload-time = "2025-09-30T01:26:21.484Z" },
{ url = "https://files.pythonhosted.org/packages/84/dc/bea38235bfabd4fcf3d11e05955e38311869f173328475c369199a6b076b/couchbase-4.6.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:8d1244fd0581cc23aaf2fa3148e9c2d8cfba1d5489c123ee6bf975624d861f7a", size = 5521692, upload-time = "2026-03-31T23:29:07.933Z" },
{ url = "https://files.pythonhosted.org/packages/d1/18/cd1c751005cb67d3e2b090cd11626b8922b9d6a882516e57c1a3aedeed18/couchbase-4.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8efa57a86e35ceb7ae249cfa192e3f2c32a4a5b37098830196d3936994d55a67", size = 4667116, upload-time = "2026-03-31T23:29:10.706Z" },
{ url = "https://files.pythonhosted.org/packages/64/e9/1212bd59347e1cecdb02c6735704650e25f9195b634bf8df73d3382ffa14/couchbase-4.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7106e334acdacab64ae3530a181b8fabf0a1b91e7a1a1e41e259f995bdc78330", size = 5511873, upload-time = "2026-03-31T23:29:13.414Z" },
{ url = "https://files.pythonhosted.org/packages/86/a3/f676ee10f8ea2370700c1c4d03cbe8c3064a3e0cf887941a39333f3bdd97/couchbase-4.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c84e625f3e2ac895fafd2053fa50af2fbb63ab3cdd812eff2bc4171d9f934bde", size = 5782875, upload-time = "2026-03-31T23:29:16.258Z" },
{ url = "https://files.pythonhosted.org/packages/c5/34/45d167bc18d5d91b9ff95dcd4e24df60d424567611d48191a29bf19fdbc8/couchbase-4.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2619c966b308948900e51f1e4e1488e09ad50b119b1d5c31b697870aa82a6ce", size = 7234591, upload-time = "2026-03-31T23:29:19.148Z" },
{ url = "https://files.pythonhosted.org/packages/41/1f/cc4d1503463cf243959532424a30e79f34aadafde5bcb21754b19b2b9dde/couchbase-4.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:f64a017416958f10a07312a6d39c9b362827854de173fdef9bffdac71c8f3345", size = 4517477, upload-time = "2026-03-31T23:29:21.955Z" },
]
[[package]]
@@ -1473,7 +1476,7 @@ requires-dist = [
{ name = "azure-identity", specifier = "==1.25.3" },
{ name = "beautifulsoup4", specifier = "==4.14.3" },
{ name = "bleach", specifier = "~=6.3.0" },
{ name = "boto3", specifier = "==1.42.78" },
{ name = "boto3", specifier = "==1.42.83" },
{ name = "bs4", specifier = "~=0.0.1" },
{ name = "cachetools", specifier = "~=5.3.0" },
{ name = "celery", specifier = "~=5.6.2" },
@@ -1481,7 +1484,7 @@ requires-dist = [
{ name = "croniter", specifier = ">=6.0.0" },
{ name = "fastopenapi", extras = ["flask"], specifier = ">=0.7.0" },
{ name = "flask", specifier = "~=3.1.2" },
{ name = "flask-compress", specifier = ">=1.17,<1.24" },
{ name = "flask-compress", specifier = ">=1.17,<1.25" },
{ name = "flask-cors", specifier = "~=6.0.0" },
{ name = "flask-login", specifier = "~=0.6.3" },
{ name = "flask-migrate", specifier = "~=4.1.0" },
@@ -1493,7 +1496,7 @@ requires-dist = [
{ name = "google-api-core", specifier = ">=2.19.1" },
{ name = "google-api-python-client", specifier = "==2.193.0" },
{ name = "google-auth", specifier = ">=2.47.0" },
{ name = "google-auth-httplib2", specifier = "==0.3.0" },
{ name = "google-auth-httplib2", specifier = "==0.3.1" },
{ name = "google-cloud-aiplatform", specifier = ">=1.123.0" },
{ name = "googleapis-common-protos", specifier = ">=1.65.0" },
{ name = "graphon", specifier = ">=0.1.2" },
@@ -1558,16 +1561,16 @@ requires-dist = [
[package.metadata.requires-dev]
dev = [
{ name = "basedpyright", specifier = "~=1.38.2" },
{ name = "basedpyright", specifier = "~=1.39.0" },
{ name = "boto3-stubs", specifier = ">=1.38.20" },
{ name = "celery-types", specifier = ">=0.23.0" },
{ name = "coverage", specifier = "~=7.13.4" },
{ name = "dotenv-linter", specifier = "~=0.7.0" },
{ name = "faker", specifier = "~=40.11.0" },
{ name = "faker", specifier = "~=40.12.0" },
{ name = "hypothesis", specifier = ">=6.131.15" },
{ name = "import-linter", specifier = ">=2.3" },
{ name = "lxml-stubs", specifier = "~=0.5.1" },
{ name = "mypy", specifier = "~=1.19.1" },
{ name = "mypy", specifier = "~=1.20.0" },
{ name = "pandas-stubs", specifier = "~=3.0.0" },
{ name = "pyrefly", specifier = ">=0.59.1" },
{ name = "pytest", specifier = "~=9.0.2" },
@@ -1601,10 +1604,10 @@ dev = [
{ name = "types-olefile", specifier = "~=0.47.0" },
{ name = "types-openpyxl", specifier = "~=3.1.5" },
{ name = "types-pexpect", specifier = "~=4.9.0" },
{ name = "types-protobuf", specifier = "~=6.32.1" },
{ name = "types-protobuf", specifier = "~=7.34.1" },
{ name = "types-psutil", specifier = "~=7.2.2" },
{ name = "types-psycopg2", specifier = "~=2.9.21" },
{ name = "types-pygments", specifier = "~=2.19.0" },
{ name = "types-pygments", specifier = "~=2.20.0" },
{ name = "types-pymysql", specifier = "~=1.1.0" },
{ name = "types-pyopenssl", specifier = ">=24.1.0" },
{ name = "types-python-dateutil", specifier = "~=2.9.0" },
@@ -1612,7 +1615,7 @@ dev = [
{ name = "types-pywin32", specifier = "~=311.0.0" },
{ name = "types-pyyaml", specifier = "~=6.0.12" },
{ name = "types-redis", specifier = ">=4.6.0.20241004" },
{ name = "types-regex", specifier = "~=2026.3.32" },
{ name = "types-regex", specifier = "~=2026.4.4" },
{ name = "types-setuptools", specifier = ">=80.9.0" },
{ name = "types-shapely", specifier = "~=2.1.0" },
{ name = "types-simplejson", specifier = ">=3.20.0" },
@@ -1637,12 +1640,12 @@ tools = [
{ name = "nltk", specifier = "~=3.9.1" },
]
vdb = [
{ name = "alibabacloud-gpdb20160503", specifier = "~=5.1.0" },
{ name = "alibabacloud-gpdb20160503", specifier = "~=5.2.0" },
{ name = "alibabacloud-tea-openapi", specifier = "~=0.4.3" },
{ name = "chromadb", specifier = "==0.5.20" },
{ name = "clickhouse-connect", specifier = "~=0.15.0" },
{ name = "clickzetta-connector-python", specifier = ">=0.8.102" },
{ name = "couchbase", specifier = "~=4.5.0" },
{ name = "couchbase", specifier = "~=4.6.0" },
{ name = "elasticsearch", specifier = "==8.14.0" },
{ name = "holo-search-sdk", specifier = ">=0.4.1" },
{ name = "intersystems-irispython", specifier = ">=5.1.0" },
@@ -1653,10 +1656,10 @@ vdb = [
{ name = "pgvecto-rs", extras = ["sqlalchemy"], specifier = "~=0.2.1" },
{ name = "pgvector", specifier = "==0.4.2" },
{ name = "pymilvus", specifier = "~=2.6.10" },
{ name = "pymochow", specifier = "==2.3.6" },
{ name = "pymochow", specifier = "==2.4.0" },
{ name = "pyobvector", specifier = "~=0.2.17" },
{ name = "qdrant-client", specifier = "==1.9.0" },
{ name = "tablestore", specifier = "==6.4.2" },
{ name = "tablestore", specifier = "==6.4.3" },
{ name = "tcvectordb", specifier = "~=2.1.0" },
{ name = "tidb-vector", specifier = "==0.0.15" },
{ name = "upstash-vector", specifier = "==0.8.0" },
@@ -1825,14 +1828,14 @@ wheels = [
[[package]]
name = "faker"
version = "40.11.1"
version = "40.12.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "tzdata", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fa/e5/b16bf568a2f20fe7423282db4a4059dbcadef70e9029c1c106836f8edd84/faker-40.11.1.tar.gz", hash = "sha256:61965046e79e8cfde4337d243eac04c0d31481a7c010033141103b43f603100c", size = 1957415, upload-time = "2026-03-23T14:05:50.233Z" }
sdist = { url = "https://files.pythonhosted.org/packages/66/c1/f8224fe97fea2f98d455c22438c1b09b10e14ef2cb95ae4f7cec9aa59659/faker-40.12.0.tar.gz", hash = "sha256:58b5a9054c367bd5fb2e948634105364cc570e78a98a8e5161a74691c45f158f", size = 1962003, upload-time = "2026-03-30T18:00:56.596Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fc/ec/3c4b78eb0d2f6a81fb8cc9286745845bff661e6815741eff7a6ac5fcc9ea/faker-40.11.1-py3-none-any.whl", hash = "sha256:3af3a213ba8fb33ce6ba2af7aef2ac91363dae35d0cec0b2b0337d189e5bee2a", size = 1989484, upload-time = "2026-03-23T14:05:48.793Z" },
{ url = "https://files.pythonhosted.org/packages/2b/5c/39452a6b6aa76ffa518fa7308e1975b37e9ba77caa6172a69d61e7180221/faker-40.12.0-py3-none-any.whl", hash = "sha256:6238a4058a8b581892e3d78fe5fdfa7568739e1c8283e4ede83f1dde0bfc1a3b", size = 1994601, upload-time = "2026-03-30T18:00:54.804Z" },
]
[[package]]
@@ -1933,7 +1936,7 @@ wheels = [
[[package]]
name = "flask-compress"
version = "1.23"
version = "1.24"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "backports-zstd" },
@@ -1941,9 +1944,9 @@ dependencies = [
{ name = "brotlicffi", marker = "platform_python_implementation == 'PyPy'" },
{ name = "flask" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5d/e4/2b54da5cf8ae5d38a495ca20154aa40d6d2ee6dc1756429a82856181aa2c/flask_compress-1.23.tar.gz", hash = "sha256:5580935b422e3f136b9a90909e4b1015ac2b29c9aebe0f8733b790fde461c545", size = 20135, upload-time = "2025-11-06T09:06:29.56Z" }
sdist = { url = "https://files.pythonhosted.org/packages/c2/de/2ae0118051b38ab53437328074a696f3ee7d61e15bf7454b78a3088e5bc3/flask_compress-1.24.tar.gz", hash = "sha256:14097cefe59ecb3e466d52a6aeb62f34f125a9f7dadf1f33a53e430ce4a50f31", size = 21089, upload-time = "2026-03-31T15:01:39.005Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7d/9a/bebdcdba82d2786b33cd9f5fd65b8d309797c27176a9c4f357c1150c4ac0/flask_compress-1.23-py3-none-any.whl", hash = "sha256:52108afb4d133a5aab9809e6ac3c085ed7b9c788c75c6846c129faa28468f08c", size = 10515, upload-time = "2025-11-06T09:06:28.691Z" },
{ url = "https://files.pythonhosted.org/packages/4c/0f/fe51e0b2301bbd429af44273a923ff92127b18d13abba5ae5a1d60e8e497/flask_compress-1.24-py3-none-any.whl", hash = "sha256:1e63668eb6e3242bd4f6ad98825a924e3984409be90c125477893d586007d00c", size = 11033, upload-time = "2026-03-31T15:01:37.302Z" },
]
[[package]]
@@ -2157,7 +2160,7 @@ wheels = [
[[package]]
name = "google-api-core"
version = "2.30.0"
version = "2.30.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "google-auth" },
@@ -2166,9 +2169,9 @@ dependencies = [
{ name = "protobuf" },
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/22/98/586ec94553b569080caef635f98a3723db36a38eac0e3d7eb3ea9d2e4b9a/google_api_core-2.30.0.tar.gz", hash = "sha256:02edfa9fab31e17fc0befb5f161b3bf93c9096d99aed584625f38065c511ad9b", size = 176959, upload-time = "2026-02-18T20:28:11.926Z" }
sdist = { url = "https://files.pythonhosted.org/packages/1a/2e/83ca41eb400eb228f9279ec14ed66f6475218b59af4c6daec2d5a509fe83/google_api_core-2.30.2.tar.gz", hash = "sha256:9a8113e1a88bdc09a7ff629707f2214d98d61c7f6ceb0ea38c42a095d02dc0f9", size = 176862, upload-time = "2026-04-02T21:23:44.876Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/45/27/09c33d67f7e0dcf06d7ac17d196594e66989299374bfb0d4331d1038e76b/google_api_core-2.30.0-py3-none-any.whl", hash = "sha256:80be49ee937ff9aba0fd79a6eddfde35fe658b9953ab9b79c57dd7061afa8df5", size = 173288, upload-time = "2026-02-18T20:28:10.367Z" },
{ url = "https://files.pythonhosted.org/packages/84/e1/ebd5100cbb202e561c0c8b59e485ef3bd63fa9beb610f3fdcaea443f0288/google_api_core-2.30.2-py3-none-any.whl", hash = "sha256:a4c226766d6af2580577db1f1a51bf53cd262f722b49731ce7414c43068a9594", size = 173236, upload-time = "2026-04-02T21:23:06.395Z" },
]
[package.optional-dependencies]
@@ -2213,20 +2216,20 @@ requests = [
[[package]]
name = "google-auth-httplib2"
version = "0.3.0"
version = "0.3.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "google-auth" },
{ name = "httplib2" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d5/ad/c1f2b1175096a8d04cf202ad5ea6065f108d26be6fc7215876bde4a7981d/google_auth_httplib2-0.3.0.tar.gz", hash = "sha256:177898a0175252480d5ed916aeea183c2df87c1f9c26705d74ae6b951c268b0b", size = 11134, upload-time = "2025-12-15T22:13:51.825Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ed/99/107612bef8d24b298bb5a7c8466f908ecda791d43f9466f5c3978f5b24c1/google_auth_httplib2-0.3.1.tar.gz", hash = "sha256:0af542e815784cb64159b4469aa5d71dd41069ba93effa006e1916b1dcd88e55", size = 11152, upload-time = "2026-03-30T22:50:26.766Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/99/d5/3c97526c8796d3caf5f4b3bed2b05e8a7102326f00a334e7a438237f3b22/google_auth_httplib2-0.3.0-py3-none-any.whl", hash = "sha256:426167e5df066e3f5a0fc7ea18768c08e7296046594ce4c8c409c2457dd1f776", size = 9529, upload-time = "2025-12-15T22:13:51.048Z" },
{ url = "https://files.pythonhosted.org/packages/97/e9/93afb14d23a949acaa3f4e7cc51a0024671174e116e35f42850764b99634/google_auth_httplib2-0.3.1-py3-none-any.whl", hash = "sha256:682356a90ef4ba3d06548c37e9112eea6fc00395a11b0303a644c1a86abc275c", size = 9534, upload-time = "2026-03-30T22:49:03.384Z" },
]
[[package]]
name = "google-cloud-aiplatform"
version = "1.143.0"
version = "1.145.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "docstring-parser" },
@@ -2242,9 +2245,9 @@ dependencies = [
{ name = "pydantic" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a7/08/939fb05870fdf155410a927e22f5b053d49f18e215618e102fba1d8bb147/google_cloud_aiplatform-1.143.0.tar.gz", hash = "sha256:1f0124a89795a6b473deb28724dd37d95334205df3a9c9c48d0b8d7a3d5d5cc4", size = 10215389, upload-time = "2026-03-25T18:30:15.444Z" }
sdist = { url = "https://files.pythonhosted.org/packages/26/e5/6442d9d2c019456638825d4665b1e87ec4eaf1d182950ba426d0f0210eab/google_cloud_aiplatform-1.145.0.tar.gz", hash = "sha256:7894c4f3d2684bdb60e9a122004c01678e3b585174a27298ae7a3ed1e5eaf3bd", size = 10222904, upload-time = "2026-04-02T14:06:58.322Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/90/14/16323e604e79dc63b528268f97a841c2c29dd8eb16395de6bf530c1a5ebe/google_cloud_aiplatform-1.143.0-py2.py3-none-any.whl", hash = "sha256:78df97d044859f743a9cc48b89a260d33579b0d548b1589bb3ae9f4c2afc0c5a", size = 8392705, upload-time = "2026-03-25T18:30:11.496Z" },
{ url = "https://files.pythonhosted.org/packages/3d/c6/23e98d3407d5e2416a3dfaecb0a053da899848c50db69e5f2b61a555ce06/google_cloud_aiplatform-1.145.0-py2.py3-none-any.whl", hash = "sha256:4d1c31797a8bd8f3342ed5f186dd30d1f6bca73ddbee2bde452777100d2ddc11", size = 8396640, upload-time = "2026-04-02T14:06:54.125Z" },
]
[[package]]
@@ -2360,14 +2363,14 @@ wheels = [
[[package]]
name = "googleapis-common-protos"
version = "1.73.1"
version = "1.74.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "protobuf" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a1/c0/4a54c386282c13449eca8bbe2ddb518181dc113e78d240458a68856b4d69/googleapis_common_protos-1.73.1.tar.gz", hash = "sha256:13114f0e9d2391756a0194c3a8131974ed7bffb06086569ba193364af59163b6", size = 147506, upload-time = "2026-03-26T22:17:38.451Z" }
sdist = { url = "https://files.pythonhosted.org/packages/20/18/a746c8344152d368a5aac738d4c857012f2c5d1fd2eac7e17b647a7861bd/googleapis_common_protos-1.74.0.tar.gz", hash = "sha256:57971e4eeeba6aad1163c1f0fc88543f965bb49129b8bb55b2b7b26ecab084f1", size = 151254, upload-time = "2026-04-02T21:23:26.679Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/dc/82/fcb6520612bec0c39b973a6c0954b6a0d948aadfe8f7e9487f60ceb8bfa6/googleapis_common_protos-1.73.1-py3-none-any.whl", hash = "sha256:e51f09eb0a43a8602f5a915870972e6b4a394088415c79d79605a46d8e826ee8", size = 297556, upload-time = "2026-03-26T22:15:58.455Z" },
{ url = "https://files.pythonhosted.org/packages/b6/b0/be5d3329badb9230b765de6eea66b73abd5944bdeb5afb3562ddcd80ae84/googleapis_common_protos-1.74.0-py3-none-any.whl", hash = "sha256:702216f78610bb510e3f12ac3cafd281b7ac45cc5d86e90ad87e4d301a3426b5", size = 300743, upload-time = "2026-04-02T21:22:49.108Z" },
]
[package.optional-dependencies]
@@ -2783,14 +2786,14 @@ wheels = [
[[package]]
name = "hypothesis"
version = "6.151.10"
version = "6.151.11"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "sortedcontainers" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f5/dd/633e2cd62377333b7681628aee2ec1d88166f5bdf916b08c98b1e8288ad3/hypothesis-6.151.10.tar.gz", hash = "sha256:6c9565af8b4aa3a080b508f66ce9c2a77dd613c7e9073e27fc7e4ef9f45f8a27", size = 463762, upload-time = "2026-03-29T01:06:22.19Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a9/58/41af0d539b3c95644d1e4e353cbd6ac9473e892ea21802546a8886b79078/hypothesis-6.151.11.tar.gz", hash = "sha256:f33dcb68b62c7b07c9ac49664989be898fa8ce57583f0dc080259a197c6c7ff1", size = 463779, upload-time = "2026-04-05T17:35:55.935Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/40/da/439bb2e451979f5e88c13bbebc3e9e17754429cfb528c93677b2bd81783b/hypothesis-6.151.10-py3-none-any.whl", hash = "sha256:b0d7728f0c8c2be009f89fcdd6066f70c5439aa0f94adbb06e98261d05f49b05", size = 529493, upload-time = "2026-03-29T01:06:19.161Z" },
{ url = "https://files.pythonhosted.org/packages/1d/06/f49393eca84b87b17a67aaebf9f6251190ba1e9fe9f2236504049fc43fee/hypothesis-6.151.11-py3-none-any.whl", hash = "sha256:7ac05173206746cec8312f95164a30a4eb4916815413a278922e63ff1e404648", size = 529572, upload-time = "2026-04-05T17:35:53.438Z" },
]
[[package]]
@@ -2858,14 +2861,14 @@ wheels = [
[[package]]
name = "intersystems-irispython"
version = "5.3.1"
version = "5.3.2"
source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/33/5b/8eac672a6ef26bef6ef79a7c9557096167b50c4d3577d558ae6999c195fe/intersystems_irispython-5.3.1-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-macosx_10_9_universal2.whl", hash = "sha256:634c9b4ec620837d830ff49543aeb2797a1ce8d8570a0e868398b85330dfcc4d", size = 6736686, upload-time = "2025-12-19T16:24:57.734Z" },
{ url = "https://files.pythonhosted.org/packages/ba/17/bab3e525ffb6711355f7feea18c1b7dced9c2484cecbcdd83f74550398c0/intersystems_irispython-5.3.1-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cf912f30f85e2a42f2c2ea77fbeb98a24154d5ea7428a50382786a684ec4f583", size = 16005259, upload-time = "2025-12-19T16:25:05.578Z" },
{ url = "https://files.pythonhosted.org/packages/39/59/9bb79d9e32e3e55fc9aed8071a797b4497924cbc6457cea9255bb09320b7/intersystems_irispython-5.3.1-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be5659a6bb57593910f2a2417eddb9f5dc2f93a337ead6ddca778f557b8a359a", size = 15638040, upload-time = "2025-12-19T16:24:54.429Z" },
{ url = "https://files.pythonhosted.org/packages/cf/47/654ccf9c5cca4f5491f070888544165c9e2a6a485e320ea703e4e38d2358/intersystems_irispython-5.3.1-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-win32.whl", hash = "sha256:583e4f17088c1e0530f32efda1c0ccb02993cbc22035bc8b4c71d8693b04ee7e", size = 2879644, upload-time = "2025-12-19T16:24:59.945Z" },
{ url = "https://files.pythonhosted.org/packages/68/95/19cc13d09f1b4120bd41b1434509052e1d02afd27f2679266d7ad9cc1750/intersystems_irispython-5.3.1-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-win_amd64.whl", hash = "sha256:1d5d40450a0cdeec2a1f48d12d946a8a8ffc7c128576fcae7d58e66e3a127eae", size = 3522092, upload-time = "2025-12-19T16:25:01.834Z" },
{ url = "https://files.pythonhosted.org/packages/d2/23/0a7bc92e68480d523015eb454aa0ec73a33320975d10d5500ba54ccd124e/intersystems_irispython-5.3.2-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-macosx_10_9_universal2.whl", hash = "sha256:8af5e31273ad97c391141111630e8303d510272360b609990a8c85e56a7850ac", size = 7121915, upload-time = "2026-03-31T18:53:12.205Z" },
{ url = "https://files.pythonhosted.org/packages/22/cc/2f066a0dc82fae884b655d2f862bd51dd21a4322d4b9f898117f74c010b4/intersystems_irispython-5.3.2-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:25663d3cce7b414451a781ffaeb785e8f8439d0275920ffd4f05add2c056abfd", size = 16247974, upload-time = "2026-03-31T18:53:13.798Z" },
{ url = "https://files.pythonhosted.org/packages/27/cd/cef09a8310541d99fdbe89b2eccc21a6d776384325a9a6e740ad01e8461f/intersystems_irispython-5.3.2-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d5cb6efc3e2b9651f1c37539a3f69a823e80c32210d11d745cffad1eca4c7995", size = 15900577, upload-time = "2026-03-31T18:53:15.958Z" },
{ url = "https://files.pythonhosted.org/packages/37/91/0e08555834de10f59810ef6c615af72c3f234920c70cc0421d455ba9c359/intersystems_irispython-5.3.2-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-win32.whl", hash = "sha256:a250b21067c9e8275232ca798dcfe0719a970cd6ec9f2023923c810fffa46f41", size = 3046761, upload-time = "2026-03-31T18:53:09.151Z" },
{ url = "https://files.pythonhosted.org/packages/21/28/00b6b03b648005cb9c14dc75943e7cccce83eb5fd8fdba502028c25c7fc4/intersystems_irispython-5.3.2-cp38.cp39.cp310.cp311.cp312.cp313.cp314-cp38.cp39.cp310.cp311.cp312.cp313.cp314-win_amd64.whl", hash = "sha256:43feb7e23bc9f77db7bb140d1b55c22090b0c46691b570b1faaf6875baa6452d", size = 3742519, upload-time = "2026-03-31T18:53:10.597Z" },
]
[[package]]
@@ -3049,12 +3052,11 @@ sdist = { url = "https://files.pythonhosted.org/packages/0e/72/a3add0e4eec4eb9e2
[[package]]
name = "langfuse"
version = "4.0.1"
version = "4.0.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "backoff" },
{ name = "httpx" },
{ name = "openai" },
{ name = "opentelemetry-api" },
{ name = "opentelemetry-exporter-otlp-proto-http" },
{ name = "opentelemetry-sdk" },
@@ -3062,14 +3064,14 @@ dependencies = [
{ name = "pydantic" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/94/ab00e21fa5977d6b9c68fb3a95de2aa1a1e586964ff2af3e37405bf65d9f/langfuse-4.0.1.tar.gz", hash = "sha256:40a6daf3ab505945c314246d5b577d48fcfde0a47e8c05267ea6bd494ae9608e", size = 272749, upload-time = "2026-03-19T14:03:34.508Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ab/d0/6d79ed5614f86f27f5df199cf10c6facf6874ff6f91b828ae4dad90aa86d/langfuse-4.0.6.tar.gz", hash = "sha256:83a6f8cc8f1431fa2958c91e2673bc4179f993297e9b1acd1dbf001785e6cf83", size = 274094, upload-time = "2026-04-01T20:04:15.153Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/27/8f/3145ef00940f9c29d7e0200fd040f35616eac21c6ab4610a1ba14f3a04c1/langfuse-4.0.1-py3-none-any.whl", hash = "sha256:e22f49ea31304f97fc31a97c014ba63baa8802d9568295d54f06b00b43c30524", size = 465049, upload-time = "2026-03-19T14:03:32.527Z" },
{ url = "https://files.pythonhosted.org/packages/50/b4/088048e37b6d7ec1b52c6a11bc33101454285a22eaab8303dcccfd78344d/langfuse-4.0.6-py3-none-any.whl", hash = "sha256:0562b1dcf83247f9d8349f0f755eaed9a7f952fee67e66580970f0738bf3adbf", size = 472841, upload-time = "2026-04-01T20:04:16.451Z" },
]
[[package]]
name = "langsmith"
version = "0.7.22"
version = "0.7.25"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
@@ -3082,9 +3084,9 @@ dependencies = [
{ name = "xxhash" },
{ name = "zstandard" },
]
sdist = { url = "https://files.pythonhosted.org/packages/be/2a/2d5e6c67396fd228670af278c4da7bd6db2b8d11deaf6f108490b6d3f561/langsmith-0.7.22.tar.gz", hash = "sha256:35bfe795d648b069958280760564632fd28ebc9921c04f3e209c0db6a6c7dc04", size = 1134923, upload-time = "2026-03-19T22:45:23.492Z" }
sdist = { url = "https://files.pythonhosted.org/packages/7e/d7/21ffae5ccdc3c9b8de283e8f8bf48a92039681df0d39f15133d8ff8965bd/langsmith-0.7.25.tar.gz", hash = "sha256:d17da71f156ca69eafd28ac9627c8e0e93170260ec37cd27cedc83205a067598", size = 1145410, upload-time = "2026-04-03T13:11:42.36Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1a/94/1f5d72655ab6534129540843776c40eff757387b88e798d8b3bf7e313fd4/langsmith-0.7.22-py3-none-any.whl", hash = "sha256:6e9d5148314d74e86748cb9d3898632cad0320c9323d95f70f969e5bc078eee4", size = 359927, upload-time = "2026-03-19T22:45:21.603Z" },
{ url = "https://files.pythonhosted.org/packages/29/13/67889d41baf7dbaf13ffd0b334a0f284e107fad1cc8782a1abb1e56e5eeb/langsmith-0.7.25-py3-none-any.whl", hash = "sha256:55ecc24c547f6c79b5a684ff8685c669eec34e52fcac5d2c0af7d613aef5a632", size = 359417, upload-time = "2026-04-03T13:11:40.729Z" },
]
[[package]]
@@ -3413,7 +3415,7 @@ wheels = [
[[package]]
name = "mypy"
version = "1.19.1"
version = "1.20.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "librt", marker = "platform_python_implementation != 'PyPy'" },
@@ -3421,15 +3423,16 @@ dependencies = [
{ name = "pathspec" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f8/5c/b0089fe7fef0a994ae5ee07029ced0526082c6cfaaa4c10d40a10e33b097/mypy-1.20.0.tar.gz", hash = "sha256:eb96c84efcc33f0b5e0e04beacf00129dd963b67226b01c00b9dfc8affb464c3", size = 3815028, upload-time = "2026-03-31T16:55:14.959Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" },
{ url = "https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" },
{ url = "https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" },
{ url = "https://files.pythonhosted.org/packages/00/be/dd56c1fd4807bc1eba1cf18b2a850d0de7bacb55e158755eb79f77c41f8e/mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8", size = 13620847, upload-time = "2025-12-15T05:03:39.633Z" },
{ url = "https://files.pythonhosted.org/packages/6d/42/332951aae42b79329f743bf1da088cd75d8d4d9acc18fbcbd84f26c1af4e/mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a", size = 13834976, upload-time = "2025-12-15T05:03:08.786Z" },
{ url = "https://files.pythonhosted.org/packages/6f/63/e7493e5f90e1e085c562bb06e2eb32cae27c5057b9653348d38b47daaecc/mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13", size = 10118104, upload-time = "2025-12-15T05:03:10.834Z" },
{ url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" },
{ url = "https://files.pythonhosted.org/packages/be/dd/3afa29b58c2e57c79116ed55d700721c3c3b15955e2b6251dd165d377c0e/mypy-1.20.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:002b613ae19f4ac7d18b7e168ffe1cb9013b37c57f7411984abbd3b817b0a214", size = 14509525, upload-time = "2026-03-31T16:55:01.824Z" },
{ url = "https://files.pythonhosted.org/packages/54/eb/227b516ab8cad9f2a13c5e7a98d28cd6aa75e9c83e82776ae6c1c4c046c7/mypy-1.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9336b5e6712f4adaf5afc3203a99a40b379049104349d747eb3e5a3aa23ac2e", size = 13326469, upload-time = "2026-03-31T16:51:41.23Z" },
{ url = "https://files.pythonhosted.org/packages/57/d4/1ddb799860c1b5ac6117ec307b965f65deeb47044395ff01ab793248a591/mypy-1.20.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f13b3e41bce9d257eded794c0f12878af3129d80aacd8a3ee0dee51f3a978651", size = 13705953, upload-time = "2026-03-31T16:48:55.69Z" },
{ url = "https://files.pythonhosted.org/packages/c5/b7/54a720f565a87b893182a2a393370289ae7149e4715859e10e1c05e49154/mypy-1.20.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9804c3ad27f78e54e58b32e7cb532d128b43dbfb9f3f9f06262b821a0f6bd3f5", size = 14710363, upload-time = "2026-03-31T16:53:26.948Z" },
{ url = "https://files.pythonhosted.org/packages/b2/2a/74810274848d061f8a8ea4ac23aaad43bd3d8c1882457999c2e568341c57/mypy-1.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:697f102c5c1d526bdd761a69f17c6070f9892eebcb94b1a5963d679288c09e78", size = 14947005, upload-time = "2026-03-31T16:50:17.591Z" },
{ url = "https://files.pythonhosted.org/packages/77/91/21b8ba75f958bcda75690951ce6fa6b7138b03471618959529d74b8544e2/mypy-1.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ecd63f75fdd30327e4ad8b5704bd6d91fc6c1b2e029f8ee14705e1207212489", size = 10880616, upload-time = "2026-03-31T16:52:19.986Z" },
{ url = "https://files.pythonhosted.org/packages/8a/15/3d8198ef97c1ca03aea010cce4f1d4f3bc5d9849e8c0140111ca2ead9fdd/mypy-1.20.0-cp312-cp312-win_arm64.whl", hash = "sha256:f194db59657c58593a3c47c6dfd7bad4ef4ac12dbc94d01b3a95521f78177e33", size = 9813091, upload-time = "2026-03-31T16:53:44.385Z" },
{ url = "https://files.pythonhosted.org/packages/21/66/4d734961ce167f0fd8380769b3b7c06dbdd6ff54c2190f3f2ecd22528158/mypy-1.20.0-py3-none-any.whl", hash = "sha256:a6e0641147cbfa7e4e94efdb95c2dab1aff8cfc159ded13e07f308ddccc8c48e", size = 2636365, upload-time = "2026-03-31T16:51:44.911Z" },
]
[[package]]
@@ -4006,7 +4009,7 @@ wheels = [
[[package]]
name = "opik"
version = "1.10.54"
version = "1.10.58"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "boto3-stubs", extra = ["bedrock-runtime"] },
@@ -4025,9 +4028,9 @@ dependencies = [
{ name = "tqdm" },
{ name = "uuid6" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fa/c9/ecc68c5ae32bf5b1074bdc713cb1543b8e2a46c58c814bf150fecf50f272/opik-1.10.54.tar.gz", hash = "sha256:46e29abf4656bd80b9cb339659d24ecf97b61f37c3fde594de75e5f59953e9d3", size = 812757, upload-time = "2026-03-27T11:23:06.109Z" }
sdist = { url = "https://files.pythonhosted.org/packages/52/bc/54673138cf374226ab9fcdd5685e92442c0d5a95775ff22b870c767387e6/opik-1.10.58.tar.gz", hash = "sha256:058f8b3e3171a1f5e75f25cf1fea392b8f2e0ddba18765fafd24cd756783002b", size = 833671, upload-time = "2026-04-01T11:43:21.571Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/58/91/1ae4e8a349da0620a6f0a4fc51cd00c3e75176939d022e8684379aee2928/opik-1.10.54-py3-none-any.whl", hash = "sha256:5f8ddabe5283ebe08d455e81b188d6e09ce1d1efa989f8b05567ef70f1e9aeda", size = 1379008, upload-time = "2026-03-27T11:23:04.582Z" },
{ url = "https://files.pythonhosted.org/packages/33/9a/99cf048209f10f8444544202b007d5fbe0a6104465d29038b25932b1c79f/opik-1.10.58-py3-none-any.whl", hash = "sha256:29be9d7f846f3229a027250997195e583da840179ad03f3d28b1d613687963e3", size = 1400658, upload-time = "2026-04-01T11:43:20.096Z" },
]
[[package]]
@@ -4174,11 +4177,11 @@ wheels = [
[[package]]
name = "pathspec"
version = "0.12.1"
version = "1.0.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" }
sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
{ url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" },
]
[[package]]
@@ -4688,16 +4691,16 @@ wheels = [
[[package]]
name = "pymochow"
version = "2.3.6"
version = "2.4.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "future" },
{ name = "orjson" },
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5e/04/2edda5447aa7c87a0b2b7c75406cc0fbcceeddd09c76b04edfb84eb47499/pymochow-2.3.6.tar.gz", hash = "sha256:6249a2fa410ef22e9e702710d725e7e052f492af87233ffe911845f931557632", size = 51123, upload-time = "2025-12-12T06:23:24.162Z" }
sdist = { url = "https://files.pythonhosted.org/packages/1d/06/ba1b9ad8939a7289196df73934eb805bdd3e38473ccf2edcc06018f156c5/pymochow-2.4.0.tar.gz", hash = "sha256:63d9f9abc44d3643b4384fd233005978a0079b45bbb35700a81ccb99c1442cfd", size = 51300, upload-time = "2026-04-02T10:24:11.883Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/aa/86/588c75acbcc7dd9860252f1ef2233212f36b6751ac0cdec15867fc2fc4d6/pymochow-2.3.6-py3-none-any.whl", hash = "sha256:d46cb3af4d908f0c15d875190b1945c0353b907d7e32f068636ee04433cf06b1", size = 78963, upload-time = "2025-12-12T06:23:21.419Z" },
{ url = "https://files.pythonhosted.org/packages/f3/f8/d3c23f0e1d15c66ce3e431cf1866309c375c0685ff0ed6e4ae21f72161b2/pymochow-2.4.0-py3-none-any.whl", hash = "sha256:52d128aa9bea643f51aded91fed99af4d6421922e7696dfe9a1877684469d172", size = 79149, upload-time = "2026-04-02T10:24:10.029Z" },
]
[[package]]
@@ -5307,27 +5310,27 @@ wheels = [
[[package]]
name = "ruff"
version = "0.15.8"
version = "0.15.9"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/14/b0/73cf7550861e2b4824950b8b52eebdcc5adc792a00c514406556c5b80817/ruff-0.15.8.tar.gz", hash = "sha256:995f11f63597ee362130d1d5a327a87cb6f3f5eae3094c620bcc632329a4d26e", size = 4610921, upload-time = "2026-03-26T18:39:38.675Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e6/97/e9f1ca355108ef7194e38c812ef40ba98c7208f47b13ad78d023caa583da/ruff-0.15.9.tar.gz", hash = "sha256:29cbb1255a9797903f6dde5ba0188c707907ff44a9006eb273b5a17bfa0739a2", size = 4617361, upload-time = "2026-04-02T18:17:20.829Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4a/92/c445b0cd6da6e7ae51e954939cb69f97e008dbe750cfca89b8cedc081be7/ruff-0.15.8-py3-none-linux_armv6l.whl", hash = "sha256:cbe05adeba76d58162762d6b239c9056f1a15a55bd4b346cfd21e26cd6ad7bc7", size = 10527394, upload-time = "2026-03-26T18:39:41.566Z" },
{ url = "https://files.pythonhosted.org/packages/eb/92/f1c662784d149ad1414cae450b082cf736430c12ca78367f20f5ed569d65/ruff-0.15.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d3e3d0b6ba8dca1b7ef9ab80a28e840a20070c4b62e56d675c24f366ef330570", size = 10905693, upload-time = "2026-03-26T18:39:30.364Z" },
{ url = "https://files.pythonhosted.org/packages/ca/f2/7a631a8af6d88bcef997eb1bf87cc3da158294c57044aafd3e17030613de/ruff-0.15.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6ee3ae5c65a42f273f126686353f2e08ff29927b7b7e203b711514370d500de3", size = 10323044, upload-time = "2026-03-26T18:39:33.37Z" },
{ url = "https://files.pythonhosted.org/packages/67/18/1bf38e20914a05e72ef3b9569b1d5c70a7ef26cd188d69e9ca8ef588d5bf/ruff-0.15.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdce027ada77baa448077ccc6ebb2fa9c3c62fd110d8659d601cf2f475858d94", size = 10629135, upload-time = "2026-03-26T18:39:44.142Z" },
{ url = "https://files.pythonhosted.org/packages/d2/e9/138c150ff9af60556121623d41aba18b7b57d95ac032e177b6a53789d279/ruff-0.15.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:12e617fc01a95e5821648a6df341d80456bd627bfab8a829f7cfc26a14a4b4a3", size = 10348041, upload-time = "2026-03-26T18:39:52.178Z" },
{ url = "https://files.pythonhosted.org/packages/02/f1/5bfb9298d9c323f842c5ddeb85f1f10ef51516ac7a34ba446c9347d898df/ruff-0.15.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:432701303b26416d22ba696c39f2c6f12499b89093b61360abc34bcc9bf07762", size = 11121987, upload-time = "2026-03-26T18:39:55.195Z" },
{ url = "https://files.pythonhosted.org/packages/10/11/6da2e538704e753c04e8d86b1fc55712fdbdcc266af1a1ece7a51fff0d10/ruff-0.15.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d910ae974b7a06a33a057cb87d2a10792a3b2b3b35e33d2699fdf63ec8f6b17a", size = 11951057, upload-time = "2026-03-26T18:39:19.18Z" },
{ url = "https://files.pythonhosted.org/packages/83/f0/c9208c5fd5101bf87002fed774ff25a96eea313d305f1e5d5744698dc314/ruff-0.15.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2033f963c43949d51e6fdccd3946633c6b37c484f5f98c3035f49c27395a8ab8", size = 11464613, upload-time = "2026-03-26T18:40:06.301Z" },
{ url = "https://files.pythonhosted.org/packages/f8/22/d7f2fabdba4fae9f3b570e5605d5eb4500dcb7b770d3217dca4428484b17/ruff-0.15.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f29b989a55572fb885b77464cf24af05500806ab4edf9a0fd8977f9759d85b1", size = 11257557, upload-time = "2026-03-26T18:39:57.972Z" },
{ url = "https://files.pythonhosted.org/packages/71/8c/382a9620038cf6906446b23ce8632ab8c0811b8f9d3e764f58bedd0c9a6f/ruff-0.15.8-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:ac51d486bf457cdc985a412fb1801b2dfd1bd8838372fc55de64b1510eff4bec", size = 11169440, upload-time = "2026-03-26T18:39:22.205Z" },
{ url = "https://files.pythonhosted.org/packages/4d/0d/0994c802a7eaaf99380085e4e40c845f8e32a562e20a38ec06174b52ef24/ruff-0.15.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c9861eb959edab053c10ad62c278835ee69ca527b6dcd72b47d5c1e5648964f6", size = 10605963, upload-time = "2026-03-26T18:39:46.682Z" },
{ url = "https://files.pythonhosted.org/packages/19/aa/d624b86f5b0aad7cef6bbf9cd47a6a02dfdc4f72c92a337d724e39c9d14b/ruff-0.15.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8d9a5b8ea13f26ae90838afc33f91b547e61b794865374f114f349e9036835fb", size = 10357484, upload-time = "2026-03-26T18:39:49.176Z" },
{ url = "https://files.pythonhosted.org/packages/35/c3/e0b7835d23001f7d999f3895c6b569927c4d39912286897f625736e1fd04/ruff-0.15.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c2a33a529fb3cbc23a7124b5c6ff121e4d6228029cba374777bd7649cc8598b8", size = 10830426, upload-time = "2026-03-26T18:40:03.702Z" },
{ url = "https://files.pythonhosted.org/packages/f0/51/ab20b322f637b369383adc341d761eaaa0f0203d6b9a7421cd6e783d81b9/ruff-0.15.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:75e5cd06b1cf3f47a3996cfc999226b19aa92e7cce682dcd62f80d7035f98f49", size = 11345125, upload-time = "2026-03-26T18:39:27.799Z" },
{ url = "https://files.pythonhosted.org/packages/37/e6/90b2b33419f59d0f2c4c8a48a4b74b460709a557e8e0064cf33ad894f983/ruff-0.15.8-py3-none-win32.whl", hash = "sha256:bc1f0a51254ba21767bfa9a8b5013ca8149dcf38092e6a9eb704d876de94dc34", size = 10571959, upload-time = "2026-03-26T18:39:36.117Z" },
{ url = "https://files.pythonhosted.org/packages/1f/a2/ef467cb77099062317154c63f234b8a7baf7cb690b99af760c5b68b9ee7f/ruff-0.15.8-py3-none-win_amd64.whl", hash = "sha256:04f79eff02a72db209d47d665ba7ebcad609d8918a134f86cb13dd132159fc89", size = 11743893, upload-time = "2026-03-26T18:39:25.01Z" },
{ url = "https://files.pythonhosted.org/packages/15/e2/77be4fff062fa78d9b2a4dea85d14785dac5f1d0c1fb58ed52331f0ebe28/ruff-0.15.8-py3-none-win_arm64.whl", hash = "sha256:cf891fa8e3bb430c0e7fac93851a5978fc99c8fa2c053b57b118972866f8e5f2", size = 11048175, upload-time = "2026-03-26T18:40:01.06Z" },
{ url = "https://files.pythonhosted.org/packages/0b/1f/9cdfd0ac4b9d1e5a6cf09bedabdf0b56306ab5e333c85c87281273e7b041/ruff-0.15.9-py3-none-linux_armv6l.whl", hash = "sha256:6efbe303983441c51975c243e26dff328aca11f94b70992f35b093c2e71801e1", size = 10511206, upload-time = "2026-04-02T18:16:41.574Z" },
{ url = "https://files.pythonhosted.org/packages/3d/f6/32bfe3e9c136b35f02e489778d94384118bb80fd92c6d92e7ccd97db12ce/ruff-0.15.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4965bac6ac9ea86772f4e23587746f0b7a395eccabb823eb8bfacc3fa06069f7", size = 10923307, upload-time = "2026-04-02T18:17:08.645Z" },
{ url = "https://files.pythonhosted.org/packages/ca/25/de55f52ab5535d12e7aaba1de37a84be6179fb20bddcbe71ec091b4a3243/ruff-0.15.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf05aad70ca5b5a0a4b0e080df3a6b699803916d88f006efd1f5b46302daab8", size = 10316722, upload-time = "2026-04-02T18:16:44.206Z" },
{ url = "https://files.pythonhosted.org/packages/48/11/690d75f3fd6278fe55fff7c9eb429c92d207e14b25d1cae4064a32677029/ruff-0.15.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9439a342adb8725f32f92732e2bafb6d5246bd7a5021101166b223d312e8fc59", size = 10623674, upload-time = "2026-04-02T18:16:50.951Z" },
{ url = "https://files.pythonhosted.org/packages/bd/ec/176f6987be248fc5404199255522f57af1b4a5a1b57727e942479fec98ad/ruff-0.15.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c5e6faf9d97c8edc43877c3f406f47446fc48c40e1442d58cfcdaba2acea745", size = 10351516, upload-time = "2026-04-02T18:16:57.206Z" },
{ url = "https://files.pythonhosted.org/packages/b2/fc/51cffbd2b3f240accc380171d51446a32aa2ea43a40d4a45ada67368fbd2/ruff-0.15.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b34a9766aeec27a222373d0b055722900fbc0582b24f39661aa96f3fe6ad901", size = 11150202, upload-time = "2026-04-02T18:17:06.452Z" },
{ url = "https://files.pythonhosted.org/packages/d6/d4/25292a6dfc125f6b6528fe6af31f5e996e19bf73ca8e3ce6eb7fa5b95885/ruff-0.15.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89dd695bc72ae76ff484ae54b7e8b0f6b50f49046e198355e44ea656e521fef9", size = 11988891, upload-time = "2026-04-02T18:17:18.575Z" },
{ url = "https://files.pythonhosted.org/packages/13/e1/1eebcb885c10e19f969dcb93d8413dfee8172578709d7ee933640f5e7147/ruff-0.15.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce187224ef1de1bd225bc9a152ac7102a6171107f026e81f317e4257052916d5", size = 11480576, upload-time = "2026-04-02T18:16:52.986Z" },
{ url = "https://files.pythonhosted.org/packages/ff/6b/a1548ac378a78332a4c3dcf4a134c2475a36d2a22ddfa272acd574140b50/ruff-0.15.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b0c7c341f68adb01c488c3b7d4b49aa8ea97409eae6462d860a79cf55f431b6", size = 11254525, upload-time = "2026-04-02T18:17:02.041Z" },
{ url = "https://files.pythonhosted.org/packages/42/aa/4bb3af8e61acd9b1281db2ab77e8b2c3c5e5599bf2a29d4a942f1c62b8d6/ruff-0.15.9-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:55cc15eee27dc0eebdfcb0d185a6153420efbedc15eb1d38fe5e685657b0f840", size = 11204072, upload-time = "2026-04-02T18:17:13.581Z" },
{ url = "https://files.pythonhosted.org/packages/69/48/d550dc2aa6e423ea0bcc1d0ff0699325ffe8a811e2dba156bd80750b86dc/ruff-0.15.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a6537f6eed5cda688c81073d46ffdfb962a5f29ecb6f7e770b2dc920598997ed", size = 10594998, upload-time = "2026-04-02T18:16:46.369Z" },
{ url = "https://files.pythonhosted.org/packages/63/47/321167e17f5344ed5ec6b0aa2cff64efef5f9e985af8f5622cfa6536043f/ruff-0.15.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:6d3fcbca7388b066139c523bda744c822258ebdcfbba7d24410c3f454cc9af71", size = 10359769, upload-time = "2026-04-02T18:17:10.994Z" },
{ url = "https://files.pythonhosted.org/packages/67/5e/074f00b9785d1d2c6f8c22a21e023d0c2c1817838cfca4c8243200a1fa87/ruff-0.15.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:058d8e99e1bfe79d8a0def0b481c56059ee6716214f7e425d8e737e412d69677", size = 10850236, upload-time = "2026-04-02T18:16:48.749Z" },
{ url = "https://files.pythonhosted.org/packages/76/37/804c4135a2a2caf042925d30d5f68181bdbd4461fd0d7739da28305df593/ruff-0.15.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:8e1ddb11dbd61d5983fa2d7d6370ef3eb210951e443cace19594c01c72abab4c", size = 11358343, upload-time = "2026-04-02T18:16:55.068Z" },
{ url = "https://files.pythonhosted.org/packages/88/3d/1364fcde8656962782aa9ea93c92d98682b1ecec2f184e625a965ad3b4a6/ruff-0.15.9-py3-none-win32.whl", hash = "sha256:bde6ff36eaf72b700f32b7196088970bf8fdb2b917b7accd8c371bfc0fd573ec", size = 10583382, upload-time = "2026-04-02T18:17:04.261Z" },
{ url = "https://files.pythonhosted.org/packages/4c/56/5c7084299bd2cacaa07ae63a91c6f4ba66edc08bf28f356b24f6b717c799/ruff-0.15.9-py3-none-win_amd64.whl", hash = "sha256:45a70921b80e1c10cf0b734ef09421f71b5aa11d27404edc89d7e8a69505e43d", size = 11744969, upload-time = "2026-04-02T18:16:59.611Z" },
{ url = "https://files.pythonhosted.org/packages/03/36/76704c4f312257d6dbaae3c959add2a622f63fcca9d864659ce6d8d97d3d/ruff-0.15.9-py3-none-win_arm64.whl", hash = "sha256:0694e601c028fd97dc5c6ee244675bc241aeefced7ef80cd9c6935a871078f53", size = 11005870, upload-time = "2026-04-02T18:17:15.773Z" },
]
[[package]]
@@ -5550,22 +5553,22 @@ wheels = [
[[package]]
name = "sqlalchemy"
version = "2.0.48"
version = "2.0.49"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/1f/73/b4a9737255583b5fa858e0bb8e116eb94b88c910164ed2ed719147bde3de/sqlalchemy-2.0.48.tar.gz", hash = "sha256:5ca74f37f3369b45e1f6b7b06afb182af1fd5dde009e4ffd831830d98cbe5fe7", size = 9886075, upload-time = "2026-03-02T15:28:51.474Z" }
sdist = { url = "https://files.pythonhosted.org/packages/09/45/461788f35e0364a8da7bda51a1fe1b09762d0c32f12f63727998d85a873b/sqlalchemy-2.0.49.tar.gz", hash = "sha256:d15950a57a210e36dd4cec1aac22787e2a4d57ba9318233e2ef8b2daf9ff2d5f", size = 9898221, upload-time = "2026-04-03T16:38:11.704Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ef/91/a42ae716f8925e9659df2da21ba941f158686856107a61cc97a95e7647a3/sqlalchemy-2.0.48-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:348174f228b99f33ca1f773e85510e08927620caa59ffe7803b37170df30332b", size = 2155737, upload-time = "2026-03-02T15:49:13.207Z" },
{ url = "https://files.pythonhosted.org/packages/b9/52/f75f516a1f3888f027c1cfb5d22d4376f4b46236f2e8669dcb0cddc60275/sqlalchemy-2.0.48-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53667b5f668991e279d21f94ccfa6e45b4e3f4500e7591ae59a8012d0f010dcb", size = 3337020, upload-time = "2026-03-02T15:50:34.547Z" },
{ url = "https://files.pythonhosted.org/packages/37/9a/0c28b6371e0cdcb14f8f1930778cb3123acfcbd2c95bb9cf6b4a2ba0cce3/sqlalchemy-2.0.48-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34634e196f620c7a61d18d5cf7dc841ca6daa7961aed75d532b7e58b309ac894", size = 3349983, upload-time = "2026-03-02T15:53:25.542Z" },
{ url = "https://files.pythonhosted.org/packages/1c/46/0aee8f3ff20b1dcbceb46ca2d87fcc3d48b407925a383ff668218509d132/sqlalchemy-2.0.48-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:546572a1793cc35857a2ffa1fe0e58571af1779bcc1ffa7c9fb0839885ed69a9", size = 3279690, upload-time = "2026-03-02T15:50:36.277Z" },
{ url = "https://files.pythonhosted.org/packages/ce/8c/a957bc91293b49181350bfd55e6dfc6e30b7f7d83dc6792d72043274a390/sqlalchemy-2.0.48-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:07edba08061bc277bfdc772dd2a1a43978f5a45994dd3ede26391b405c15221e", size = 3314738, upload-time = "2026-03-02T15:53:27.519Z" },
{ url = "https://files.pythonhosted.org/packages/4b/44/1d257d9f9556661e7bdc83667cc414ba210acfc110c82938cb3611eea58f/sqlalchemy-2.0.48-cp312-cp312-win32.whl", hash = "sha256:908a3fa6908716f803b86896a09a2c4dde5f5ce2bb07aacc71ffebb57986ce99", size = 2115546, upload-time = "2026-03-02T15:54:31.591Z" },
{ url = "https://files.pythonhosted.org/packages/f2/af/c3c7e1f3a2b383155a16454df62ae8c62a30dd238e42e68c24cebebbfae6/sqlalchemy-2.0.48-cp312-cp312-win_amd64.whl", hash = "sha256:68549c403f79a8e25984376480959975212a670405e3913830614432b5daa07a", size = 2142484, upload-time = "2026-03-02T15:54:34.072Z" },
{ url = "https://files.pythonhosted.org/packages/46/2c/9664130905f03db57961b8980b05cab624afd114bf2be2576628a9f22da4/sqlalchemy-2.0.48-py3-none-any.whl", hash = "sha256:a66fe406437dd65cacd96a72689a3aaaecaebbcd62d81c5ac1c0fdbeac835096", size = 1940202, upload-time = "2026-03-02T15:52:43.285Z" },
{ url = "https://files.pythonhosted.org/packages/49/b3/2de412451330756aaaa72d27131db6dde23995efe62c941184e15242a5fa/sqlalchemy-2.0.49-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4bbccb45260e4ff1b7db0be80a9025bb1e6698bdb808b83fff0000f7a90b2c0b", size = 2157681, upload-time = "2026-04-03T16:53:07.132Z" },
{ url = "https://files.pythonhosted.org/packages/50/84/b2a56e2105bd11ebf9f0b93abddd748e1a78d592819099359aa98134a8bf/sqlalchemy-2.0.49-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb37f15714ec2652d574f021d479e78cd4eb9d04396dca36568fdfffb3487982", size = 3338976, upload-time = "2026-04-03T17:07:40Z" },
{ url = "https://files.pythonhosted.org/packages/2c/fa/65fcae2ed62f84ab72cf89536c7c3217a156e71a2c111b1305ab6f0690e2/sqlalchemy-2.0.49-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb9ec6436a820a4c006aad1ac351f12de2f2dbdaad171692ee457a02429b672", size = 3351937, upload-time = "2026-04-03T17:12:23.374Z" },
{ url = "https://files.pythonhosted.org/packages/f8/2f/6fd118563572a7fe475925742eb6b3443b2250e346a0cc27d8d408e73773/sqlalchemy-2.0.49-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8d6efc136f44a7e8bc8088507eaabbb8c2b55b3dbb63fe102c690da0ddebe55e", size = 3281646, upload-time = "2026-04-03T17:07:41.949Z" },
{ url = "https://files.pythonhosted.org/packages/c5/d7/410f4a007c65275b9cf82354adb4bb8ba587b176d0a6ee99caa16fe638f8/sqlalchemy-2.0.49-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e06e617e3d4fd9e51d385dfe45b077a41e9d1b033a7702551e3278ac597dc750", size = 3316695, upload-time = "2026-04-03T17:12:25.642Z" },
{ url = "https://files.pythonhosted.org/packages/d9/95/81f594aa60ded13273a844539041ccf1e66c5a7bed0a8e27810a3b52d522/sqlalchemy-2.0.49-cp312-cp312-win32.whl", hash = "sha256:83101a6930332b87653886c01d1ee7e294b1fe46a07dd9a2d2b4f91bcc88eec0", size = 2117483, upload-time = "2026-04-03T17:05:40.896Z" },
{ url = "https://files.pythonhosted.org/packages/47/9e/fd90114059175cac64e4fafa9bf3ac20584384d66de40793ae2e2f26f3bb/sqlalchemy-2.0.49-cp312-cp312-win_amd64.whl", hash = "sha256:618a308215b6cececb6240b9abde545e3acdabac7ae3e1d4e666896bf5ba44b4", size = 2144494, upload-time = "2026-04-03T17:05:42.282Z" },
{ url = "https://files.pythonhosted.org/packages/e5/30/8519fdde58a7bdf155b714359791ad1dc018b47d60269d5d160d311fdc36/sqlalchemy-2.0.49-py3-none-any.whl", hash = "sha256:ec44cfa7ef1a728e88ad41674de50f6db8cfdb3e2af84af86e0041aaf02d43d0", size = 1942158, upload-time = "2026-04-03T16:53:44.135Z" },
]
[[package]]
@@ -5706,7 +5709,7 @@ wheels = [
[[package]]
name = "tablestore"
version = "6.4.2"
version = "6.4.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohttp" },
@@ -5719,9 +5722,9 @@ dependencies = [
{ name = "six" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/09/07/afa1d18521bab13bb813066892b73589937fcf68aea63a54b0b14dae17b5/tablestore-6.4.2.tar.gz", hash = "sha256:5251e14b7c7ebf3d49d37dde957b49c7dba04ee8715c2650109cc02f3b89cc77", size = 5071435, upload-time = "2026-03-26T15:39:06.498Z" }
sdist = { url = "https://files.pythonhosted.org/packages/85/0b/c875c2314d472eed9f9644a94ae0aa7e702a6084779a0136e539d5e7ed32/tablestore-6.4.3.tar.gz", hash = "sha256:4981139e68705052ade6341060a4b6238b1fb9a8c18b43a77383fda14f7554a9", size = 5072450, upload-time = "2026-03-31T04:34:37.832Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/3f/5fb3e8e5de36934fe38986b4e861657cebb3a6dfd97d32224cd40fc66359/tablestore-6.4.2-py3-none-any.whl", hash = "sha256:98c4cffa5eace4a3ea6fc2425263e733093c2baa43537f25dbaaf02e2b7882d8", size = 5114987, upload-time = "2026-03-26T15:39:04.074Z" },
{ url = "https://files.pythonhosted.org/packages/39/e0/e11626aea61e1352dafe7707c548d482769afd3ca28f45653d380ba85a5d/tablestore-6.4.3-py3-none-any.whl", hash = "sha256:207b89324cd4157db4559c7619d42b9510a55c0565f00a439389f14426d114c5", size = 5115764, upload-time = "2026-03-31T04:34:35.761Z" },
]
[[package]]
@@ -5747,7 +5750,7 @@ sdist = { url = "https://files.pythonhosted.org/packages/20/81/be13f417065200182
[[package]]
name = "tcvectordb"
version = "2.1.0"
version = "2.1.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cachetools" },
@@ -5760,9 +5763,9 @@ dependencies = [
{ name = "ujson" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/34/4c/3510489c20823c045a4f84c3f656b1af00b3fbbfa36efc494cf01492521f/tcvectordb-2.1.0.tar.gz", hash = "sha256:382615573f2b6d3e21535b686feac8895169b8eb56078fc73abb020676a1622f", size = 85691, upload-time = "2026-03-25T12:55:27.509Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/10/41a7cc192720a79f40d470cabec308f8d0ed2547371294eafde0dfd8136b/tcvectordb-2.1.1.tar.gz", hash = "sha256:37d4a14f22c23f777e99069a102ceae786713117fc848c067a8e8e363252e621", size = 93896, upload-time = "2026-03-30T10:05:27.788Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/99/cf/7f340b4dc30ed0d2758915d1c2a4b2e9f0c90ce4f322b7cf17e571c80a45/tcvectordb-2.1.0-py3-none-any.whl", hash = "sha256:afbfc5f82bda70480921b2308148cbd0c51c8b45b3eef6cea64ddd003c7577e9", size = 99615, upload-time = "2026-03-25T12:55:26.004Z" },
{ url = "https://files.pythonhosted.org/packages/e0/b6/2ab105d612165d274e1257b085a2cd64738220c4cbc0341887096b4d1977/tcvectordb-2.1.1-py3-none-any.whl", hash = "sha256:9a5090d3491ea087b25e5b72ffe5100f6330c05593d77f82bf8f893553dfae98", size = 107672, upload-time = "2026-03-30T10:05:25.949Z" },
]
[[package]]
@@ -6000,14 +6003,14 @@ wheels = [
[[package]]
name = "types-cffi"
version = "2.0.0.20260316"
version = "2.0.0.20260402"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "types-setuptools" },
]
sdist = { url = "https://files.pythonhosted.org/packages/07/4c/805b40b094eb3fd60f8d17fa7b3c58a33781311a95d0e6a74da0751ce294/types_cffi-2.0.0.20260316.tar.gz", hash = "sha256:8fb06ed4709675c999853689941133affcd2250cd6121cc11fd22c0d81ad510c", size = 17399, upload-time = "2026-03-16T07:54:43.059Z" }
sdist = { url = "https://files.pythonhosted.org/packages/cb/85/3896bfcb4e7c32904f762c36ff0afa96d3e39bfce5a95a41635af79c8761/types_cffi-2.0.0.20260402.tar.gz", hash = "sha256:47e1320c009f630c59c55c8e3d2b8c501e280babf52e92f6109cbfb0864ba367", size = 17476, upload-time = "2026-04-02T04:21:09.332Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/81/5e/9f1a709225ad9d0e1d7a6e4366ff285f0113c749e882d6cbeb40eab32e75/types_cffi-2.0.0.20260316-py3-none-any.whl", hash = "sha256:dd504698029db4c580385f679324621cc64d886e6a23e9821d52bc5169251302", size = 20096, upload-time = "2026-03-16T07:54:41.994Z" },
{ url = "https://files.pythonhosted.org/packages/ae/26/aacfef05841e31c65f889ae4225c6bce6b84cd5d3882c42a3661030f29ee/types_cffi-2.0.0.20260402-py3-none-any.whl", hash = "sha256:f647a400fba0a31d603479169d82ee5359db79bd1136e41dc7e6489296e3a2b2", size = 20103, upload-time = "2026-04-02T04:21:08.199Z" },
]
[[package]]
@@ -6021,20 +6024,20 @@ wheels = [
[[package]]
name = "types-defusedxml"
version = "0.7.0.20250822"
version = "0.7.0.20260402"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/7d/4a/5b997ae87bf301d1796f72637baa4e0e10d7db17704a8a71878a9f77f0c0/types_defusedxml-0.7.0.20250822.tar.gz", hash = "sha256:ba6c395105f800c973bba8a25e41b215483e55ec79c8ca82b6fe90ba0bc3f8b2", size = 10590, upload-time = "2025-08-22T03:02:59.547Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d3/3c/8e1243dda2fef73be93081d896503352fb92e2351b0b17ac172bbdb70ebf/types_defusedxml-0.7.0.20260402.tar.gz", hash = "sha256:4cc91b225e77c7fcf88b3fb7d821a37fb4e14530727c790b6b8a19f2968d6074", size = 10604, upload-time = "2026-04-02T04:19:00.265Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/13/73/8a36998cee9d7c9702ed64a31f0866c7f192ecffc22771d44dbcc7878f18/types_defusedxml-0.7.0.20250822-py3-none-any.whl", hash = "sha256:5ee219f8a9a79c184773599ad216123aedc62a969533ec36737ec98601f20dcf", size = 13430, upload-time = "2025-08-22T03:02:58.466Z" },
{ url = "https://files.pythonhosted.org/packages/ad/4e/68f85712dfbcc929c54d57e9b0e7503c198fa65896cae2f6337840ab1cc5/types_defusedxml-0.7.0.20260402-py3-none-any.whl", hash = "sha256:200f3cb340c3c576adeb28cf365399e9bb059b34662b86ad4617692284c98bdb", size = 13434, upload-time = "2026-04-02T04:18:59.263Z" },
]
[[package]]
name = "types-deprecated"
version = "1.3.1.20260130"
version = "1.3.1.20260402"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b5/97/9924e496f88412788c432891cacd041e542425fe0bffff4143a7c1c89ac4/types_deprecated-1.3.1.20260130.tar.gz", hash = "sha256:726b05e5e66d42359b1d6631835b15de62702588c8a59b877aa4b1e138453450", size = 8455, upload-time = "2026-01-30T03:58:17.401Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e2/ff/7e237c5118c1bd15e5205789901f7e01db232b0c61ca7c7c05de0394f5da/types_deprecated-1.3.1.20260402.tar.gz", hash = "sha256:00828ef7dce735d778583d00611f97da05b86b783ee14b0f22af2f945363cd12", size = 8481, upload-time = "2026-04-02T04:18:28.704Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d2/b2/6f920582af7efcd37165cd6321707f3ad5839dd24565a8a982f2bd9c6fd1/types_deprecated-1.3.1.20260130-py3-none-any.whl", hash = "sha256:593934d85c38ca321a9d301f00c42ffe13e4cf830b71b10579185ba0ce172d9a", size = 9077, upload-time = "2026-01-30T03:58:16.633Z" },
{ url = "https://files.pythonhosted.org/packages/ed/3c/59aa775db5f69eba978390c33e1fd617817381cd87424ac1cff4bf2fb6c5/types_deprecated-1.3.1.20260402-py3-none-any.whl", hash = "sha256:ddf1813bd99cd1c00358cb0cb079878fdaa74509e7e482b79627f74f768f31a9", size = 9077, upload-time = "2026-04-02T04:18:27.867Z" },
]
[[package]]
@@ -6048,40 +6051,40 @@ wheels = [
[[package]]
name = "types-flask-cors"
version = "6.0.0.20250809"
version = "6.0.0.20260402"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "flask" },
]
sdist = { url = "https://files.pythonhosted.org/packages/45/e0/e5dd841bf475765fb61cb04c1e70d2fd0675a0d4ddfacd50a333eafe7267/types_flask_cors-6.0.0.20250809.tar.gz", hash = "sha256:24380a2b82548634c0931d50b9aafab214eea9f85dcc04f15ab1518752a7e6aa", size = 9951, upload-time = "2025-08-09T03:16:37.454Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b5/59/84d8ed3801cbf28876067387e1055467e94e3dd404e93e35fe2ec5e46729/types_flask_cors-6.0.0.20260402.tar.gz", hash = "sha256:57350b504328df7ec13a12599e67939189cb644c5d0efec9af80ed03c592052c", size = 10126, upload-time = "2026-04-02T04:20:57.954Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9f/5e/1e60c29eb5796233d4d627ca4979c4ae8da962fd0aae0cdb6e3e6a807bbc/types_flask_cors-6.0.0.20250809-py3-none-any.whl", hash = "sha256:f6d660dddab946779f4263cb561bffe275d86cb8747ce02e9fec8d340780131b", size = 9971, upload-time = "2025-08-09T03:16:36.593Z" },
{ url = "https://files.pythonhosted.org/packages/51/71/d86f7644a18a8ccdddf50b9969fc94abbecd0ac52594880dc5667ca53e5e/types_flask_cors-6.0.0.20260402-py3-none-any.whl", hash = "sha256:e018d34946c110f5acfa71cc708ec66b47c4292131647e54889600c20892ca26", size = 9990, upload-time = "2026-04-02T04:20:57.12Z" },
]
[[package]]
name = "types-flask-migrate"
version = "4.1.0.20250809"
version = "4.1.0.20260402"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "flask" },
{ name = "flask-sqlalchemy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d5/d1/d11799471725b7db070c4f1caa3161f556230d4fb5dad76d23559da1be4d/types_flask_migrate-4.1.0.20250809.tar.gz", hash = "sha256:fdf97a262c86aca494d75874a2374e84f2d37bef6467d9540fa3b054b67db04e", size = 8636, upload-time = "2025-08-09T03:17:03.957Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a8/85/291317e13f72d5b2b6c1fe2c59c77a45d07bb225bf5bb2768da6a7b96351/types_flask_migrate-4.1.0.20260402.tar.gz", hash = "sha256:8e0062f063ecbe5c73b53ffc1e86f4d6de5ab970142c7d2dea939c5680ba817a", size = 8717, upload-time = "2026-04-02T04:21:45.77Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b4/53/f5fd40fb6c21c1f8e7da8325f3504492d027a7921d5c80061cd434c3a0fc/types_flask_migrate-4.1.0.20250809-py3-none-any.whl", hash = "sha256:92ad2c0d4000a53bf1e2f7813dd067edbbcc4c503961158a763e2b0ae297555d", size = 8648, upload-time = "2025-08-09T03:17:02.952Z" },
{ url = "https://files.pythonhosted.org/packages/d4/d9/716b9cb9fca0f87e95f573e21e5ffe83d1cf9919ceb2e1cca8bc71488746/types_flask_migrate-4.1.0.20260402-py3-none-any.whl", hash = "sha256:6989d40d3cfae1c5f70c8f20ba39e714949b633329cc23b2dd00e82fd5b07d1c", size = 8669, upload-time = "2026-04-02T04:21:44.967Z" },
]
[[package]]
name = "types-gevent"
version = "25.9.0.20260322"
version = "25.9.0.20260402"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "types-greenlet" },
{ name = "types-psutil" },
]
sdist = { url = "https://files.pythonhosted.org/packages/34/f0/14a99ddcaa69b559fa7cec8c9de880b792bebb0b848ae865d94ea9058533/types_gevent-25.9.0.20260322.tar.gz", hash = "sha256:91257920845762f09753c08aa20fad1743ac13d2de8bcf23f4b8fe967d803732", size = 38241, upload-time = "2026-03-22T04:08:55.213Z" }
sdist = { url = "https://files.pythonhosted.org/packages/1c/2f/a2056079f14aeacf538b51b0e6585328c3584fa8e6f4758214c9773ea4b0/types_gevent-25.9.0.20260402.tar.gz", hash = "sha256:24297e6f5733e187a517f08dde6df7b2147e14f7de4d343148f410dffebb5381", size = 38270, upload-time = "2026-04-02T04:22:00.125Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/89/0f/964440b57eb4ddb4aca03479a4093852e1ce79010d1c5967234e6f5d6bd9/types_gevent-25.9.0.20260322-py3-none-any.whl", hash = "sha256:21b3c269b3a20ecb0e4668289c63b97d21694d84a004ab059c1e32ab970eacc2", size = 55500, upload-time = "2026-03-22T04:08:54.103Z" },
{ url = "https://files.pythonhosted.org/packages/9e/2f/995920b5cc58bc9041ded8ea2fda32719f6c513bc6e43a0c5234780936db/types_gevent-25.9.0.20260402-py3-none-any.whl", hash = "sha256:178ba12e426c987dd69ef0b8ce9f1095a965103a0d673294831f49f7127bc5ba", size = 55494, upload-time = "2026-04-02T04:21:59.144Z" },
]
[[package]]
@@ -6095,14 +6098,14 @@ wheels = [
[[package]]
name = "types-html5lib"
version = "1.1.11.20251117"
version = "1.1.11.20260402"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "types-webencodings" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c8/f3/d9a1bbba7b42b5558a3f9fe017d967f5338cf8108d35991d9b15fdea3e0d/types_html5lib-1.1.11.20251117.tar.gz", hash = "sha256:1a6a3ac5394aa12bf547fae5d5eff91dceec46b6d07c4367d9b39a37f42f201a", size = 18100, upload-time = "2025-11-17T03:08:00.78Z" }
sdist = { url = "https://files.pythonhosted.org/packages/13/95/74eabb3bd0bb2f2b3a8ba56a55e87ee4b76f2b39e2a690eca399deffc837/types_html5lib-1.1.11.20260402.tar.gz", hash = "sha256:a167a30b9619a6eea82ec8b8948044859e033966a4721db34187d647c3a6c1f3", size = 18268, upload-time = "2026-04-02T04:21:56.528Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f0/ab/f5606db367c1f57f7400d3cb3bead6665ee2509621439af1b29c35ef6f9e/types_html5lib-1.1.11.20251117-py3-none-any.whl", hash = "sha256:2a3fc935de788a4d2659f4535002a421e05bea5e172b649d33232e99d4272d08", size = 24302, upload-time = "2025-11-17T03:07:59.996Z" },
{ url = "https://files.pythonhosted.org/packages/79/a9/fac9d4313b1851620610f46d086ba288482c0d5384ebf6feafb5bc4bdd15/types_html5lib-1.1.11.20260402-py3-none-any.whl", hash = "sha256:245d02cf53ef62d7342268c53dbc2af2d200849feec03f77f5909655cb54ab0d", size = 24314, upload-time = "2026-04-02T04:21:55.659Z" },
]
[[package]]
@@ -6152,11 +6155,11 @@ wheels = [
[[package]]
name = "types-openpyxl"
version = "3.1.5.20260322"
version = "3.1.5.20260402"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/77/bf/15240de4d68192d2a1f385ef2f6f1ecb29b85d2f3791dd2e2d5b980be30f/types_openpyxl-3.1.5.20260322.tar.gz", hash = "sha256:a61d66ebe1e49697853c6db8e0929e1cda2c96755e71fb676ed7fc48dfdcf697", size = 101325, upload-time = "2026-03-22T04:08:40.426Z" }
sdist = { url = "https://files.pythonhosted.org/packages/6b/8f/d9daf094e0bb468b26e74c1bf9e0170e58c3f16e583d244e9f32078b6bcc/types_openpyxl-3.1.5.20260402.tar.gz", hash = "sha256:855ad28d47c0965048082dfca424d6ebd54d8861d72abcee9106ba5868899e7f", size = 101310, upload-time = "2026-04-02T04:17:37.6Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/bf/b4/c14191b30bcb266365b124b2bb4e67ecd68425a78ba77ee026f33667daa9/types_openpyxl-3.1.5.20260322-py3-none-any.whl", hash = "sha256:2f515f0b0bbfb04bfb587de34f7522d90b5151a8da7bbbd11ecec4ca40f64238", size = 166102, upload-time = "2026-03-22T04:08:39.174Z" },
{ url = "https://files.pythonhosted.org/packages/58/ee/a0b22012076cf23b73fbb82d9c40843cbf6b1d228d7a2dc883da0a905a16/types_openpyxl-3.1.5.20260402-py3-none-any.whl", hash = "sha256:1d149989f0aad4e2074e96b87a045136399e27bc2a33cfefcd0eb4cad8ea5b4c", size = 166046, upload-time = "2026-04-02T04:17:36.162Z" },
]
[[package]]
@@ -6170,20 +6173,20 @@ wheels = [
[[package]]
name = "types-protobuf"
version = "6.32.1.20260221"
version = "7.34.1.20260403"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/5f/e2/9aa4a3b2469508bd7b4e2ae11cbedaf419222a09a1b94daffcd5efca4023/types_protobuf-6.32.1.20260221.tar.gz", hash = "sha256:6d5fb060a616bfb076cbb61b4b3c3969f5fc8bec5810f9a2f7e648ee5cbcbf6e", size = 64408, upload-time = "2026-02-21T03:55:13.916Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ae/b3/c2e407ea36e0e4355c135127cee1b88a2cc9a2c92eafca50a360ab9f2708/types_protobuf-7.34.1.20260403.tar.gz", hash = "sha256:8d7881867888e667eb9563c08a916fccdc12bdb5f9f34c31d217cce876e36765", size = 68782, upload-time = "2026-04-03T04:18:09.428Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2e/e8/1fd38926f9cf031188fbc5a96694203ea6f24b0e34bd64a225ec6f6291ba/types_protobuf-6.32.1.20260221-py3-none-any.whl", hash = "sha256:da7cdd947975964a93c30bfbcc2c6841ee646b318d3816b033adc2c4eb6448e4", size = 77956, upload-time = "2026-02-21T03:55:12.894Z" },
{ url = "https://files.pythonhosted.org/packages/7d/95/24fb0f6fe37b41cf94f9b9912712645e17d8048d4becaf37c1607ddd8e32/types_protobuf-7.34.1.20260403-py3-none-any.whl", hash = "sha256:16d9bbca52ab0f306279958878567df2520f3f5579059419b0ce149a0ad1e332", size = 86011, upload-time = "2026-04-03T04:18:08.245Z" },
]
[[package]]
name = "types-psutil"
version = "7.2.2.20260130"
version = "7.2.2.20260402"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/69/14/fc5fb0a6ddfadf68c27e254a02ececd4d5c7fdb0efcb7e7e917a183497fb/types_psutil-7.2.2.20260130.tar.gz", hash = "sha256:15b0ab69c52841cf9ce3c383e8480c620a4d13d6a8e22b16978ebddac5590950", size = 26535, upload-time = "2026-01-30T03:58:14.116Z" }
sdist = { url = "https://files.pythonhosted.org/packages/31/a2/a608db0caf0d71bd231305dc3ab3f5d65624d77761003696a3ca8c6fad40/types_psutil-7.2.2.20260402.tar.gz", hash = "sha256:9f36eebf15ad8487f8004ed67c8e008b84b63ba00cfb709a3f60275058217329", size = 26522, upload-time = "2026-04-02T04:18:47.916Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/17/d7/60974b7e31545d3768d1770c5fe6e093182c3bfd819429b33133ba6b3e89/types_psutil-7.2.2.20260130-py3-none-any.whl", hash = "sha256:15523a3caa7b3ff03ac7f9b78a6470a59f88f48df1d74a39e70e06d2a99107da", size = 32876, upload-time = "2026-01-30T03:58:13.172Z" },
{ url = "https://files.pythonhosted.org/packages/81/8a/f4b3ca3154e8a77df91eb7a28c208af721d48f8a4aca667f582523a0beff/types_psutil-7.2.2.20260402-py3-none-any.whl", hash = "sha256:653d1fd908e68cc0666754b16a0cee28efbded0c401caa5314d2aeea67f227cd", size = 32860, upload-time = "2026-04-02T04:18:46.671Z" },
]
[[package]]
@@ -6197,14 +6200,14 @@ wheels = [
[[package]]
name = "types-pygments"
version = "2.19.0.20251121"
version = "2.20.0.20260406"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "types-docutils" },
]
sdist = { url = "https://files.pythonhosted.org/packages/90/3b/cd650700ce9e26b56bd1a6aa4af397bbbc1784e22a03971cb633cdb0b601/types_pygments-2.19.0.20251121.tar.gz", hash = "sha256:eef114fde2ef6265365522045eac0f8354978a566852f69e75c531f0553822b1", size = 18590, upload-time = "2025-11-21T03:03:46.623Z" }
sdist = { url = "https://files.pythonhosted.org/packages/08/bd/d17c28a4c65c556bc4c4bc8f363aa2fbfc91b397e3c0019839d74d9ead31/types_pygments-2.20.0.20260406.tar.gz", hash = "sha256:d3ed7ecd7c34a382459d28ce624b87e1dee03d6844e43aa7590ef4b8c7c9dfce", size = 19486, upload-time = "2026-04-06T04:33:59.632Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/99/8a/9244b21f1d60dcc62e261435d76b02f1853b4771663d7ec7d287e47a9ba9/types_pygments-2.19.0.20251121-py3-none-any.whl", hash = "sha256:cb3bfde34eb75b984c98fb733ce4f795213bd3378f855c32e75b49318371bb25", size = 25674, upload-time = "2025-11-21T03:03:45.72Z" },
{ url = "https://files.pythonhosted.org/packages/eb/00/dca7518e6f99ce0f235ec1c6512593ee4bd25109ae1c912bf9ee836a26e1/types_pygments-2.20.0.20260406-py3-none-any.whl", hash = "sha256:6bb0c79874c304977e1c097f7007140e16fe78c443329154db803d7910d945b3", size = 27278, upload-time = "2026-04-06T04:33:58.744Z" },
]
[[package]]
@@ -6231,11 +6234,11 @@ wheels = [
[[package]]
name = "types-python-dateutil"
version = "2.9.0.20260323"
version = "2.9.0.20260402"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e9/02/f72df9ef5ffc4f959b83cb80c8aa03eb8718a43e563ecd99ccffe265fa89/types_python_dateutil-2.9.0.20260323.tar.gz", hash = "sha256:a107aef5841db41ace381dbbbd7e4945220fc940f7a72172a0be5a92d9ab7164", size = 16897, upload-time = "2026-03-23T04:15:14.829Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a7/30/c5d9efbff5422b20c9551dc5af237d1ab0c3d33729a9b3239a876ca47dd4/types_python_dateutil-2.9.0.20260402.tar.gz", hash = "sha256:a980142b9966713acb382c467e35c5cc4208a2f91b10b8d785a0ae6765df6c0b", size = 16941, upload-time = "2026-04-02T04:18:35.834Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/92/c1/b661838b97453e699a215451f2e22cee750eaaf4ea4619b34bdaf01221a4/types_python_dateutil-2.9.0.20260323-py3-none-any.whl", hash = "sha256:a23a50a07f6eb87e729d4cb0c2eb511c81761eeb3f505db2c1413be94aae8335", size = 18433, upload-time = "2026-03-23T04:15:13.683Z" },
{ url = "https://files.pythonhosted.org/packages/e6/d7/fe753bf8329c8c3c1addcba1d2bf716c33898216757abb24f8b80f82d040/types_python_dateutil-2.9.0.20260402-py3-none-any.whl", hash = "sha256:7827e6a9c93587cc18e766944254d1351a2396262e4abe1510cbbd7601c5e01f", size = 18436, upload-time = "2026-04-02T04:18:34.806Z" },
]
[[package]]
@@ -6249,11 +6252,11 @@ wheels = [
[[package]]
name = "types-pywin32"
version = "311.0.0.20260323"
version = "311.0.0.20260402"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b5/cc/f03ddb7412ac2fc2238358b617c2d5919ba96812dff8d3081f3b2754bb83/types_pywin32-311.0.0.20260323.tar.gz", hash = "sha256:2e8dc6a59fedccbc51b241651ce1e8aa58488934f517debf23a9c6d0ff329b4b", size = 332263, upload-time = "2026-03-23T04:15:20.004Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b3/f0/fc3c923b5d7822f3a93c7b242a69de0e1945e7c153cc5367074621a6509f/types_pywin32-311.0.0.20260402.tar.gz", hash = "sha256:637f041065f02fb49cbaba530ae8cf2e483b5d2c145a9bf97fd084c3e913c7e3", size = 332312, upload-time = "2026-04-02T04:18:52.748Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/dc/82/d786d5d8b846e3cbe1ee52da8945560b111c789b42c3771b2129b312ab94/types_pywin32-311.0.0.20260323-py3-none-any.whl", hash = "sha256:2f2b03fc72ae77ccbb0ee258da0f181c3a38bd8602f6e332e42587b3b0d5f095", size = 395435, upload-time = "2026-03-23T04:15:18.76Z" },
{ url = "https://files.pythonhosted.org/packages/80/0c/a2ee20785df4ebcda6d6ec62d58b7c08a37072f9d00cda4f9548e9c8e5aa/types_pywin32-311.0.0.20260402-py3-none-any.whl", hash = "sha256:4db644fcf40ee85a3ee2551f110d009e427c01569ed4670bb53cfe999df0929f", size = 395413, upload-time = "2026-04-02T04:18:51.529Z" },
]
[[package]]
@@ -6280,11 +6283,11 @@ wheels = [
[[package]]
name = "types-regex"
version = "2026.3.32.20260329"
version = "2026.4.4.20260405"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f2/d8/a3aca5775c573e56d201bbd76a827b84d851a4bce28e189e5acb9c7a0d15/types_regex-2026.3.32.20260329.tar.gz", hash = "sha256:12653e44694cb3e3ccdc39bab3d433d2a83fec1c01220e6871fd6f3cf434675c", size = 13111, upload-time = "2026-03-29T04:27:04.759Z" }
sdist = { url = "https://files.pythonhosted.org/packages/74/9c/dd7b36fe87902a161a69c4a6959e3a6afae09c2c600916beb1aecd300870/types_regex-2026.4.4.20260405.tar.gz", hash = "sha256:993b76a255d9b83fd68eed2fc52b2746be51a93b833796be4fcf9412efa0da51", size = 13143, upload-time = "2026-04-05T04:26:56.614Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/89/f4/a1db307e56753c49fb15fc88d70fadeb3f38897b28cab645cddd18054c79/types_regex-2026.3.32.20260329-py3-none-any.whl", hash = "sha256:861d0893bcfe08a57eb7486a502014e29dc2721d46dd5130798fbccafdb31cc0", size = 11128, upload-time = "2026-03-29T04:27:03.854Z" },
{ url = "https://files.pythonhosted.org/packages/51/83/5dbae203616699890efcdb2a2670d62baf5ed93634f75d793157f1edefb3/types_regex-2026.4.4.20260405-py3-none-any.whl", hash = "sha256:40443cb88c43b9940dd4c904e251be7e65dab3798b2cf6f5ff19501ae99b2ab5", size = 11119, upload-time = "2026-04-05T04:26:55.636Z" },
]
[[package]]
@@ -6310,32 +6313,32 @@ wheels = [
[[package]]
name = "types-setuptools"
version = "82.0.0.20260210"
version = "82.0.0.20260402"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4b/90/796ac8c774a7f535084aacbaa6b7053d16fff5c630eff87c3ecff7896c37/types_setuptools-82.0.0.20260210.tar.gz", hash = "sha256:d9719fbbeb185254480ade1f25327c4654f8c00efda3fec36823379cebcdee58", size = 44768, upload-time = "2026-02-10T04:22:02.107Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e9/f8/74f8a76b4311e70772c0df8f2d432040a3b0facd7bcce6b72b0b26e1746b/types_setuptools-82.0.0.20260402.tar.gz", hash = "sha256:63d2b10ba7958396ad79bbc24d2f6311484e452daad4637ffd40407983a27069", size = 44805, upload-time = "2026-04-02T04:17:49.229Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3e/54/3489432b1d9bc713c9d8aa810296b8f5b0088403662959fb63a8acdbd4fc/types_setuptools-82.0.0.20260210-py3-none-any.whl", hash = "sha256:5124a7daf67f195c6054e0f00f1d97c69caad12fdcf9113eba33eff0bce8cd2b", size = 68433, upload-time = "2026-02-10T04:22:00.876Z" },
{ url = "https://files.pythonhosted.org/packages/0e/e9/22451997f70ac2c5f18dc5f988750c986011fb049d9021767277119e63fa/types_setuptools-82.0.0.20260402-py3-none-any.whl", hash = "sha256:4b9a9f6c3c4c65107a3956ad6a6acbccec38e398ff6d5f78d5df7f103dadb8d6", size = 68429, upload-time = "2026-04-02T04:17:48.11Z" },
]
[[package]]
name = "types-shapely"
version = "2.1.0.20250917"
version = "2.1.0.20260402"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fa/19/7f28b10994433d43b9caa66f3b9bd6a0a9192b7ce8b5a7fc41534e54b821/types_shapely-2.1.0.20250917.tar.gz", hash = "sha256:5c56670742105aebe40c16414390d35fcaa55d6f774d328c1a18273ab0e2134a", size = 26363, upload-time = "2025-09-17T02:47:44.604Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a3/f7/46e95b09434105d7b772d05657495f2900bae8e108fdf4e6d8b5902aa28c/types_shapely-2.1.0.20260402.tar.gz", hash = "sha256:0eb592328170433b4724430a64c309bf07ba69d5d11489d3dba21382d78f5297", size = 26481, upload-time = "2026-04-02T04:20:03.104Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e5/a9/554ac40810e530263b6163b30a2b623bc16aae3fb64416f5d2b3657d0729/types_shapely-2.1.0.20250917-py3-none-any.whl", hash = "sha256:9334a79339504d39b040426be4938d422cec419168414dc74972aa746a8bf3a1", size = 37813, upload-time = "2025-09-17T02:47:43.788Z" },
{ url = "https://files.pythonhosted.org/packages/14/3a/1aa3a62f5b85d4a9e649e7b42842a9e5503fef7eb50c480137a6b94f8bb1/types_shapely-2.1.0.20260402-py3-none-any.whl", hash = "sha256:8d70a16f615a104fd8abdd73e684d4e83b9dedf31d6432ecf86945b5ef0e35de", size = 37817, upload-time = "2026-04-02T04:20:02.17Z" },
]
[[package]]
name = "types-simplejson"
version = "3.20.0.20250822"
version = "3.20.0.20260402"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/df/6b/96d43a90cd202bd552cdd871858a11c138fe5ef11aeb4ed8e8dc51389257/types_simplejson-3.20.0.20250822.tar.gz", hash = "sha256:2b0bfd57a6beed3b932fd2c3c7f8e2f48a7df3978c9bba43023a32b3741a95b0", size = 10608, upload-time = "2025-08-22T03:03:35.36Z" }
sdist = { url = "https://files.pythonhosted.org/packages/94/93/2ff2f4b8ccd942ee3a4b62c013d2c1779e416d303950060ed8b3f1a4fc11/types_simplejson-3.20.0.20260402.tar.gz", hash = "sha256:ee2bbf65830fe93270a1c0406f3474c952fe1232532c7b6f3eb9500edb308c5a", size = 10650, upload-time = "2026-04-02T04:19:26.266Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3c/9f/8e2c9e6aee9a2ff34f2ffce6ccd9c26edeef6dfd366fde611dc2e2c00ab9/types_simplejson-3.20.0.20250822-py3-none-any.whl", hash = "sha256:b5e63ae220ac7a1b0bb9af43b9cb8652237c947981b2708b0c776d3b5d8fa169", size = 10417, upload-time = "2025-08-22T03:03:34.485Z" },
{ url = "https://files.pythonhosted.org/packages/2c/2a/7ba2bede9c2b25fb338d0bda9925a23b73a5ac99fd97304ebe067c090e33/types_simplejson-3.20.0.20260402-py3-none-any.whl", hash = "sha256:b3bdef21bc24fee26b80385ffea5163b6b10381089aa619fe2f8f8d3790e6148", size = 10419, upload-time = "2026-04-02T04:19:25.464Z" },
]
[[package]]
@@ -6349,28 +6352,28 @@ wheels = [
[[package]]
name = "types-tensorflow"
version = "2.18.0.20260322"
version = "2.18.0.20260402"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
{ name = "types-protobuf" },
{ name = "types-requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/4a/cb/81dfaa2680031a6e087bcdfaf1c0556371098e229aee541e21c81a381065/types_tensorflow-2.18.0.20260322.tar.gz", hash = "sha256:135dc6ca06cc647a002e1bca5c5c99516fde51efd08e46c48a9b1916fc5df07f", size = 259030, upload-time = "2026-03-22T04:09:14.069Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b9/d9/1ca68336ce7ad8c4a19001fce85f47ffae9d7ac335e5ddd73497b6bfbca4/types_tensorflow-2.18.0.20260402.tar.gz", hash = "sha256:607c4a5895d44c88c7c465410093ee050aa760c3cedab5b9662f475c5e2137d3", size = 259058, upload-time = "2026-04-02T04:22:39.113Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5b/0c/a178061450b640e53577e2c423ad22bf5d3f692f6bfeeb12156d02b531ef/types_tensorflow-2.18.0.20260322-py3-none-any.whl", hash = "sha256:d8776b6daacdb279e64f105f9dcbc0b8e3544b9a2f2eb71ec6ea5955081f65e6", size = 329771, upload-time = "2026-03-22T04:09:12.844Z" },
{ url = "https://files.pythonhosted.org/packages/c1/6c/0ad58c7246a5369ceb2ae16c146ac0684a0827f499a8141fc3d13743c38b/types_tensorflow-2.18.0.20260402-py3-none-any.whl", hash = "sha256:0d4a74921c457ade8f46eb09cf728a1732156678e497ce15a88b9c0c16dc2fe5", size = 329776, upload-time = "2026-04-02T04:22:37.903Z" },
]
[[package]]
name = "types-tqdm"
version = "4.67.3.20260303"
version = "4.67.3.20260402"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "types-requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e1/64/3e7cb0f40c4bf9578098b6873df33a96f7e0de90f3a039e614d22bfde40a/types_tqdm-4.67.3.20260303.tar.gz", hash = "sha256:7bfddb506a75aedb4030fabf4f05c5638c9a3bbdf900d54ec6c82be9034bfb96", size = 18117, upload-time = "2026-03-03T04:03:49.679Z" }
sdist = { url = "https://files.pythonhosted.org/packages/54/42/e9e6688891d8db77b5795ec02b329524170892ff81bec63c4c4ca7425b30/types_tqdm-4.67.3.20260402.tar.gz", hash = "sha256:e0739f3bc5d1c801999a202f0537280aa1bc2e669c49f5be91bfb99376690624", size = 18077, upload-time = "2026-04-02T04:22:23.049Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/37/32/e4a1fce59155c74082f1a42d0ffafa59652bfb8cff35b04d56333877748e/types_tqdm-4.67.3.20260303-py3-none-any.whl", hash = "sha256:459decf677e4b05cef36f9012ef8d6e20578edefb6b78c15bd0b546247eda62d", size = 24572, upload-time = "2026-03-03T04:03:48.913Z" },
{ url = "https://files.pythonhosted.org/packages/4f/73/a6cf75de5be376d7b57ce6c934ae9bc90aa5be6ada4ac50a99ecbdf9763e/types_tqdm-4.67.3.20260402-py3-none-any.whl", hash = "sha256:b5d1a65fe3286e1a855e51ddebf63d3641daf9bad285afd1ec56808eb59df76e", size = 24562, upload-time = "2026-04-02T04:22:22.114Z" },
]
[[package]]

View File

@@ -1358,6 +1358,18 @@ SSRF_POOL_KEEPALIVE_EXPIRY=5.0
# ------------------------------
COMPOSE_PROFILES=${VECTOR_STORE:-weaviate},${DB_TYPE:-postgresql}
# ------------------------------
# Worker health check configuration for worker and worker_beat services.
# Set to false to enable the health check.
# Note: enabling the health check may cause periodic CPU spikes and increased load,
# as it establishes a broker connection and sends a Celery ping on every check interval.
# ------------------------------
COMPOSE_WORKER_HEALTHCHECK_DISABLED=true
# Interval between health checks (e.g. 30s, 1m)
COMPOSE_WORKER_HEALTHCHECK_INTERVAL=30s
# Timeout for each health check (e.g. 30s, 1m)
COMPOSE_WORKER_HEALTHCHECK_TIMEOUT=30s
# ------------------------------
# Docker Compose Service Expose Host Port Configurations
# ------------------------------

View File

@@ -102,11 +102,12 @@ services:
# Mount the storage directory to the container, for storing user files.
- ./volumes/app/storage:/app/api/storage
healthcheck:
test: ["CMD-SHELL", "celery -A celery_entrypoint.celery inspect ping"]
interval: 30s
timeout: 10s
test: ["CMD-SHELL", "celery -A celery_healthcheck.celery inspect ping"]
interval: ${COMPOSE_WORKER_HEALTHCHECK_INTERVAL:-30s}
timeout: ${COMPOSE_WORKER_HEALTHCHECK_TIMEOUT:-30s}
retries: 3
start_period: 60s
disable: ${COMPOSE_WORKER_HEALTHCHECK_DISABLED:-true}
networks:
- ssrf_proxy_network
- default
@@ -139,11 +140,12 @@ services:
redis:
condition: service_started
healthcheck:
test: ["CMD-SHELL", "celery -A app.celery inspect ping"]
interval: 30s
timeout: 10s
test: ["CMD-SHELL", "celery -A celery_healthcheck.celery inspect ping"]
interval: ${COMPOSE_WORKER_HEALTHCHECK_INTERVAL:-30s}
timeout: ${COMPOSE_WORKER_HEALTHCHECK_TIMEOUT:-30s}
retries: 3
start_period: 60s
disable: ${COMPOSE_WORKER_HEALTHCHECK_DISABLED:-true}
networks:
- ssrf_proxy_network
- default

View File

@@ -811,11 +811,12 @@ services:
# Mount the storage directory to the container, for storing user files.
- ./volumes/app/storage:/app/api/storage
healthcheck:
test: ["CMD-SHELL", "celery -A celery_entrypoint.celery inspect ping"]
interval: 30s
timeout: 10s
test: ["CMD-SHELL", "celery -A celery_healthcheck.celery inspect ping"]
interval: ${COMPOSE_WORKER_HEALTHCHECK_INTERVAL:-30s}
timeout: ${COMPOSE_WORKER_HEALTHCHECK_TIMEOUT:-30s}
retries: 3
start_period: 60s
disable: ${COMPOSE_WORKER_HEALTHCHECK_DISABLED:-true}
networks:
- ssrf_proxy_network
- default
@@ -848,11 +849,12 @@ services:
redis:
condition: service_started
healthcheck:
test: ["CMD-SHELL", "celery -A app.celery inspect ping"]
interval: 30s
timeout: 10s
test: ["CMD-SHELL", "celery -A celery_healthcheck.celery inspect ping"]
interval: ${COMPOSE_WORKER_HEALTHCHECK_INTERVAL:-30s}
timeout: ${COMPOSE_WORKER_HEALTHCHECK_TIMEOUT:-30s}
retries: 3
start_period: 60s
disable: ${COMPOSE_WORKER_HEALTHCHECK_DISABLED:-true}
networks:
- ssrf_proxy_network
- default

View File

@@ -3,6 +3,20 @@ import os
import re
import sys
# Variables that exist only for Docker Compose orchestration and must NOT be
# injected into containers as environment variables.
SHARED_ENV_EXCLUDE = frozenset(
[
# Docker Compose profile selection
"COMPOSE_PROFILES",
# Worker health check orchestration flags (consumed by docker-compose,
# not by the application running inside the container)
"COMPOSE_WORKER_HEALTHCHECK_DISABLED",
"COMPOSE_WORKER_HEALTHCHECK_INTERVAL",
"COMPOSE_WORKER_HEALTHCHECK_TIMEOUT",
]
)
def parse_env_example(file_path):
"""
@@ -37,7 +51,7 @@ def generate_shared_env_block(env_vars, anchor_name="shared-api-worker-env"):
"""
lines = [f"x-shared-env: &{anchor_name}"]
for key, default in env_vars.items():
if key == "COMPOSE_PROFILES":
if key in SHARED_ENV_EXCLUDE:
continue
# If default value is empty, use ${KEY:-}
if default == "":
@@ -54,6 +68,7 @@ def insert_shared_env(template_path, output_path, shared_env_block, header_comme
"""
Inserts the shared environment variables block and header comments into the template file,
removing any existing x-shared-env anchors, and generates the final docker-compose.yaml file.
Always writes with LF line endings.
"""
with open(template_path, "r", encoding="utf-8") as f:
template_content = f.read()
@@ -69,7 +84,7 @@ def insert_shared_env(template_path, output_path, shared_env_block, header_comme
# Prepare the final content with header comments and shared env block
final_content = f"{header_comments}\n{shared_env_block}\n\n{template_content}"
with open(output_path, "w", encoding="utf-8") as f:
with open(output_path, "w", encoding="utf-8", newline="\n") as f:
f.write(final_content)
print(f"Generated {output_path}")

View File

@@ -25,6 +25,7 @@ COPY package.json pnpm-lock.yaml pnpm-workspace.yaml /app/
COPY web/package.json /app/web/
COPY e2e/package.json /app/e2e/
COPY sdks/nodejs-client/package.json /app/sdks/nodejs-client/
COPY packages /app/packages
# Use packageManager from package.json
RUN corepack install

View File

@@ -7,6 +7,9 @@
!web/**
!e2e/
!e2e/package.json
!packages/
!packages/**/
!packages/**/package.json
!sdks/
!sdks/nodejs-client/
!sdks/nodejs-client/package.json

View File

@@ -501,6 +501,16 @@ describe('Question component', () => {
expect(onRegenerate).toHaveBeenCalled()
})
it('should render default question avatar icon when questionIcon is not provided', () => {
const { container } = renderWithProvider(
makeItem(),
vi.fn() as unknown as OnRegenerate,
)
const defaultIcon = container.querySelector('.question-default-user-icon')
expect(defaultIcon).toBeInTheDocument()
})
it('should render custom questionIcon when provided', () => {
const { container } = renderWithProvider(
makeItem(),
@@ -509,7 +519,7 @@ describe('Question component', () => {
)
expect(screen.getByTestId('custom-question-icon')).toBeInTheDocument()
const defaultIcon = container.querySelector('.i-custom-public-avatar-user')
const defaultIcon = container.querySelector('.question-default-user-icon')
expect(defaultIcon).not.toBeInTheDocument()
})

View File

@@ -15,6 +15,7 @@ import {
import { useTranslation } from 'react-i18next'
import Textarea from 'react-textarea-autosize'
import { FileList } from '@/app/components/base/file-uploader'
import { User } from '@/app/components/base/icons/src/public/avatar'
import { Markdown } from '@/app/components/base/markdown'
import { cn } from '@/utils/classnames'
import ActionButton from '../../action-button'
@@ -243,7 +244,7 @@ const Question: FC<QuestionProps> = ({
{
questionIcon || (
<div className="h-full w-full rounded-full border-[0.5px] border-black/5">
<div className="i-custom-public-avatar-user h-full w-full" />
<User className="question-default-user-icon h-full w-full" />
</div>
)
}

View File

@@ -38,7 +38,7 @@ Treat this as an escape hatch—fix these errors when time permits.
### The Auto-Fix Workflow and Suppression Strategy
To streamline your development process, we recommend configuring your editor to automatically fix lint errors on save.
As a fallback, any remaining autofixable errors will be corrected automatically when you commit.
As a fallback, the commit hook runs `vp staged`, which applies autofixable ESLint changes to staged files before the commit continues.
To prevent workflow disruptions, these commit hooks are intentionally bypassed when you are merging branches, rebasing, or cherry-picking.
Additionally, we currently track many existing legacy errors in eslint-suppressions.json.