diff --git a/api/.env.example b/api/.env.example index 71953fdc1e..f25b4d24cd 100644 --- a/api/.env.example +++ b/api/.env.example @@ -718,6 +718,7 @@ ANNOTATION_IMPORT_MAX_CONCURRENT=5 # Sandbox expired records clean configuration SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21 SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000 +SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200 SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30 SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000 diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index d5ad105d8f..4e3673f768 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -1371,6 +1371,10 @@ class SandboxExpiredRecordsCleanConfig(BaseSettings): description="Maximum number of records to process in each batch", default=1000, ) + SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: PositiveInt = Field( + description="Maximum interval in milliseconds between batches", + default=200, + ) SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: PositiveInt = Field( description="Retention days for sandbox expired workflow_run records and message records", default=30, diff --git a/api/controllers/console/remote_files.py b/api/controllers/console/remote_files.py index 88a9ce3a79..b7a2f230e1 100644 --- a/api/controllers/console/remote_files.py +++ b/api/controllers/console/remote_files.py @@ -1,6 +1,7 @@ import urllib.parse import httpx +from flask_restx import Resource from pydantic import BaseModel, Field import services @@ -10,12 +11,12 @@ from controllers.common.errors import ( RemoteFileUploadError, UnsupportedFileTypeError, ) -from controllers.fastopenapi import console_router +from controllers.console import console_ns from core.file import helpers as file_helpers from core.helper import ssrf_proxy from extensions.ext_database import db from fields.file_fields import FileWithSignedUrl, RemoteFileInfo -from libs.login import current_account_with_tenant +from libs.login import current_account_with_tenant, login_required from services.file_service import FileService @@ -23,69 +24,73 @@ class RemoteFileUploadPayload(BaseModel): url: str = Field(..., description="URL to fetch") -@console_router.get( - "/remote-files/", - response_model=RemoteFileInfo, - tags=["console"], -) -def get_remote_file_info(url: str) -> RemoteFileInfo: - decoded_url = urllib.parse.unquote(url) - resp = ssrf_proxy.head(decoded_url) - if resp.status_code != httpx.codes.OK: - resp = ssrf_proxy.get(decoded_url, timeout=3) - resp.raise_for_status() - return RemoteFileInfo( - file_type=resp.headers.get("Content-Type", "application/octet-stream"), - file_length=int(resp.headers.get("Content-Length", 0)), - ) - - -@console_router.post( - "/remote-files/upload", - response_model=FileWithSignedUrl, - tags=["console"], - status_code=201, -) -def upload_remote_file(payload: RemoteFileUploadPayload) -> FileWithSignedUrl: - url = payload.url - - try: - resp = ssrf_proxy.head(url=url) +@console_ns.route("/remote-files/<path:url>") +class GetRemoteFileInfo(Resource): + @login_required + def get(self, url: str): + decoded_url = urllib.parse.unquote(url) + resp = ssrf_proxy.head(decoded_url) if resp.status_code != httpx.codes.OK: - resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True) - if resp.status_code != httpx.codes.OK: - raise RemoteFileUploadError(f"Failed to fetch file from {url}: {resp.text}") - except httpx.RequestError as e: - raise RemoteFileUploadError(f"Failed to fetch file from {url}: {str(e)}") + resp = ssrf_proxy.get(decoded_url, timeout=3) +
resp.raise_for_status() + return RemoteFileInfo( + file_type=resp.headers.get("Content-Type", "application/octet-stream"), + file_length=int(resp.headers.get("Content-Length", 0)), + ).model_dump(mode="json") - file_info = helpers.guess_file_info_from_response(resp) - if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size): - raise FileTooLargeError +@console_ns.route("/remote-files/upload") +class RemoteFileUpload(Resource): + @login_required + def post(self): + payload = RemoteFileUploadPayload.model_validate(console_ns.payload) + url = payload.url - content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content + # Try to fetch remote file metadata/content first + try: + resp = ssrf_proxy.head(url=url) + if resp.status_code != httpx.codes.OK: + resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True) + if resp.status_code != httpx.codes.OK: + # Normalize into a user-friendly error message expected by tests + raise RemoteFileUploadError(f"Failed to fetch file from {url}: {resp.text}") + except httpx.RequestError as e: + raise RemoteFileUploadError(f"Failed to fetch file from {url}: {str(e)}") - try: - user, _ = current_account_with_tenant() - upload_file = FileService(db.engine).upload_file( - filename=file_info.filename, - content=content, - mimetype=file_info.mimetype, - user=user, - source_url=url, + file_info = helpers.guess_file_info_from_response(resp) + + # Enforce file size limit with 413 (Request Entity Too Large) per tests' expectation + if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size): + raise FileTooLargeError() + + # Load content if needed + content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content + + try: + user, _ = current_account_with_tenant() + upload_file = FileService(db.engine).upload_file( + filename=file_info.filename, + content=content, + mimetype=file_info.mimetype, + user=user, + source_url=url, + ) + except services.errors.file.FileTooLargeError as file_too_large_error: + raise FileTooLargeError(file_too_large_error.description) + except services.errors.file.UnsupportedFileTypeError: + raise UnsupportedFileTypeError() + + # Success: return created resource with 201 status + return ( + FileWithSignedUrl( + id=upload_file.id, + name=upload_file.name, + size=upload_file.size, + extension=upload_file.extension, + url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id), + mime_type=upload_file.mime_type, + created_by=upload_file.created_by, + created_at=int(upload_file.created_at.timestamp()), + ).model_dump(mode="json"), + 201, ) - except services.errors.file.FileTooLargeError as file_too_large_error: - raise FileTooLargeError(file_too_large_error.description) - except services.errors.file.UnsupportedFileTypeError: - raise UnsupportedFileTypeError() - - return FileWithSignedUrl( - id=upload_file.id, - name=upload_file.name, - size=upload_file.size, - extension=upload_file.extension, - url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id), - mime_type=upload_file.mime_type, - created_by=upload_file.created_by, - created_at=int(upload_file.created_at.timestamp()), - ) diff --git a/api/migrations/versions/2026_02_11_1549-fce013ca180e_fix_index_to_optimize_message_clean_job_.py b/api/migrations/versions/2026_02_11_1549-fce013ca180e_fix_index_to_optimize_message_clean_job_.py new file mode 100644 index 0000000000..ed482fbd6d --- /dev/null +++
b/api/migrations/versions/2026_02_11_1549-fce013ca180e_fix_index_to_optimize_message_clean_job_.py @@ -0,0 +1,39 @@ +"""fix index to optimize message clean job performance + +Revision ID: fce013ca180e +Revises: f55813ffe2c8 +Create Date: 2026-02-11 15:49:17.603638 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'fce013ca180e' +down_revision = 'f55813ffe2c8' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('message_created_at_idx')) + + with op.batch_alter_table('saved_messages', schema=None) as batch_op: + batch_op.create_index('saved_message_message_id_idx', ['message_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('saved_messages', schema=None) as batch_op: + batch_op.drop_index('saved_message_message_id_idx') + + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.create_index(batch_op.f('message_created_at_idx'), ['created_at'], unique=False) + + # ### end Alembic commands ### diff --git a/api/models/model.py b/api/models/model.py index a105342b0a..c30de64d58 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -1042,7 +1042,6 @@ class Message(Base): Index("message_end_user_idx", "app_id", "from_source", "from_end_user_id"), Index("message_account_idx", "app_id", "from_source", "from_account_id"), Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"), - Index("message_created_at_idx", "created_at"), Index("message_app_mode_idx", "app_mode"), Index("message_created_at_id_idx", "created_at", "id"), ) diff --git a/api/models/web.py b/api/models/web.py index b2832aa163..5f6a7b40bf 100644 --- a/api/models/web.py +++ b/api/models/web.py @@ -16,6 +16,7 @@ class SavedMessage(TypeBase): __table_args__ = ( sa.PrimaryKeyConstraint("id", name="saved_message_pkey"), sa.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"), + sa.Index("saved_message_message_id_idx", "message_id"), ) id: Mapped[str] = mapped_column( diff --git a/api/pyproject.toml b/api/pyproject.toml index 327c5bda87..5aac2ae69f 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "dify-api" -version = "1.12.1" +version = "1.13.0" requires-python = ">=3.11,<3.13" dependencies = [ @@ -25,7 +25,7 @@ dependencies = [ "gevent-websocket~=0.10.1", "gmpy2~=2.2.1", "google-api-core==2.18.0", - "google-api-python-client==2.90.0", + "google-api-python-client==2.189.0", "google-auth==2.29.0", "google-auth-httplib2==0.2.0", "google-cloud-aiplatform==1.49.0", diff --git a/api/services/retention/conversation/messages_clean_service.py b/api/services/retention/conversation/messages_clean_service.py index 3ca5d82860..f7836a2b14 100644 --- a/api/services/retention/conversation/messages_clean_service.py +++ b/api/services/retention/conversation/messages_clean_service.py @@ -1,10 +1,13 @@ import datetime import logging +import os import random +import time from collections.abc import Sequence from typing import cast -from sqlalchemy import delete, select +import sqlalchemy as sa +from sqlalchemy import delete, select, tuple_ from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session @@ -193,11 +196,15 @@ class 
MessagesCleanService: self._end_before, ) + max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200)) + while True: stats["batches"] += 1 + batch_start = time.monotonic() # Step 1: Fetch a batch of messages using cursor with Session(db.engine, expire_on_commit=False) as session: + fetch_messages_start = time.monotonic() msg_stmt = ( select(Message.id, Message.app_id, Message.created_at) .where(Message.created_at < self._end_before) @@ -209,13 +216,13 @@ class MessagesCleanService: msg_stmt = msg_stmt.where(Message.created_at >= self._start_from) # Apply cursor condition: (created_at, id) > (last_created_at, last_message_id) - # This translates to: - # created_at > last_created_at OR (created_at = last_created_at AND id > last_message_id) if _cursor: - # Continuing from previous batch msg_stmt = msg_stmt.where( - (Message.created_at > _cursor[0]) - | ((Message.created_at == _cursor[0]) & (Message.id > _cursor[1])) + tuple_(Message.created_at, Message.id) + > tuple_( + sa.literal(_cursor[0], type_=sa.DateTime()), + sa.literal(_cursor[1], type_=Message.id.type), + ) ) raw_messages = list(session.execute(msg_stmt).all()) @@ -223,6 +230,12 @@ class MessagesCleanService: SimpleMessage(id=msg_id, app_id=app_id, created_at=msg_created_at) for msg_id, app_id, msg_created_at in raw_messages ] + logger.info( + "clean_messages (batch %s): fetched %s messages in %sms", + stats["batches"], + len(messages), + int((time.monotonic() - fetch_messages_start) * 1000), + ) # Track total messages fetched across all batches stats["total_messages"] += len(messages) @@ -241,8 +254,16 @@ class MessagesCleanService: logger.info("clean_messages (batch %s): no app_ids found, skip", stats["batches"]) continue + fetch_apps_start = time.monotonic() app_stmt = select(App.id, App.tenant_id).where(App.id.in_(app_ids)) apps = list(session.execute(app_stmt).all()) + logger.info( + "clean_messages (batch %s): fetched %s apps for %s app_ids in %sms", + stats["batches"], + len(apps), + len(app_ids), + int((time.monotonic() - fetch_apps_start) * 1000), + ) if not apps: logger.info("clean_messages (batch %s): no apps found, skip", stats["batches"]) @@ -252,7 +273,15 @@ class MessagesCleanService: app_to_tenant: dict[str, str] = {app.id: app.tenant_id for app in apps} # Step 3: Delegate to policy to determine which messages to delete + policy_start = time.monotonic() message_ids_to_delete = self._policy.filter_message_ids(messages, app_to_tenant) + logger.info( + "clean_messages (batch %s): policy selected %s/%s messages in %sms", + stats["batches"], + len(message_ids_to_delete), + len(messages), + int((time.monotonic() - policy_start) * 1000), + ) if not message_ids_to_delete: logger.info("clean_messages (batch %s): no messages to delete, skip", stats["batches"]) @@ -263,14 +292,20 @@ class MessagesCleanService: # Step 4: Batch delete messages and their relations if not self._dry_run: with Session(db.engine, expire_on_commit=False) as session: + delete_relations_start = time.monotonic() # Delete related records first self._batch_delete_message_relations(session, message_ids_to_delete) + delete_relations_ms = int((time.monotonic() - delete_relations_start) * 1000) # Delete messages + delete_messages_start = time.monotonic() delete_stmt = delete(Message).where(Message.id.in_(message_ids_to_delete)) delete_result = cast(CursorResult, session.execute(delete_stmt)) messages_deleted = delete_result.rowcount + delete_messages_ms = int((time.monotonic() - delete_messages_start) * 1000) + 
commit_start = time.monotonic() session.commit() + commit_ms = int((time.monotonic() - commit_start) * 1000) stats["total_deleted"] += messages_deleted @@ -280,6 +315,19 @@ class MessagesCleanService: len(messages), messages_deleted, ) + logger.info( + "clean_messages (batch %s): relations %sms, messages %sms, commit %sms, batch total %sms", + stats["batches"], + delete_relations_ms, + delete_messages_ms, + commit_ms, + int((time.monotonic() - batch_start) * 1000), + ) + + # Random sleep between batches to avoid overwhelming the database + sleep_ms = random.uniform(0, max_batch_interval_ms) # noqa: S311 + logger.info("clean_messages (batch %s): sleeping for %.2fms", stats["batches"], sleep_ms) + time.sleep(sleep_ms / 1000) else: # Log random sample of message IDs that would be deleted (up to 10) sample_size = min(10, len(message_ids_to_delete)) diff --git a/api/services/retention/workflow_run/clear_free_plan_expired_workflow_run_logs.py b/api/services/retention/workflow_run/clear_free_plan_expired_workflow_run_logs.py index c3e0dce399..2c94cb5324 100644 --- a/api/services/retention/workflow_run/clear_free_plan_expired_workflow_run_logs.py +++ b/api/services/retention/workflow_run/clear_free_plan_expired_workflow_run_logs.py @@ -1,5 +1,8 @@ import datetime import logging +import os +import random +import time from collections.abc import Iterable, Sequence import click @@ -72,7 +75,12 @@ class WorkflowRunCleanup: batch_index = 0 last_seen: tuple[datetime.datetime, str] | None = None + max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200)) + while True: + batch_start = time.monotonic() + + fetch_start = time.monotonic() run_rows = self.workflow_run_repo.get_runs_batch_by_time_range( start_from=self.window_start, end_before=self.window_end, @@ -80,12 +88,30 @@ class WorkflowRunCleanup: batch_size=self.batch_size, ) if not run_rows: + logger.info("workflow_run_cleanup (batch #%s): no more rows to process", batch_index + 1) break batch_index += 1 last_seen = (run_rows[-1].created_at, run_rows[-1].id) + logger.info( + "workflow_run_cleanup (batch #%s): fetched %s rows in %sms", + batch_index, + len(run_rows), + int((time.monotonic() - fetch_start) * 1000), + ) + tenant_ids = {row.tenant_id for row in run_rows} + + filter_start = time.monotonic() free_tenants = self._filter_free_tenants(tenant_ids) + logger.info( + "workflow_run_cleanup (batch #%s): filtered %s free tenants from %s tenants in %sms", + batch_index, + len(free_tenants), + len(tenant_ids), + int((time.monotonic() - filter_start) * 1000), + ) + free_runs = [row for row in run_rows if row.tenant_id in free_tenants] paid_or_skipped = len(run_rows) - len(free_runs) @@ -104,11 +130,17 @@ class WorkflowRunCleanup: total_runs_targeted += len(free_runs) if self.dry_run: + count_start = time.monotonic() batch_counts = self.workflow_run_repo.count_runs_with_related( free_runs, count_node_executions=self._count_node_executions, count_trigger_logs=self._count_trigger_logs, ) + logger.info( + "workflow_run_cleanup (batch #%s, dry_run): counted related records in %sms", + batch_index, + int((time.monotonic() - count_start) * 1000), + ) if related_totals is not None: for key in related_totals: related_totals[key] += batch_counts.get(key, 0) @@ -120,14 +152,21 @@ class WorkflowRunCleanup: fg="yellow", ) ) + logger.info( + "workflow_run_cleanup (batch #%s, dry_run): batch total %sms", + batch_index, + int((time.monotonic() - batch_start) * 1000), + ) continue try: + delete_start = time.monotonic() counts 
= self.workflow_run_repo.delete_runs_with_related( free_runs, delete_node_executions=self._delete_node_executions, delete_trigger_logs=self._delete_trigger_logs, ) + delete_ms = int((time.monotonic() - delete_start) * 1000) except Exception: logger.exception("Failed to delete workflow runs batch ending at %s", last_seen[0]) raise @@ -143,6 +182,17 @@ class WorkflowRunCleanup: fg="green", ) ) + logger.info( + "workflow_run_cleanup (batch #%s): delete %sms, batch total %sms", + batch_index, + delete_ms, + int((time.monotonic() - batch_start) * 1000), + ) + + # Random sleep between batches to avoid overwhelming the database + sleep_ms = random.uniform(0, max_batch_interval_ms) # noqa: S311 + logger.info("workflow_run_cleanup (batch #%s): sleeping for %.2fms", batch_index, sleep_ms) + time.sleep(sleep_ms / 1000) if self.dry_run: if self.window_start: diff --git a/api/tests/unit_tests/controllers/console/test_fastopenapi_remote_files.py b/api/tests/unit_tests/controllers/console/test_fastopenapi_remote_files.py index cb2604cf1c..c0a984e216 100644 --- a/api/tests/unit_tests/controllers/console/test_fastopenapi_remote_files.py +++ b/api/tests/unit_tests/controllers/console/test_fastopenapi_remote_files.py @@ -1,92 +1,286 @@ -import builtins +"""Tests for remote file upload API endpoints using Flask-RESTX.""" + +import contextlib from datetime import datetime from types import SimpleNamespace -from unittest.mock import patch +from unittest.mock import Mock, patch import httpx import pytest -from flask import Flask -from flask.views import MethodView - -from extensions import ext_fastopenapi - -if not hasattr(builtins, "MethodView"): - builtins.MethodView = MethodView # type: ignore[attr-defined] +from flask import Flask, g @pytest.fixture def app() -> Flask: + """Create Flask app for testing.""" app = Flask(__name__) app.config["TESTING"] = True + app.config["SECRET_KEY"] = "test-secret-key" return app -def test_console_remote_files_fastopenapi_get_info(app: Flask): - ext_fastopenapi.init_app(app) +@pytest.fixture +def client(app): + """Create test client with console blueprint registered.""" + from controllers.console import bp - response = httpx.Response( - 200, - request=httpx.Request("HEAD", "http://example.com/file.txt"), - headers={"Content-Type": "text/plain", "Content-Length": "10"}, - ) - - with patch("controllers.console.remote_files.ssrf_proxy.head", return_value=response): - client = app.test_client() - encoded_url = "http%3A%2F%2Fexample.com%2Ffile.txt" - resp = client.get(f"/console/api/remote-files/{encoded_url}") - - assert resp.status_code == 200 - assert resp.get_json() == {"file_type": "text/plain", "file_length": 10} + app.register_blueprint(bp) + return app.test_client() -def test_console_remote_files_fastopenapi_upload(app: Flask): - ext_fastopenapi.init_app(app) +@pytest.fixture +def mock_account(): + """Create a mock account for testing.""" + from models import Account - head_response = httpx.Response( - 200, - request=httpx.Request("GET", "http://example.com/file.txt"), - content=b"hello", - ) - file_info = SimpleNamespace( - extension="txt", - size=5, - filename="file.txt", - mimetype="text/plain", - ) - uploaded = SimpleNamespace( - id="file-id", - name="file.txt", - size=5, - extension="txt", - mime_type="text/plain", - created_by="user-id", - created_at=datetime(2024, 1, 1), - ) + account = Mock(spec=Account) + account.id = "test-account-id" + account.current_tenant_id = "test-tenant-id" + return account - with ( - patch("controllers.console.remote_files.db", 
new=SimpleNamespace(engine=object())), - patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_response), - patch("controllers.console.remote_files.helpers.guess_file_info_from_response", return_value=file_info), - patch("controllers.console.remote_files.FileService.is_file_size_within_limit", return_value=True), - patch("controllers.console.remote_files.FileService.__init__", return_value=None), - patch("controllers.console.remote_files.current_account_with_tenant", return_value=(object(), "tenant-id")), - patch("controllers.console.remote_files.FileService.upload_file", return_value=uploaded), - patch("controllers.console.remote_files.file_helpers.get_signed_file_url", return_value="signed-url"), - ): - client = app.test_client() - resp = client.post( - "/console/api/remote-files/upload", - json={"url": "http://example.com/file.txt"}, + +@pytest.fixture +def auth_ctx(app, mock_account): + """Context manager to set auth/tenant context in flask.g for a request.""" + + @contextlib.contextmanager + def _ctx(): + with app.test_request_context(): + g._login_user = mock_account + g._current_tenant = mock_account.current_tenant_id + yield + + return _ctx + + +class TestGetRemoteFileInfo: + """Test GET /console/api/remote-files/ endpoint.""" + + def test_get_remote_file_info_success(self, app, client, mock_account): + """Test successful retrieval of remote file info.""" + response = httpx.Response( + 200, + request=httpx.Request("HEAD", "http://example.com/file.txt"), + headers={"Content-Type": "text/plain", "Content-Length": "1024"}, ) - assert resp.status_code == 201 - assert resp.get_json() == { - "id": "file-id", - "name": "file.txt", - "size": 5, - "extension": "txt", - "url": "signed-url", - "mime_type": "text/plain", - "created_by": "user-id", - "created_at": int(uploaded.created_at.timestamp()), - } + with ( + patch( + "controllers.console.remote_files.current_account_with_tenant", + return_value=(mock_account, "test-tenant-id"), + ), + patch("controllers.console.remote_files.ssrf_proxy.head", return_value=response), + patch("libs.login.check_csrf_token", return_value=None), + ): + with app.test_request_context(): + g._login_user = mock_account + g._current_tenant = mock_account.current_tenant_id + encoded_url = "http%3A%2F%2Fexample.com%2Ffile.txt" + resp = client.get(f"/console/api/remote-files/{encoded_url}") + + assert resp.status_code == 200 + data = resp.get_json() + assert data["file_type"] == "text/plain" + assert data["file_length"] == 1024 + + def test_get_remote_file_info_fallback_to_get_on_head_failure(self, app, client, mock_account): + """Test fallback to GET when HEAD returns non-200 status.""" + head_response = httpx.Response( + 404, + request=httpx.Request("HEAD", "http://example.com/file.pdf"), + ) + get_response = httpx.Response( + 200, + request=httpx.Request("GET", "http://example.com/file.pdf"), + headers={"Content-Type": "application/pdf", "Content-Length": "2048"}, + ) + + with ( + patch( + "controllers.console.remote_files.current_account_with_tenant", + return_value=(mock_account, "test-tenant-id"), + ), + patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_response), + patch("controllers.console.remote_files.ssrf_proxy.get", return_value=get_response), + patch("libs.login.check_csrf_token", return_value=None), + ): + with app.test_request_context(): + g._login_user = mock_account + g._current_tenant = mock_account.current_tenant_id + encoded_url = "http%3A%2F%2Fexample.com%2Ffile.pdf" + resp = 
client.get(f"/console/api/remote-files/{encoded_url}") + + assert resp.status_code == 200 + data = resp.get_json() + assert data["file_type"] == "application/pdf" + assert data["file_length"] == 2048 + + +class TestRemoteFileUpload: + """Test POST /console/api/remote-files/upload endpoint.""" + + @pytest.mark.parametrize( + ("head_status", "use_get"), + [ + (200, False), # HEAD succeeds + (405, True), # HEAD fails -> fallback GET + ], + ) + def test_upload_remote_file_success_paths(self, client, mock_account, auth_ctx, head_status, use_get): + url = "http://example.com/file.pdf" + head_resp = httpx.Response( + head_status, + request=httpx.Request("HEAD", url), + headers={"Content-Type": "application/pdf", "Content-Length": "1024"}, + ) + get_resp = httpx.Response( + 200, + request=httpx.Request("GET", url), + headers={"Content-Type": "application/pdf", "Content-Length": "1024"}, + content=b"file content", + ) + + file_info = SimpleNamespace( + extension="pdf", + size=1024, + filename="file.pdf", + mimetype="application/pdf", + ) + uploaded_file = SimpleNamespace( + id="uploaded-file-id", + name="file.pdf", + size=1024, + extension="pdf", + mime_type="application/pdf", + created_by="test-account-id", + created_at=datetime(2024, 1, 1, 12, 0, 0), + ) + + with ( + patch( + "controllers.console.remote_files.current_account_with_tenant", + return_value=(mock_account, "test-tenant-id"), + ), + patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_resp) as p_head, + patch("controllers.console.remote_files.ssrf_proxy.get", return_value=get_resp) as p_get, + patch( + "controllers.console.remote_files.helpers.guess_file_info_from_response", + return_value=file_info, + ), + patch( + "controllers.console.remote_files.FileService.is_file_size_within_limit", + return_value=True, + ), + patch("controllers.console.remote_files.db", spec=["engine"]), + patch("controllers.console.remote_files.FileService") as mock_file_service, + patch( + "controllers.console.remote_files.file_helpers.get_signed_file_url", + return_value="http://example.com/signed-url", + ), + patch("libs.login.check_csrf_token", return_value=None), + ): + mock_file_service.return_value.upload_file.return_value = uploaded_file + + with auth_ctx(): + resp = client.post( + "/console/api/remote-files/upload", + json={"url": url}, + ) + + assert resp.status_code == 201 + p_head.assert_called_once() + # GET is used either for fallback (HEAD fails) or to fetch content after HEAD succeeds + p_get.assert_called_once() + mock_file_service.return_value.upload_file.assert_called_once() + + data = resp.get_json() + assert data["id"] == "uploaded-file-id" + assert data["name"] == "file.pdf" + assert data["size"] == 1024 + assert data["extension"] == "pdf" + assert data["url"] == "http://example.com/signed-url" + assert data["mime_type"] == "application/pdf" + assert data["created_by"] == "test-account-id" + + @pytest.mark.parametrize( + ("size_ok", "raises", "expected_status", "expected_msg"), + [ + # When size check fails in controller, API returns 413 with message "File size exceeded..." + (False, None, 413, "file size exceeded"), + # When service raises unsupported type, controller maps to 415 with message "File type not allowed." 
+ (True, "unsupported", 415, "file type not allowed"), + ], + ) + def test_upload_remote_file_errors( + self, client, mock_account, auth_ctx, size_ok, raises, expected_status, expected_msg + ): + url = "http://example.com/x.pdf" + head_resp = httpx.Response( + 200, + request=httpx.Request("HEAD", url), + headers={"Content-Type": "application/pdf", "Content-Length": "9"}, + ) + file_info = SimpleNamespace(extension="pdf", size=9, filename="x.pdf", mimetype="application/pdf") + + with ( + patch( + "controllers.console.remote_files.current_account_with_tenant", + return_value=(mock_account, "test-tenant-id"), + ), + patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_resp), + patch( + "controllers.console.remote_files.helpers.guess_file_info_from_response", + return_value=file_info, + ), + patch( + "controllers.console.remote_files.FileService.is_file_size_within_limit", + return_value=size_ok, + ), + patch("controllers.console.remote_files.db", spec=["engine"]), + patch("libs.login.check_csrf_token", return_value=None), + ): + if raises == "unsupported": + from services.errors.file import UnsupportedFileTypeError + + with patch("controllers.console.remote_files.FileService") as mock_file_service: + mock_file_service.return_value.upload_file.side_effect = UnsupportedFileTypeError("bad") + with auth_ctx(): + resp = client.post( + "/console/api/remote-files/upload", + json={"url": url}, + ) + else: + with auth_ctx(): + resp = client.post( + "/console/api/remote-files/upload", + json={"url": url}, + ) + + assert resp.status_code == expected_status + data = resp.get_json() + msg = (data.get("error") or {}).get("message") or data.get("message", "") + assert expected_msg in msg.lower() + + def test_upload_remote_file_fetch_failure(self, client, mock_account, auth_ctx): + """Test upload when fetching of remote file fails.""" + with ( + patch( + "controllers.console.remote_files.current_account_with_tenant", + return_value=(mock_account, "test-tenant-id"), + ), + patch( + "controllers.console.remote_files.ssrf_proxy.head", + side_effect=httpx.RequestError("Connection failed"), + ), + patch("libs.login.check_csrf_token", return_value=None), + ): + with auth_ctx(): + resp = client.post( + "/console/api/remote-files/upload", + json={"url": "http://unreachable.com/file.pdf"}, + ) + + assert resp.status_code == 400 + data = resp.get_json() + msg = (data.get("error") or {}).get("message") or data.get("message", "") + assert "failed to fetch" in msg.lower() diff --git a/api/tests/unit_tests/core/schemas/test_resolver.py b/api/tests/unit_tests/core/schemas/test_resolver.py index eda8bf4343..239ee85346 100644 --- a/api/tests/unit_tests/core/schemas/test_resolver.py +++ b/api/tests/unit_tests/core/schemas/test_resolver.py @@ -496,6 +496,9 @@ class TestSchemaResolverClass: avg_time_no_cache = sum(results1) / len(results1) # Second run (with cache) - run multiple times + # Warm up cache first + resolve_dify_schema_refs(schema) + results2 = [] for _ in range(3): start = time.perf_counter() diff --git a/api/uv.lock b/api/uv.lock index eaac82db1b..a1a6cfaffd 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1269,47 +1269,47 @@ wheels = [ [[package]] name = "cryptography" -version = "46.0.4" +version = "46.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/78/19/f748958276519adf6a0c1e79e7b8860b4830dda55ccdf29f2719b5fc499c/cryptography-46.0.4.tar.gz", hash = "sha256:bfd019f60f8abc2ed1b9be4ddc21cfef059c841d86d710bb69909a688cbb8f59", size = 749301, upload-time = "2026-01-28T00:24:37.379Z" } +sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/99/157aae7949a5f30d51fcb1a9851e8ebd5c74bf99b5285d8bb4b8b9ee641e/cryptography-46.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:281526e865ed4166009e235afadf3a4c4cba6056f99336a99efba65336fd5485", size = 7173686, upload-time = "2026-01-28T00:23:07.515Z" }, - { url = "https://files.pythonhosted.org/packages/87/91/874b8910903159043b5c6a123b7e79c4559ddd1896e38967567942635778/cryptography-46.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f14fba5bf6f4390d7ff8f086c566454bff0411f6d8aa7af79c88b6f9267aecc", size = 4275871, upload-time = "2026-01-28T00:23:09.439Z" }, - { url = "https://files.pythonhosted.org/packages/c0/35/690e809be77896111f5b195ede56e4b4ed0435b428c2f2b6d35046fbb5e8/cryptography-46.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47bcd19517e6389132f76e2d5303ded6cf3f78903da2158a671be8de024f4cd0", size = 4423124, upload-time = "2026-01-28T00:23:11.529Z" }, - { url = "https://files.pythonhosted.org/packages/1a/5b/a26407d4f79d61ca4bebaa9213feafdd8806dc69d3d290ce24996d3cfe43/cryptography-46.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:01df4f50f314fbe7009f54046e908d1754f19d0c6d3070df1e6268c5a4af09fa", size = 4277090, upload-time = "2026-01-28T00:23:13.123Z" }, - { url = "https://files.pythonhosted.org/packages/0c/d8/4bb7aec442a9049827aa34cee1aa83803e528fa55da9a9d45d01d1bb933e/cryptography-46.0.4-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5aa3e463596b0087b3da0dbe2b2487e9fc261d25da85754e30e3b40637d61f81", size = 4947652, upload-time = "2026-01-28T00:23:14.554Z" }, - { url = "https://files.pythonhosted.org/packages/2b/08/f83e2e0814248b844265802d081f2fac2f1cbe6cd258e72ba14ff006823a/cryptography-46.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0a9ad24359fee86f131836a9ac3bffc9329e956624a2d379b613f8f8abaf5255", size = 4455157, upload-time = "2026-01-28T00:23:16.443Z" }, - { url = "https://files.pythonhosted.org/packages/0a/05/19d849cf4096448779d2dcc9bb27d097457dac36f7273ffa875a93b5884c/cryptography-46.0.4-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:dc1272e25ef673efe72f2096e92ae39dea1a1a450dd44918b15351f72c5a168e", size = 3981078, upload-time = "2026-01-28T00:23:17.838Z" }, - { url = "https://files.pythonhosted.org/packages/e6/89/f7bac81d66ba7cde867a743ea5b37537b32b5c633c473002b26a226f703f/cryptography-46.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:de0f5f4ec8711ebc555f54735d4c673fc34b65c44283895f1a08c2b49d2fd99c", size = 4276213, upload-time = "2026-01-28T00:23:19.257Z" }, - { url = "https://files.pythonhosted.org/packages/da/9f/7133e41f24edd827020ad21b068736e792bc68eecf66d93c924ad4719fb3/cryptography-46.0.4-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:eeeb2e33d8dbcccc34d64651f00a98cb41b2dc69cef866771a5717e6734dfa32", size = 4912190, upload-time = "2026-01-28T00:23:21.244Z" }, - { url = 
"https://files.pythonhosted.org/packages/a6/f7/6d43cbaddf6f65b24816e4af187d211f0bc536a29961f69faedc48501d8e/cryptography-46.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3d425eacbc9aceafd2cb429e42f4e5d5633c6f873f5e567077043ef1b9bbf616", size = 4454641, upload-time = "2026-01-28T00:23:22.866Z" }, - { url = "https://files.pythonhosted.org/packages/9e/4f/ebd0473ad656a0ac912a16bd07db0f5d85184924e14fc88feecae2492834/cryptography-46.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91627ebf691d1ea3976a031b61fb7bac1ccd745afa03602275dda443e11c8de0", size = 4405159, upload-time = "2026-01-28T00:23:25.278Z" }, - { url = "https://files.pythonhosted.org/packages/d1/f7/7923886f32dc47e27adeff8246e976d77258fd2aa3efdd1754e4e323bf49/cryptography-46.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2d08bc22efd73e8854b0b7caff402d735b354862f1145d7be3b9c0f740fef6a0", size = 4666059, upload-time = "2026-01-28T00:23:26.766Z" }, - { url = "https://files.pythonhosted.org/packages/eb/a7/0fca0fd3591dffc297278a61813d7f661a14243dd60f499a7a5b48acb52a/cryptography-46.0.4-cp311-abi3-win32.whl", hash = "sha256:82a62483daf20b8134f6e92898da70d04d0ef9a75829d732ea1018678185f4f5", size = 3026378, upload-time = "2026-01-28T00:23:28.317Z" }, - { url = "https://files.pythonhosted.org/packages/2d/12/652c84b6f9873f0909374864a57b003686c642ea48c84d6c7e2c515e6da5/cryptography-46.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:6225d3ebe26a55dbc8ead5ad1265c0403552a63336499564675b29eb3184c09b", size = 3478614, upload-time = "2026-01-28T00:23:30.275Z" }, - { url = "https://files.pythonhosted.org/packages/56/f7/f648fdbb61d0d45902d3f374217451385edc7e7768d1b03ff1d0e5ffc17b/cryptography-46.0.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a9556ba711f7c23f77b151d5798f3ac44a13455cc68db7697a1096e6d0563cab", size = 7169583, upload-time = "2026-01-28T00:23:56.558Z" }, - { url = "https://files.pythonhosted.org/packages/d8/cc/8f3224cbb2a928de7298d6ed4790f5ebc48114e02bdc9559196bfb12435d/cryptography-46.0.4-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8bf75b0259e87fa70bddc0b8b4078b76e7fd512fd9afae6c1193bcf440a4dbef", size = 4275419, upload-time = "2026-01-28T00:23:58.364Z" }, - { url = "https://files.pythonhosted.org/packages/17/43/4a18faa7a872d00e4264855134ba82d23546c850a70ff209e04ee200e76f/cryptography-46.0.4-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3c268a3490df22270955966ba236d6bc4a8f9b6e4ffddb78aac535f1a5ea471d", size = 4419058, upload-time = "2026-01-28T00:23:59.867Z" }, - { url = "https://files.pythonhosted.org/packages/ee/64/6651969409821d791ba12346a124f55e1b76f66a819254ae840a965d4b9c/cryptography-46.0.4-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:812815182f6a0c1d49a37893a303b44eaac827d7f0d582cecfc81b6427f22973", size = 4278151, upload-time = "2026-01-28T00:24:01.731Z" }, - { url = "https://files.pythonhosted.org/packages/20/0b/a7fce65ee08c3c02f7a8310cc090a732344066b990ac63a9dfd0a655d321/cryptography-46.0.4-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:a90e43e3ef65e6dcf969dfe3bb40cbf5aef0d523dff95bfa24256be172a845f4", size = 4939441, upload-time = "2026-01-28T00:24:03.175Z" }, - { url = "https://files.pythonhosted.org/packages/db/a7/20c5701e2cd3e1dfd7a19d2290c522a5f435dd30957d431dcb531d0f1413/cryptography-46.0.4-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a05177ff6296644ef2876fce50518dffb5bcdf903c85250974fc8bc85d54c0af", size = 4451617, upload-time = "2026-01-28T00:24:05.403Z" }, - { url = 
"https://files.pythonhosted.org/packages/00/dc/3e16030ea9aa47b63af6524c354933b4fb0e352257c792c4deeb0edae367/cryptography-46.0.4-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:daa392191f626d50f1b136c9b4cf08af69ca8279d110ea24f5c2700054d2e263", size = 3977774, upload-time = "2026-01-28T00:24:06.851Z" }, - { url = "https://files.pythonhosted.org/packages/42/c8/ad93f14118252717b465880368721c963975ac4b941b7ef88f3c56bf2897/cryptography-46.0.4-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e07ea39c5b048e085f15923511d8121e4a9dc45cee4e3b970ca4f0d338f23095", size = 4277008, upload-time = "2026-01-28T00:24:08.926Z" }, - { url = "https://files.pythonhosted.org/packages/00/cf/89c99698151c00a4631fbfcfcf459d308213ac29e321b0ff44ceeeac82f1/cryptography-46.0.4-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:d5a45ddc256f492ce42a4e35879c5e5528c09cd9ad12420828c972951d8e016b", size = 4903339, upload-time = "2026-01-28T00:24:12.009Z" }, - { url = "https://files.pythonhosted.org/packages/03/c3/c90a2cb358de4ac9309b26acf49b2a100957e1ff5cc1e98e6c4996576710/cryptography-46.0.4-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:6bb5157bf6a350e5b28aee23beb2d84ae6f5be390b2f8ee7ea179cda077e1019", size = 4451216, upload-time = "2026-01-28T00:24:13.975Z" }, - { url = "https://files.pythonhosted.org/packages/96/2c/8d7f4171388a10208671e181ca43cdc0e596d8259ebacbbcfbd16de593da/cryptography-46.0.4-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd5aba870a2c40f87a3af043e0dee7d9eb02d4aff88a797b48f2b43eff8c3ab4", size = 4404299, upload-time = "2026-01-28T00:24:16.169Z" }, - { url = "https://files.pythonhosted.org/packages/e9/23/cbb2036e450980f65c6e0a173b73a56ff3bccd8998965dea5cc9ddd424a5/cryptography-46.0.4-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:93d8291da8d71024379ab2cb0b5c57915300155ad42e07f76bea6ad838d7e59b", size = 4664837, upload-time = "2026-01-28T00:24:17.629Z" }, - { url = "https://files.pythonhosted.org/packages/0a/21/f7433d18fe6d5845329cbdc597e30caf983229c7a245bcf54afecc555938/cryptography-46.0.4-cp38-abi3-win32.whl", hash = "sha256:0563655cb3c6d05fb2afe693340bc050c30f9f34e15763361cf08e94749401fc", size = 3009779, upload-time = "2026-01-28T00:24:20.198Z" }, - { url = "https://files.pythonhosted.org/packages/3a/6a/bd2e7caa2facffedf172a45c1a02e551e6d7d4828658c9a245516a598d94/cryptography-46.0.4-cp38-abi3-win_amd64.whl", hash = "sha256:fa0900b9ef9c49728887d1576fd8d9e7e3ea872fa9b25ef9b64888adc434e976", size = 3466633, upload-time = "2026-01-28T00:24:21.851Z" }, - { url = "https://files.pythonhosted.org/packages/59/e0/f9c6c53e1f2a1c2507f00f2faba00f01d2f334b35b0fbfe5286715da2184/cryptography-46.0.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:766330cce7416c92b5e90c3bb71b1b79521760cdcfc3a6a1a182d4c9fab23d2b", size = 3476316, upload-time = "2026-01-28T00:24:24.144Z" }, - { url = "https://files.pythonhosted.org/packages/27/7a/f8d2d13227a9a1a9fe9c7442b057efecffa41f1e3c51d8622f26b9edbe8f/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c236a44acfb610e70f6b3e1c3ca20ff24459659231ef2f8c48e879e2d32b73da", size = 4216693, upload-time = "2026-01-28T00:24:25.758Z" }, - { url = "https://files.pythonhosted.org/packages/c5/de/3787054e8f7972658370198753835d9d680f6cd4a39df9f877b57f0dd69c/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8a15fb869670efa8f83cbffbc8753c1abf236883225aed74cd179b720ac9ec80", size = 4382765, upload-time = "2026-01-28T00:24:27.577Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/5f/60e0afb019973ba6a0b322e86b3d61edf487a4f5597618a430a2a15f2d22/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:fdc3daab53b212472f1524d070735b2f0c214239df131903bae1d598016fa822", size = 4216066, upload-time = "2026-01-28T00:24:29.056Z" }, - { url = "https://files.pythonhosted.org/packages/81/8e/bf4a0de294f147fee66f879d9bae6f8e8d61515558e3d12785dd90eca0be/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:44cc0675b27cadb71bdbb96099cca1fa051cd11d2ade09e5cd3a2edb929ed947", size = 4382025, upload-time = "2026-01-28T00:24:30.681Z" }, - { url = "https://files.pythonhosted.org/packages/79/f4/9ceb90cfd6a3847069b0b0b353fd3075dc69b49defc70182d8af0c4ca390/cryptography-46.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:be8c01a7d5a55f9a47d1888162b76c8f49d62b234d88f0ff91a9fbebe32ffbc3", size = 3406043, upload-time = "2026-01-28T00:24:32.236Z" }, + { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" }, + { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, + { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" }, + { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, + { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" }, + { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, + { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, + { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = 
"2026-02-10T19:17:20.256Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" }, + { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, + { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" }, + { url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, + { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, + { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" }, + 
{ url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, + { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, + { url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, + { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" }, + { url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, + { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, + { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" }, + { url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" }, + { url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" }, + { url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" }, + { url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" }, + { url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" }, + { url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" }, ] [[package]] @@ -1486,7 +1486,7 @@ wheels = [ [[package]] name = "dify-api" -version = "1.12.1" +version = "1.13.0" source = { virtual = "." } dependencies = [ { name = "aliyun-log-python-sdk" }, @@ -1723,7 +1723,7 @@ requires-dist = [ { name = "gevent-websocket", specifier = "~=0.10.1" }, { name = "gmpy2", specifier = "~=2.2.1" }, { name = "google-api-core", specifier = "==2.18.0" }, - { name = "google-api-python-client", specifier = "==2.90.0" }, + { name = "google-api-python-client", specifier = "==2.189.0" }, { name = "google-auth", specifier = "==2.29.0" }, { name = "google-auth-httplib2", specifier = "==0.2.0" }, { name = "google-cloud-aiplatform", specifier = "==1.49.0" }, @@ -2503,7 +2503,7 @@ grpc = [ [[package]] name = "google-api-python-client" -version = "2.90.0" +version = "2.189.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core" }, @@ -2512,9 +2512,9 @@ dependencies = [ { name = "httplib2" }, { name = "uritemplate" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/35/8b/d990f947c261304a5c1599d45717d02c27d46af5f23e1fee5dc19c8fa79d/google-api-python-client-2.90.0.tar.gz", hash = "sha256:cbcb3ba8be37c6806676a49df16ac412077e5e5dc7fa967941eff977b31fba03", size = 10891311, upload-time = "2023-06-20T16:29:25.008Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/f8/0783aeca3410ee053d4dd1fccafd85197847b8f84dd038e036634605d083/google_api_python_client-2.189.0.tar.gz", hash = "sha256:45f2d8559b5c895dde6ad3fb33de025f5cb2c197fa5862f18df7f5295a172741", size = 13979470, upload-time = "2026-02-03T19:24:55.432Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/03/209b5c36a621ae644dc7d4743746cd3b38b18e133f8779ecaf6b95cc01ce/google_api_python_client-2.90.0-py2.py3-none-any.whl", hash = "sha256:4a41ffb7797d4f28e44635fb1e7076240b741c6493e7c3233c0e4421cec7c913", size = 11379891, upload-time = "2023-06-20T16:29:19.532Z" }, + { url = "https://files.pythonhosted.org/packages/04/44/3677ff27998214f2fa7957359da48da378a0ffff1bd0bdaba42e752bc13e/google_api_python_client-2.189.0-py3-none-any.whl", hash = "sha256:a258c09660a49c6159173f8bbece171278e917e104a11f0640b34751b79c8a1a", size = 14547633, upload-time = "2026-02-03T19:24:52.845Z" }, ] [[package]] 
diff --git a/docker/.env.example b/docker/.env.example index 1018f04c12..94810be1ab 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -1562,6 +1562,7 @@ AMPLITUDE_API_KEY= # Sandbox expired records clean configuration SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21 SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000 +SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200 SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30 diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index a5919a7a9a..e17c7128fe 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -694,6 +694,7 @@ x-shared-env: &shared-api-worker-env AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-} SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD: ${SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD:-21} SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE:-1000} + SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL:-200} SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: ${SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS:-30} PUBSUB_REDIS_URL: ${PUBSUB_REDIS_URL:-} PUBSUB_REDIS_CHANNEL_TYPE: ${PUBSUB_REDIS_CHANNEL_TYPE:-pubsub} diff --git a/web/app/components/base/markdown-blocks/code-block.tsx b/web/app/components/base/markdown-blocks/code-block.tsx index e227f49b83..daf27562a2 100644 --- a/web/app/components/base/markdown-blocks/code-block.tsx +++ b/web/app/components/base/markdown-blocks/code-block.tsx @@ -204,23 +204,10 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any } } catch { - try { - // eslint-disable-next-line no-new-func - const result = new Function(`return ${trimmedContent}`)() - if (typeof result === 'object' && result !== null) { - setFinalChartOption(result) - setChartState('success') - processedRef.current = true - return - } - } - catch { - // If we have a complete JSON structure but it doesn't parse, - // it's likely an error rather than incomplete data - setChartState('error') - processedRef.current = true - return - } + // Avoid executing arbitrary code; require valid JSON for chart options. + setChartState('error') + processedRef.current = true + return } } @@ -249,19 +236,9 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any } } catch { - try { - // eslint-disable-next-line no-new-func - const result = new Function(`return ${trimmedContent}`)() - if (typeof result === 'object' && result !== null) { - setFinalChartOption(result) - isValidOption = true - } - } - catch { - // Both parsing methods failed, but content looks complete - setChartState('error') - processedRef.current = true - } + // Only accept JSON to avoid executing arbitrary code from the message. + setChartState('error') + processedRef.current = true } if (isValidOption) { diff --git a/web/app/components/header/account-setting/members-page/index.tsx b/web/app/components/header/account-setting/members-page/index.tsx index da6c4c0003..0786ec0d87 100644 --- a/web/app/components/header/account-setting/members-page/index.tsx +++ b/web/app/components/header/account-setting/members-page/index.tsx @@ -104,7 +104,7 @@ const MembersPage = () => { )}
- setInviteModalVisible(true)} />
+ {isCurrentWorkspaceManager && setInviteModalVisible(true)} />}
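Note on the `code-block.tsx` hunks above: the removed fallback passed model-generated text to `new Function(...)`, i.e. it executed arbitrary code from the message whenever `JSON.parse` failed. The replacement treats anything that is not valid JSON as a chart error. A minimal sketch of the hardened path; `parseChartOption` is a hypothetical helper name for illustration, the real component inlines this logic:

```ts
// JSON-only parsing for ECharts-style options, as code-block.tsx now does.
// `parseChartOption` is an illustrative name, not part of the diff.
function parseChartOption(trimmedContent: string): object | null {
  try {
    const parsed: unknown = JSON.parse(trimmedContent)
    // Accept anything that parses to a non-null object; primitives fall through.
    if (typeof parsed === 'object' && parsed !== null)
      return parsed as object
    return null
  }
  catch {
    // No `new Function(...)` fallback: non-JSON content is surfaced as an
    // error state instead of being evaluated as JavaScript.
    return null
  }
}
```

The members-page hunk above applies a similar defensive posture on the UI side: the invite control renders only when `isCurrentWorkspaceManager` is true.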
diff --git a/web/app/components/rag-pipeline/hooks/use-pipeline-run.spec.ts b/web/app/components/rag-pipeline/hooks/use-pipeline-run.spec.ts index 2b21001839..c8a4a0ebb7 100644 --- a/web/app/components/rag-pipeline/hooks/use-pipeline-run.spec.ts +++ b/web/app/components/rag-pipeline/hooks/use-pipeline-run.spec.ts @@ -92,8 +92,10 @@ vi.mock('@/service/workflow', () => ({ })) const mockInvalidAllLastRun = vi.fn() +const mockInvalidateRunHistory = vi.fn() vi.mock('@/service/use-workflow', () => ({ useInvalidAllLastRun: () => mockInvalidAllLastRun, + useInvalidateWorkflowRunHistory: () => mockInvalidateRunHistory, })) // Mock FlowType @@ -472,6 +474,7 @@ describe('usePipelineRun', () => { }) expect(onWorkflowStarted).toHaveBeenCalledWith({ task_id: 'task-1' }) + expect(mockInvalidateRunHistory).toHaveBeenCalled() }) it('should call onWorkflowFinished callback when provided', async () => { @@ -493,6 +496,7 @@ describe('usePipelineRun', () => { }) expect(onWorkflowFinished).toHaveBeenCalledWith({ status: 'succeeded' }) + expect(mockInvalidateRunHistory).toHaveBeenCalled() }) it('should call onError callback when provided', async () => { @@ -514,6 +518,7 @@ describe('usePipelineRun', () => { }) expect(onError).toHaveBeenCalledWith({ message: 'error' }) + expect(mockInvalidateRunHistory).toHaveBeenCalled() }) it('should call onNodeStarted callback when provided', async () => { diff --git a/web/app/components/rag-pipeline/hooks/use-pipeline-run.ts b/web/app/components/rag-pipeline/hooks/use-pipeline-run.ts index dc2a234d1e..b35441365b 100644 --- a/web/app/components/rag-pipeline/hooks/use-pipeline-run.ts +++ b/web/app/components/rag-pipeline/hooks/use-pipeline-run.ts @@ -12,7 +12,7 @@ import { useWorkflowRunEvent } from '@/app/components/workflow/hooks/use-workflo import { useStore, useWorkflowStore } from '@/app/components/workflow/store' import { WorkflowRunningStatus } from '@/app/components/workflow/types' import { ssePost } from '@/service/base' -import { useInvalidAllLastRun } from '@/service/use-workflow' +import { useInvalidAllLastRun, useInvalidateWorkflowRunHistory } from '@/service/use-workflow' import { stopWorkflowRun } from '@/service/workflow' import { FlowType } from '@/types/common' import { useNodesSyncDraft } from './use-nodes-sync-draft' @@ -93,6 +93,7 @@ export const usePipelineRun = () => { const pipelineId = useStore(s => s.pipelineId) const invalidAllLastRun = useInvalidAllLastRun(FlowType.ragPipeline, pipelineId) + const invalidateRunHistory = useInvalidateWorkflowRunHistory() const { fetchInspectVars } = useSetWorkflowVarsWithValue({ flowType: FlowType.ragPipeline, flowId: pipelineId!, @@ -132,6 +133,7 @@ export const usePipelineRun = () => { ...restCallback } = callback || {} const { pipelineId } = workflowStore.getState() + const runHistoryUrl = `/rag/pipelines/${pipelineId}/workflow-runs` workflowStore.setState({ historyWorkflowData: undefined }) const workflowContainer = document.getElementById('workflow-container') @@ -170,12 +172,14 @@ export const usePipelineRun = () => { }, onWorkflowStarted: (params) => { handleWorkflowStarted(params) + invalidateRunHistory(runHistoryUrl) if (onWorkflowStarted) onWorkflowStarted(params) }, onWorkflowFinished: (params) => { handleWorkflowFinished(params) + invalidateRunHistory(runHistoryUrl) fetchInspectVars({}) invalidAllLastRun() @@ -184,6 +188,7 @@ export const usePipelineRun = () => { }, onError: (params) => { handleWorkflowFailed() + invalidateRunHistory(runHistoryUrl) if (onError) onError(params) @@ -275,7 +280,7 @@ export 
const usePipelineRun = () => { ...restCallback, }, ) - }, [store, doSyncWorkflowDraft, workflowStore, handleWorkflowStarted, handleWorkflowFinished, fetchInspectVars, invalidAllLastRun, handleWorkflowFailed, handleWorkflowNodeStarted, handleWorkflowNodeFinished, handleWorkflowNodeIterationStarted, handleWorkflowNodeIterationNext, handleWorkflowNodeIterationFinished, handleWorkflowNodeLoopStarted, handleWorkflowNodeLoopNext, handleWorkflowNodeLoopFinished, handleWorkflowNodeRetry, handleWorkflowAgentLog, handleWorkflowTextChunk, handleWorkflowTextReplace]) + }, [store, doSyncWorkflowDraft, workflowStore, handleWorkflowStarted, handleWorkflowFinished, fetchInspectVars, invalidAllLastRun, invalidateRunHistory, handleWorkflowFailed, handleWorkflowNodeStarted, handleWorkflowNodeFinished, handleWorkflowNodeIterationStarted, handleWorkflowNodeIterationNext, handleWorkflowNodeIterationFinished, handleWorkflowNodeLoopStarted, handleWorkflowNodeLoopNext, handleWorkflowNodeLoopFinished, handleWorkflowNodeRetry, handleWorkflowAgentLog, handleWorkflowTextChunk, handleWorkflowTextReplace]) const handleStopRun = useCallback((taskId: string) => { const { pipelineId } = workflowStore.getState() diff --git a/web/app/components/workflow-app/hooks/use-workflow-run.ts b/web/app/components/workflow-app/hooks/use-workflow-run.ts index e07257da39..9c50863fb2 100644 --- a/web/app/components/workflow-app/hooks/use-workflow-run.ts +++ b/web/app/components/workflow-app/hooks/use-workflow-run.ts @@ -24,7 +24,7 @@ import { WorkflowRunningStatus } from '@/app/components/workflow/types' import { handleStream, post, sseGet, ssePost } from '@/service/base' import { ContentType } from '@/service/fetch' import { useInvalidateSandboxFiles } from '@/service/use-sandbox-file' -import { useInvalidAllLastRun } from '@/service/use-workflow' +import { useInvalidAllLastRun, useInvalidateWorkflowRunHistory } from '@/service/use-workflow' import { stopWorkflowRun } from '@/service/workflow' import { AppModeEnum } from '@/types/app' import { useSetWorkflowVarsWithValue } from '../../workflow/hooks/use-fetch-workflow-inspect-vars' @@ -67,6 +67,7 @@ export const useWorkflowRun = () => { const configsMap = useConfigsMap() const { flowId, flowType } = configsMap const invalidAllLastRun = useInvalidAllLastRun(flowType, flowId) + const invalidateRunHistory = useInvalidateWorkflowRunHistory() const invalidateSandboxFiles = useInvalidateSandboxFiles() const { fetchInspectVars } = useSetWorkflowVarsWithValue({ @@ -191,6 +192,9 @@ export const useWorkflowRun = () => { } = callback || {} workflowStore.setState({ historyWorkflowData: undefined }) const appDetail = useAppStore.getState().appDetail + const runHistoryUrl = appDetail?.mode === AppModeEnum.ADVANCED_CHAT + ? 
`/apps/${appDetail.id}/advanced-chat/workflow-runs` + : `/apps/${appDetail?.id}/workflow-runs` const workflowContainer = document.getElementById('workflow-container') const { @@ -365,6 +369,7 @@ export const useWorkflowRun = () => { const wrappedOnError = (params: any) => { clearAbortController() handleWorkflowFailed() + invalidateRunHistory(runHistoryUrl) clearListeningState() if (onError) @@ -383,6 +388,7 @@ export const useWorkflowRun = () => { ...restCallback, onWorkflowStarted: (params) => { handleWorkflowStarted(params) + invalidateRunHistory(runHistoryUrl) if (onWorkflowStarted) onWorkflowStarted(params) @@ -390,6 +396,7 @@ export const useWorkflowRun = () => { onWorkflowFinished: (params) => { clearListeningState() handleWorkflowFinished(params) + invalidateRunHistory(runHistoryUrl) if (onWorkflowFinished) onWorkflowFinished(params) @@ -499,6 +506,7 @@ export const useWorkflowRun = () => { }, onWorkflowPaused: (params) => { handleWorkflowPaused() + invalidateRunHistory(runHistoryUrl) if (onWorkflowPaused) onWorkflowPaused(params) const url = `/workflow/${params.workflow_run_id}/events` @@ -697,6 +705,7 @@ export const useWorkflowRun = () => { }, onWorkflowFinished: (params) => { handleWorkflowFinished(params) + invalidateRunHistory(runHistoryUrl) if (onWorkflowFinished) onWorkflowFinished(params) @@ -707,6 +716,7 @@ export const useWorkflowRun = () => { }, onError: (params) => { handleWorkflowFailed() + invalidateRunHistory(runHistoryUrl) if (onError) onError(params) @@ -806,6 +816,7 @@ export const useWorkflowRun = () => { }, onWorkflowPaused: (params) => { handleWorkflowPaused() + invalidateRunHistory(runHistoryUrl) if (onWorkflowPaused) onWorkflowPaused(params) const url = `/workflow/${params.workflow_run_id}/events` @@ -840,7 +851,7 @@ export const useWorkflowRun = () => { }, finalCallbacks, ) - }, [invalidateSandboxFiles, store, doSyncWorkflowDraft, workflowStore, pathname, handleWorkflowFailed, flowId, handleWorkflowStarted, handleWorkflowFinished, fetchInspectVars, invalidAllLastRun, handleWorkflowNodeStarted, handleWorkflowNodeFinished, handleWorkflowNodeIterationStarted, handleWorkflowNodeIterationNext, handleWorkflowNodeIterationFinished, handleWorkflowNodeLoopStarted, handleWorkflowNodeLoopNext, handleWorkflowNodeLoopFinished, handleWorkflowNodeRetry, handleWorkflowAgentLog, handleWorkflowTextChunk, handleWorkflowTextReplace, handleWorkflowPaused, handleWorkflowNodeHumanInputRequired, handleWorkflowNodeHumanInputFormFilled, handleWorkflowNodeHumanInputFormTimeout]) + }, [invalidateSandboxFiles, store, doSyncWorkflowDraft, workflowStore, pathname, handleWorkflowFailed, flowId, handleWorkflowStarted, handleWorkflowFinished, fetchInspectVars, invalidAllLastRun, invalidateRunHistory, handleWorkflowNodeStarted, handleWorkflowNodeFinished, handleWorkflowNodeIterationStarted, handleWorkflowNodeIterationNext, handleWorkflowNodeIterationFinished, handleWorkflowNodeLoopStarted, handleWorkflowNodeLoopNext, handleWorkflowNodeLoopFinished, handleWorkflowNodeRetry, handleWorkflowAgentLog, handleWorkflowTextChunk, handleWorkflowTextReplace, handleWorkflowPaused, handleWorkflowNodeHumanInputRequired, handleWorkflowNodeHumanInputFormFilled, handleWorkflowNodeHumanInputFormTimeout]) const handleStopRun = useCallback((taskId: string) => { const setStoppedState = () => { diff --git a/web/app/components/workflow/header/view-history.tsx b/web/app/components/workflow/header/view-history.tsx index f9b446e930..94963e29fc 100644 --- a/web/app/components/workflow/header/view-history.tsx +++ 
b/web/app/components/workflow/header/view-history.tsx @@ -1,18 +1,8 @@ -import { - RiCheckboxCircleLine, - RiCloseLine, - RiErrorWarningLine, -} from '@remixicon/react' import { memo, useState, } from 'react' import { useTranslation } from 'react-i18next' -import { AlertTriangle } from '@/app/components/base/icons/src/vender/line/alertsAndFeedback' -import { - ClockPlay, - ClockPlaySlim, -} from '@/app/components/base/icons/src/vender/line/time' import Loading from '@/app/components/base/loading' import { PortalToFollowElem, @@ -89,9 +79,7 @@ const ViewHistory = ({ open && 'bg-components-button-secondary-bg-hover', )} > - + {t('common.showRunHistory', { ns: 'workflow' })}
) @@ -107,7 +95,7 @@ const ViewHistory = ({ onClearLogAndMessageModal?.() }} > - + ) @@ -129,7 +117,7 @@ const ViewHistory = ({ setOpen(false) }} > - + { @@ -145,7 +133,7 @@ const ViewHistory = ({ { !data?.data.length && (
- +
{t('common.notRunning', { ns: 'workflow' })}
@@ -175,18 +163,18 @@ const ViewHistory = ({ }} > { - !isChatMode && item.status === WorkflowRunningStatus.Stopped && ( - + !isChatMode && [WorkflowRunningStatus.Stopped, WorkflowRunningStatus.Paused].includes(item.status) && ( + ) } { !isChatMode && item.status === WorkflowRunningStatus.Failed && ( - + ) } { !isChatMode && item.status === WorkflowRunningStatus.Succeeded && ( - + ) }
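Two related changes meet in this `view-history.tsx` hunk: the history list now gives `Paused` runs the same treatment as `Stopped` ones, and (per the `types/workflow.ts` hunk at the end of the patch) `WorkflowRunHistory.status` is typed as `WorkflowRunningStatus` rather than a bare `string`, making the `.includes` check type-safe. A hedged sketch of the check; `isInterruptedRun` is an illustrative name, not in the diff:

```ts
import { WorkflowRunningStatus } from '@/app/components/workflow/types'

// Stopped and paused runs share the warning-style presentation in the
// run-history panel; the helper name is hypothetical.
const isInterruptedRun = (status: WorkflowRunningStatus): boolean =>
  [WorkflowRunningStatus.Stopped, WorkflowRunningStatus.Paused].includes(status)
```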
@@ -196,7 +184,7 @@ const ViewHistory = ({ item.id === historyWorkflowData?.id && 'text-text-accent', )} > - {`Test ${isChatMode ? 'Chat' : 'Run'}${formatWorkflowRunIdentifier(item.finished_at)}`} + {`Test ${isChatMode ? 'Chat' : 'Run'}${formatWorkflowRunIdentifier(item.finished_at, item.status)}`}
{item.created_by_account?.name} diff --git a/web/app/components/workflow/panel/workflow-preview.tsx b/web/app/components/workflow/panel/workflow-preview.tsx index beee21a96a..2c2b16c8c8 100644 --- a/web/app/components/workflow/panel/workflow-preview.tsx +++ b/web/app/components/workflow/panel/workflow-preview.tsx @@ -1,7 +1,3 @@ -import { - RiClipboardLine, - RiCloseLine, -} from '@remixicon/react' import copy from 'copy-to-clipboard' import { memo, @@ -141,7 +137,7 @@ const WorkflowPreview = () => {
handleCancelDebugAndPreviewPanel()}> - +
@@ -246,7 +242,7 @@ const WorkflowPreview = () => { Toast.notify({ type: 'success', message: t('actionMsg.copySuccessfully', { ns: 'common' }) }) }} > - +
{t('operation.copy', { ns: 'common' })}
)} diff --git a/web/eslint-suppressions.json b/web/eslint-suppressions.json index ccc80aa896..fd572579e1 100644 --- a/web/eslint-suppressions.json +++ b/web/eslint-suppressions.json @@ -1246,7 +1246,7 @@ }, "app/components/base/markdown-blocks/code-block.tsx": { "react-hooks-extra/no-direct-set-state-in-use-effect": { - "count": 10 + "count": 7 }, "ts/no-explicit-any": { "count": 9 diff --git a/web/package.json b/web/package.json index 434fafeec9..2e59d607dd 100644 --- a/web/package.json +++ b/web/package.json @@ -1,7 +1,7 @@ { "name": "dify-web", "type": "module", - "version": "1.12.1", + "version": "1.13.0", "private": true, "packageManager": "pnpm@10.28.1+sha512.7d7dbbca9e99447b7c3bf7a73286afaaf6be99251eb9498baefa7d406892f67b879adb3a1d7e687fc4ccc1a388c7175fbaae567a26ab44d1067b54fcb0d6a316", "imports": { diff --git a/web/service/use-workflow.ts b/web/service/use-workflow.ts index 2f9f5d2fb7..fe20b906fc 100644 --- a/web/service/use-workflow.ts +++ b/web/service/use-workflow.ts @@ -26,14 +26,26 @@ export const useAppWorkflow = (appID: string) => { }) } +const WorkflowRunHistoryKey = [NAME_SPACE, 'runHistory'] + export const useWorkflowRunHistory = (url?: string, enabled = true) => { return useQuery({ - queryKey: [NAME_SPACE, 'runHistory', url], + queryKey: [...WorkflowRunHistoryKey, url], queryFn: () => get(url as string), enabled: !!url && enabled, + staleTime: 0, }) } +export const useInvalidateWorkflowRunHistory = () => { + const queryClient = useQueryClient() + return (url: string) => { + queryClient.invalidateQueries({ + queryKey: [...WorkflowRunHistoryKey, url], + }) + } +} + export const useInvalidateAppWorkflow = () => { const queryClient = useQueryClient() return (appID: string) => { diff --git a/web/types/workflow.ts b/web/types/workflow.ts index 0903a08b58..3419659b44 100644 --- a/web/types/workflow.ts +++ b/web/types/workflow.ts @@ -4,7 +4,19 @@ import type { BeforeRunFormProps } from '@/app/components/workflow/nodes/_base/c import type { ErrorHandleTypeEnum } from '@/app/components/workflow/nodes/_base/components/error-handle/types' import type { FormInputItem, UserAction } from '@/app/components/workflow/nodes/human-input/types' import type { SpecialResultPanelProps } from '@/app/components/workflow/run/special-result-panel' -import type { BlockEnum, CommonNodeType, ConversationVariable, Edge, EnvironmentVariable, InputVar, Node, ValueSelector, Variable, VarType } from '@/app/components/workflow/types' +import type { + BlockEnum, + CommonNodeType, + ConversationVariable, + Edge, + EnvironmentVariable, + InputVar, + Node, + ValueSelector, + Variable, + VarType, + WorkflowRunningStatus, +} from '@/app/components/workflow/types' import type { RAGPipelineVariables } from '@/models/pipeline' import type { TransferMethod } from '@/types/app' @@ -471,7 +483,7 @@ export type WorkflowRunHistory = { viewport?: Viewport } inputs: Record - status: string + status: WorkflowRunningStatus outputs: Record error?: string elapsed_time: number
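The thread tying the web-side changes together is run-history cache invalidation. `use-workflow.ts` now keys every history query under a shared `[NAME_SPACE, 'runHistory']` prefix plus the request URL, and both run hooks (`use-workflow-run.ts`, `use-pipeline-run.ts`) call the new invalidator on workflow start, finish, pause, and error, choosing the URL by app mode (`.../advanced-chat/workflow-runs` for advanced-chat apps, `.../workflow-runs` otherwise, `/rag/pipelines/{id}/workflow-runs` for pipelines). A self-contained sketch of the pattern, assuming `@tanstack/react-query` and substituting `fetch` for the project's `get` helper:

```ts
import { useQuery, useQueryClient } from '@tanstack/react-query'

const NAME_SPACE = 'workflow'
// Shared prefix; each concrete history list is further keyed by its URL.
const WorkflowRunHistoryKey = [NAME_SPACE, 'runHistory']

export const useWorkflowRunHistory = (url?: string, enabled = true) =>
  useQuery({
    queryKey: [...WorkflowRunHistoryKey, url],
    // Stand-in for the project's `get(url)` request helper.
    queryFn: () => fetch(url as string).then(res => res.json()),
    enabled: !!url && enabled,
    staleTime: 0, // refetch as soon as the key is invalidated
  })

export const useInvalidateWorkflowRunHistory = () => {
  const queryClient = useQueryClient()
  // Matching on the full [prefix, url] key refreshes exactly one app's or
  // pipeline's history list rather than every cached list.
  return (url: string) =>
    queryClient.invalidateQueries({ queryKey: [...WorkflowRunHistoryKey, url] })
}
```

With `staleTime: 0`, any mounted history panel refetches immediately after the run hooks fire the invalidator, which is what the new assertions in `use-pipeline-run.spec.ts` verify.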