From 2333d75c56b3af1ff81fcd04f4d31c36ce83e62b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=9D=9E=E6=B3=95=E6=93=8D=E4=BD=9C?= Date: Tue, 14 Apr 2026 13:57:27 +0800 Subject: [PATCH 01/47] chore: allow disabling app-level PostgreSQL timezone injection (#35129) --- api/configs/middleware/__init__.py | 23 ++++++++++++----- .../unit_tests/configs/test_dify_config.py | 25 ++++++++++++++----- 2 files changed, 36 insertions(+), 12 deletions(-) diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py index 817284d26f..c392b8840f 100644 --- a/api/configs/middleware/__init__.py +++ b/api/configs/middleware/__init__.py @@ -160,6 +160,16 @@ class DatabaseConfig(BaseSettings): default="", ) + DB_SESSION_TIMEZONE_OVERRIDE: str = Field( + description=( + "PostgreSQL session timezone override injected via startup options." + " Default is 'UTC' for out-of-the-box consistency." + " Set to empty string to disable app-level timezone injection, for example when using RDS Proxy" + " together with a database-side default timezone." + ), + default="UTC", + ) + @computed_field # type: ignore[prop-decorator] @property def SQLALCHEMY_DATABASE_URI_SCHEME(self) -> str: @@ -227,12 +237,13 @@ class DatabaseConfig(BaseSettings): connect_args: dict[str, str] = {} # Use the dynamic SQLALCHEMY_DATABASE_URI_SCHEME property if self.SQLALCHEMY_DATABASE_URI_SCHEME.startswith("postgresql"): - timezone_opt = "-c timezone=UTC" - if options: - merged_options = f"{options} {timezone_opt}" - else: - merged_options = timezone_opt - connect_args = {"options": merged_options} + merged_options = options.strip() + session_timezone_override = self.DB_SESSION_TIMEZONE_OVERRIDE.strip() + if session_timezone_override: + timezone_opt = f"-c timezone={session_timezone_override}" + merged_options = f"{merged_options} {timezone_opt}".strip() if merged_options else timezone_opt + if merged_options: + connect_args = {"options": merged_options} result: SQLAlchemyEngineOptionsDict = { "pool_size": self.SQLALCHEMY_POOL_SIZE, diff --git a/api/tests/unit_tests/configs/test_dify_config.py b/api/tests/unit_tests/configs/test_dify_config.py index d6933e2180..3089750c3e 100644 --- a/api/tests/unit_tests/configs/test_dify_config.py +++ b/api/tests/unit_tests/configs/test_dify_config.py @@ -145,7 +145,7 @@ def test_inner_api_config_exist(monkeypatch: pytest.MonkeyPatch): def test_db_extras_options_merging(monkeypatch: pytest.MonkeyPatch): - """Test that DB_EXTRAS options are properly merged with default timezone setting""" + """Test that DB_EXTRAS options are merged with the default timezone startup option.""" # Set environment variables monkeypatch.setenv("DB_TYPE", "postgresql") monkeypatch.setenv("DB_USERNAME", "postgres") @@ -158,15 +158,28 @@ def test_db_extras_options_merging(monkeypatch: pytest.MonkeyPatch): # Create config config = DifyConfig() - # Get engine options - engine_options = config.SQLALCHEMY_ENGINE_OPTIONS - - # Verify options contains both search_path and timezone - options = engine_options["connect_args"]["options"] + options = config.SQLALCHEMY_ENGINE_OPTIONS["connect_args"]["options"] assert "search_path=myschema" in options assert "timezone=UTC" in options +def test_db_session_timezone_override_can_disable_app_level_timezone_injection(monkeypatch: pytest.MonkeyPatch): + monkeypatch.setenv("DB_TYPE", "postgresql") + monkeypatch.setenv("DB_USERNAME", "postgres") + monkeypatch.setenv("DB_PASSWORD", "postgres") + monkeypatch.setenv("DB_HOST", "localhost") + monkeypatch.setenv("DB_PORT", "5432") + 
monkeypatch.setenv("DB_DATABASE", "dify") + monkeypatch.setenv("DB_EXTRAS", "options=-c search_path=myschema") + monkeypatch.setenv("DB_SESSION_TIMEZONE_OVERRIDE", "") + + config = DifyConfig() + + assert config.SQLALCHEMY_ENGINE_OPTIONS["connect_args"] == { + "options": "-c search_path=myschema", + } + + def test_pubsub_redis_url_default(monkeypatch: pytest.MonkeyPatch): os.environ.clear() From c8b372dba0b1f0845c1a8edcd93aed6e06fad567 Mon Sep 17 00:00:00 2001 From: Benjamin Date: Tue, 14 Apr 2026 14:02:43 +0800 Subject: [PATCH 02/47] chore(deps): update pyarrow to version 23.0.1 and add override deps (#35137) --- api/pyproject.toml | 3 +++ api/uv.lock | 22 ++++++++++------------ 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/api/pyproject.toml b/api/pyproject.toml index 3b7e5f8e1f..f22bafb03a 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -136,6 +136,9 @@ dify-vdb-weaviate = { workspace = true } [tool.uv] default-groups = ["storage", "tools", "vdb-all"] package = false +override-dependencies = [ + "pyarrow>=18.0.0", +] [dependency-groups] diff --git a/api/uv.lock b/api/uv.lock index 71d3a14880..9ed8d16107 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -42,6 +42,7 @@ members = [ "dify-vdb-vikingdb", "dify-vdb-weaviate", ] +overrides = [{ name = "pyarrow", specifier = ">=18.0.0" }] [[package]] name = "abnf" @@ -4986,20 +4987,17 @@ wheels = [ [[package]] name = "pyarrow" -version = "14.0.2" +version = "23.0.1" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d7/8b/d18b7eb6fb22e5ed6ffcbc073c85dae635778dbd1270a6cf5d750b031e84/pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025", size = 1063645, upload-time = "2023-12-18T15:43:41.625Z" } +sdist = { url = "https://files.pythonhosted.org/packages/88/22/134986a4cc224d593c1afde5494d18ff629393d74cc2eddb176669f234a4/pyarrow-23.0.1.tar.gz", hash = "sha256:b8c5873e33440b2bc2f4a79d2b47017a89c5a24116c055625e6f2ee50523f019", size = 1167336, upload-time = "2026-02-16T10:14:12.39Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/5b/d8ab6c20c43b598228710e4e4a6cba03a01f6faa3d08afff9ce76fd0fd47/pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944", size = 26819585, upload-time = "2023-12-18T15:41:27.59Z" }, - { url = "https://files.pythonhosted.org/packages/2d/29/bed2643d0dd5e9570405244a61f6db66c7f4704a6e9ce313f84fa5a3675a/pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5", size = 23965222, upload-time = "2023-12-18T15:41:32.449Z" }, - { url = "https://files.pythonhosted.org/packages/2a/34/da464632e59a8cdd083370d69e6c14eae30221acb284f671c6bc9273fadd/pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422", size = 35942036, upload-time = "2023-12-18T15:41:38.767Z" }, - { url = "https://files.pythonhosted.org/packages/a8/ff/cbed4836d543b29f00d2355af67575c934999ff1d43e3f438ab0b1b394f1/pyarrow-14.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cc61593c8e66194c7cdfae594503e91b926a228fba40b5cf25cc593563bcd07", size = 38089266, upload-time = "2023-12-18T15:41:47.617Z" }, - { url = 
"https://files.pythonhosted.org/packages/38/41/345011cb831d3dbb2dab762fc244c745a5df94b199223a99af52a5f7dff6/pyarrow-14.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:78ea56f62fb7c0ae8ecb9afdd7893e3a7dbeb0b04106f5c08dbb23f9c0157591", size = 35404468, upload-time = "2023-12-18T15:41:54.49Z" }, - { url = "https://files.pythonhosted.org/packages/fd/af/2fc23ca2068ff02068d8dabf0fb85b6185df40ec825973470e613dbd8790/pyarrow-14.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:37c233ddbce0c67a76c0985612fef27c0c92aef9413cf5aa56952f359fcb7379", size = 38003134, upload-time = "2023-12-18T15:42:01.593Z" }, - { url = "https://files.pythonhosted.org/packages/95/1f/9d912f66a87e3864f694e000977a6a70a644ea560289eac1d733983f215d/pyarrow-14.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e4b123ad0f6add92de898214d404e488167b87b5dd86e9a434126bc2b7a5578d", size = 25043754, upload-time = "2023-12-18T15:42:07.108Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4b/4166bb5abbfe6f750fc60ad337c43ecf61340fa52ab386da6e8dbf9e63c4/pyarrow-23.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f4b0dbfa124c0bb161f8b5ebb40f1a680b70279aa0c9901d44a2b5a20806039f", size = 34214575, upload-time = "2026-02-16T10:09:56.225Z" }, + { url = "https://files.pythonhosted.org/packages/e1/da/3f941e3734ac8088ea588b53e860baeddac8323ea40ce22e3d0baa865cc9/pyarrow-23.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:7707d2b6673f7de054e2e83d59f9e805939038eebe1763fe811ee8fa5c0cd1a7", size = 35832540, upload-time = "2026-02-16T10:10:03.428Z" }, + { url = "https://files.pythonhosted.org/packages/88/7c/3d841c366620e906d54430817531b877ba646310296df42ef697308c2705/pyarrow-23.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:86ff03fb9f1a320266e0de855dee4b17da6794c595d207f89bba40d16b5c78b9", size = 44470940, upload-time = "2026-02-16T10:10:10.704Z" }, + { url = "https://files.pythonhosted.org/packages/2c/a5/da83046273d990f256cb79796a190bbf7ec999269705ddc609403f8c6b06/pyarrow-23.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:813d99f31275919c383aab17f0f455a04f5a429c261cc411b1e9a8f5e4aaaa05", size = 47586063, upload-time = "2026-02-16T10:10:17.95Z" }, + { url = "https://files.pythonhosted.org/packages/5b/3c/b7d2ebcff47a514f47f9da1e74b7949138c58cfeb108cdd4ee62f43f0cf3/pyarrow-23.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bf5842f960cddd2ef757d486041d57c96483efc295a8c4a0e20e704cbbf39c67", size = 48173045, upload-time = "2026-02-16T10:10:25.363Z" }, + { url = "https://files.pythonhosted.org/packages/43/b2/b40961262213beaba6acfc88698eb773dfce32ecdf34d19291db94c2bd73/pyarrow-23.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564baf97c858ecc03ec01a41062e8f4698abc3e6e2acd79c01c2e97880a19730", size = 50621741, upload-time = "2026-02-16T10:10:33.477Z" }, + { url = "https://files.pythonhosted.org/packages/f6/70/1fdda42d65b28b078e93d75d371b2185a61da89dda4def8ba6ba41ebdeb4/pyarrow-23.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:07deae7783782ac7250989a7b2ecde9b3c343a643f82e8a4df03d93b633006f0", size = 27620678, upload-time = "2026-02-16T10:10:39.31Z" }, ] [[package]] From fc389a54c575dae11b8eeb5578c70a69ee86d279 Mon Sep 17 00:00:00 2001 From: dataCenter430 <161712630+dataCenter430@users.noreply.github.com> Date: Mon, 13 Apr 2026 23:09:55 -0700 Subject: [PATCH 03/47] refactor: replace bare dict with typed annotations in core tools module (#35098) --- api/core/tools/entities/tool_bundle.py | 3 ++- api/core/tools/tool_engine.py | 4 ++-- 
.../tools/utils/dataset_retriever/dataset_retriever_tool.py | 4 ++-- api/core/tools/workflow_as_tool/tool.py | 4 ++-- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/api/core/tools/entities/tool_bundle.py b/api/core/tools/entities/tool_bundle.py index 10710c4376..4e07b7157a 100644 --- a/api/core/tools/entities/tool_bundle.py +++ b/api/core/tools/entities/tool_bundle.py @@ -1,4 +1,5 @@ from collections.abc import Mapping +from typing import Any from pydantic import BaseModel, Field @@ -26,6 +27,6 @@ class ApiToolBundle(BaseModel): # icon icon: str | None = None # openapi operation - openapi: dict + openapi: dict[str, Any] # output schema output_schema: Mapping[str, object] = Field(default_factory=dict) diff --git a/api/core/tools/tool_engine.py b/api/core/tools/tool_engine.py index 1afaa9cfaf..d060fa8b49 100644 --- a/api/core/tools/tool_engine.py +++ b/api/core/tools/tool_engine.py @@ -47,7 +47,7 @@ class ToolEngine: @staticmethod def agent_invoke( tool: Tool, - tool_parameters: Union[str, dict], + tool_parameters: Union[str, dict[str, Any]], user_id: str, tenant_id: str, message: Message, @@ -85,7 +85,7 @@ class ToolEngine: invocation_meta_dict: dict[str, ToolInvokeMeta] = {} def message_callback( - invocation_meta_dict: dict[str, Any], + invocation_meta_dict: dict[str, ToolInvokeMeta], messages: Generator[ToolInvokeMessage | ToolInvokeMeta, None, None], ): for message in messages: diff --git a/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py b/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py index 6a189fa6aa..0d1dc7273b 100644 --- a/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py @@ -1,4 +1,4 @@ -from typing import cast +from typing import Any, cast from pydantic import BaseModel, Field from sqlalchemy import select @@ -39,7 +39,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool): dataset_id: str user_id: str | None = None retrieve_config: DatasetRetrieveConfigEntity - inputs: dict + inputs: dict[str, Any] @classmethod def from_dataset(cls, dataset: Dataset, **kwargs): diff --git a/api/core/tools/workflow_as_tool/tool.py b/api/core/tools/workflow_as_tool/tool.py index 552fbab1a4..7c4f8ee03a 100644 --- a/api/core/tools/workflow_as_tool/tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -277,7 +277,7 @@ class WorkflowTool(Tool): session.expunge(app) return app - def _transform_args(self, tool_parameters: dict[str, Any]) -> tuple[dict[str, Any], list[dict[str, Any]]]: + def _transform_args(self, tool_parameters: dict[str, Any]) -> tuple[dict[str, Any], list[dict[str, str | None]]]: """ transform the tool parameters @@ -355,7 +355,7 @@ class WorkflowTool(Tool): return result, files - def _update_file_mapping(self, file_dict: dict[str, Any]): + def _update_file_mapping(self, file_dict: dict[str, Any]) -> dict[str, Any]: file_id = resolve_file_record_id(file_dict.get("reference") or file_dict.get("related_id")) transfer_method = FileTransferMethod.value_of(file_dict.get("transfer_method")) match transfer_method: From ed401728eb77f3f711a0d64f45720c499e7f0c70 Mon Sep 17 00:00:00 2001 From: dataCenter430 <161712630+dataCenter430@users.noreply.github.com> Date: Mon, 13 Apr 2026 23:11:00 -0700 Subject: [PATCH 04/47] refactor: replace bare dict with typed annotations in app_config/extension/provider (#35099) --- api/core/agent/plugin_entities.py | 2 +- .../app_config/common/sensitive_word_avoidance/manager.py | 4 ++-- 
.../app_config/easy_ui_based_app/model_config/manager.py | 2 +- api/core/extension/api_based_extension_requestor.py | 6 +++--- api/core/extension/extensible.py | 4 ++-- api/core/external_data_tool/factory.py | 2 +- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/api/core/agent/plugin_entities.py b/api/core/agent/plugin_entities.py index 90aa7b5fd4..8d25863a91 100644 --- a/api/core/agent/plugin_entities.py +++ b/api/core/agent/plugin_entities.py @@ -84,7 +84,7 @@ class AgentStrategyEntity(BaseModel): identity: AgentStrategyIdentity parameters: list[AgentStrategyParameter] = Field(default_factory=list) description: I18nObject = Field(..., description="The description of the agent strategy") - output_schema: dict | None = None + output_schema: dict[str, Any] | None = None features: list[AgentFeature] | None = None meta_version: str | None = None # pydantic configs diff --git a/api/core/app/app_config/common/sensitive_word_avoidance/manager.py b/api/core/app/app_config/common/sensitive_word_avoidance/manager.py index 7d1b11c008..c8ec7cb44d 100644 --- a/api/core/app/app_config/common/sensitive_word_avoidance/manager.py +++ b/api/core/app/app_config/common/sensitive_word_avoidance/manager.py @@ -22,8 +22,8 @@ class SensitiveWordAvoidanceConfigManager: @classmethod def validate_and_set_defaults( - cls, tenant_id: str, config: dict, only_structure_validate: bool = False - ) -> tuple[dict, list[str]]: + cls, tenant_id: str, config: dict[str, Any], only_structure_validate: bool = False + ) -> tuple[dict[str, Any], list[str]]: if not config.get("sensitive_word_avoidance"): config["sensitive_word_avoidance"] = {"enabled": False} diff --git a/api/core/app/app_config/easy_ui_based_app/model_config/manager.py b/api/core/app/app_config/easy_ui_based_app/model_config/manager.py index 981bd26961..9d980e5ca3 100644 --- a/api/core/app/app_config/easy_ui_based_app/model_config/manager.py +++ b/api/core/app/app_config/easy_ui_based_app/model_config/manager.py @@ -41,7 +41,7 @@ class ModelConfigManager: ) @classmethod - def validate_and_set_defaults(cls, tenant_id: str, config: Mapping[str, Any]) -> tuple[dict, list[str]]: + def validate_and_set_defaults(cls, tenant_id: str, config: Mapping[str, Any]) -> tuple[dict[str, Any], list[str]]: """ Validate and set defaults for model config diff --git a/api/core/extension/api_based_extension_requestor.py b/api/core/extension/api_based_extension_requestor.py index f9e6099049..01139d07e2 100644 --- a/api/core/extension/api_based_extension_requestor.py +++ b/api/core/extension/api_based_extension_requestor.py @@ -1,4 +1,4 @@ -from typing import cast +from typing import Any, cast import httpx @@ -14,7 +14,7 @@ class APIBasedExtensionRequestor: self.api_endpoint = api_endpoint self.api_key = api_key - def request(self, point: APIBasedExtensionPoint, params: dict): + def request(self, point: APIBasedExtensionPoint, params: dict[str, Any]) -> dict[str, Any]: """ Request the api. 
@@ -49,4 +49,4 @@ class APIBasedExtensionRequestor: if response.status_code != 200: raise ValueError(f"request error, status_code: {response.status_code}, content: {response.text[:100]}") - return cast(dict, response.json()) + return cast(dict[str, Any], response.json()) diff --git a/api/core/extension/extensible.py b/api/core/extension/extensible.py index b79dbeb7e0..c08e319aac 100644 --- a/api/core/extension/extensible.py +++ b/api/core/extension/extensible.py @@ -21,8 +21,8 @@ class ExtensionModule(StrEnum): class ModuleExtension(BaseModel): extension_class: Any | None = None name: str - label: dict | None = None - form_schema: list | None = None + label: dict[str, Any] | None = None + form_schema: list[dict[str, Any]] | None = None builtin: bool = True position: int | None = None diff --git a/api/core/external_data_tool/factory.py b/api/core/external_data_tool/factory.py index f7a64cea1b..f404aa7286 100644 --- a/api/core/external_data_tool/factory.py +++ b/api/core/external_data_tool/factory.py @@ -13,7 +13,7 @@ class ExternalDataToolFactory: ) @classmethod - def validate_config(cls, name: str, tenant_id: str, config: dict[str, Any]): + def validate_config(cls, name: str, tenant_id: str, config: dict[str, Any]) -> None: """ Validate the incoming form config data. From 974d2f16274a6059813be37cdbd9e9c03b2ba1af Mon Sep 17 00:00:00 2001 From: dataCenter430 <161712630+dataCenter430@users.noreply.github.com> Date: Mon, 13 Apr 2026 23:15:52 -0700 Subject: [PATCH 05/47] refactor: replace bare dict with typed annotations in llm_generator and prompt (#35100) --- api/core/llm_generator/llm_generator.py | 4 ++-- api/core/llm_generator/output_parser/structured_output.py | 8 ++++---- api/core/prompt/simple_prompt_transform.py | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py index aa258c9f89..c43c0274cd 100644 --- a/api/core/llm_generator/llm_generator.py +++ b/api/core/llm_generator/llm_generator.py @@ -2,7 +2,7 @@ import json import logging import re from collections.abc import Sequence -from typing import Protocol, TypedDict, cast +from typing import Any, Protocol, TypedDict, cast import json_repair from graphon.enums import WorkflowNodeExecutionMetadataKey @@ -533,7 +533,7 @@ class LLMGenerator: def __instruction_modify_common( tenant_id: str, model_config: ModelConfig, - last_run: dict | None, + last_run: dict[str, Any] | None, current: str | None, error_message: str | None, instruction: str, diff --git a/api/core/llm_generator/output_parser/structured_output.py b/api/core/llm_generator/output_parser/structured_output.py index 9bdca1e83b..a8ad7c9179 100644 --- a/api/core/llm_generator/output_parser/structured_output.py +++ b/api/core/llm_generator/output_parser/structured_output.py @@ -202,7 +202,7 @@ def _handle_native_json_schema( structured_output_schema: Mapping, model_parameters: dict[str, Any], rules: list[ParameterRule], -): +) -> dict[str, Any]: """ Handle structured output for models with native JSON schema support. @@ -224,7 +224,7 @@ def _handle_native_json_schema( return model_parameters -def _set_response_format(model_parameters: dict[str, Any], rules: list[ParameterRule]): +def _set_response_format(model_parameters: dict[str, Any], rules: list[ParameterRule]) -> None: """ Set the appropriate response format parameter based on model rules. 
@@ -326,7 +326,7 @@ def _prepare_schema_for_model(provider: str, model_schema: AIModelEntity, schema return {"schema": processed_schema, "name": "llm_response"} -def remove_additional_properties(schema: dict[str, Any]): +def remove_additional_properties(schema: dict[str, Any]) -> None: """ Remove additionalProperties fields from JSON schema. Used for models like Gemini that don't support this property. @@ -349,7 +349,7 @@ def remove_additional_properties(schema: dict[str, Any]): remove_additional_properties(item) -def convert_boolean_to_string(schema: dict): +def convert_boolean_to_string(schema: dict[str, Any]) -> None: """ Convert boolean type specifications to string in JSON schema. diff --git a/api/core/prompt/simple_prompt_transform.py b/api/core/prompt/simple_prompt_transform.py index d4e17613a2..dc8391a6a5 100644 --- a/api/core/prompt/simple_prompt_transform.py +++ b/api/core/prompt/simple_prompt_transform.py @@ -313,7 +313,7 @@ class SimplePromptTransform(PromptTransform): return prompt_message - def _get_prompt_rule(self, app_mode: AppMode, provider: str, model: str): + def _get_prompt_rule(self, app_mode: AppMode, provider: str, model: str) -> dict[str, Any]: """ Get simple prompt rule. :param app_mode: app mode @@ -325,7 +325,7 @@ class SimplePromptTransform(PromptTransform): # Check if the prompt file is already loaded if prompt_file_name in prompt_file_contents: - return cast(dict, prompt_file_contents[prompt_file_name]) + return cast(dict[str, Any], prompt_file_contents[prompt_file_name]) # Get the absolute path of the subdirectory prompt_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "prompt_templates") @@ -338,7 +338,7 @@ class SimplePromptTransform(PromptTransform): # Store the content of the prompt file prompt_file_contents[prompt_file_name] = content - return cast(dict, content) + return cast(dict[str, Any], content) def _prompt_file_name(self, app_mode: AppMode, provider: str, model: str) -> str: # baichuan From fbedb603718e34c7e6f5db3890be747ea374bbf8 Mon Sep 17 00:00:00 2001 From: dataCenter430 <161712630+dataCenter430@users.noreply.github.com> Date: Mon, 13 Apr 2026 23:16:16 -0700 Subject: [PATCH 06/47] refactor: replace bare dict with typed annotations in core rag module (#35097) --- api/core/indexing_runner.py | 4 ++-- api/core/rag/embedding/cached_embedding.py | 2 +- api/core/rag/embedding/embedding_base.py | 2 +- api/core/rag/retrieval/output_parser/react_output.py | 6 +++--- api/core/rag/retrieval/router/multi_dataset_react_route.py | 4 ++-- api/core/rag/splitter/text_splitter.py | 2 +- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py index 9aaf85dc0f..8d0a8b99b4 100644 --- a/api/core/indexing_runner.py +++ b/api/core/indexing_runner.py @@ -737,7 +737,7 @@ class IndexingRunner: def _update_document_index_status( document_id: str, after_indexing_status: IndexingStatus, - extra_update_params: dict[Any, Any] | None = None, + extra_update_params: Mapping[Any, Any] | None = None, ): """ Update the document indexing status. @@ -764,7 +764,7 @@ class IndexingRunner: db.session.commit() @staticmethod - def _update_segments_by_document(dataset_document_id: str, update_params: dict[Any, Any]): + def _update_segments_by_document(dataset_document_id: str, update_params: Mapping[Any, Any]): """ Update the document segment by document id. 
""" diff --git a/api/core/rag/embedding/cached_embedding.py b/api/core/rag/embedding/cached_embedding.py index f5f5f541da..9f1c73ec88 100644 --- a/api/core/rag/embedding/cached_embedding.py +++ b/api/core/rag/embedding/cached_embedding.py @@ -106,7 +106,7 @@ class CacheEmbedding(Embeddings): return text_embeddings - def embed_multimodal_documents(self, multimodel_documents: list[dict]) -> list[list[float]]: + def embed_multimodal_documents(self, multimodel_documents: list[dict[str, Any]]) -> list[list[float]]: """Embed file documents.""" # use doc embedding cache or store if not exists multimodel_embeddings: list[Any] = [None for _ in range(len(multimodel_documents))] diff --git a/api/core/rag/embedding/embedding_base.py b/api/core/rag/embedding/embedding_base.py index ab190d2c42..7ae5c09ab7 100644 --- a/api/core/rag/embedding/embedding_base.py +++ b/api/core/rag/embedding/embedding_base.py @@ -11,7 +11,7 @@ class Embeddings(ABC): raise NotImplementedError @abstractmethod - def embed_multimodal_documents(self, multimodel_documents: list[dict]) -> list[list[float]]: + def embed_multimodal_documents(self, multimodel_documents: list[dict[str, Any]]) -> list[list[float]]: """Embed file documents.""" raise NotImplementedError diff --git a/api/core/rag/retrieval/output_parser/react_output.py b/api/core/rag/retrieval/output_parser/react_output.py index 9a14d41716..29abae4280 100644 --- a/api/core/rag/retrieval/output_parser/react_output.py +++ b/api/core/rag/retrieval/output_parser/react_output.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import NamedTuple, Union +from typing import Any, NamedTuple, Union @dataclass @@ -10,7 +10,7 @@ class ReactAction: tool: str """The name of the Tool to execute.""" - tool_input: Union[str, dict] + tool_input: Union[str, dict[str, Any]] """The input to pass in to the Tool.""" log: str """Additional information to log about the action.""" @@ -19,7 +19,7 @@ class ReactAction: class ReactFinish(NamedTuple): """The final return value of an ReactFinish.""" - return_values: dict + return_values: dict[str, Any] """Dictionary of return values.""" log: str """Additional information to log about the return value""" diff --git a/api/core/rag/retrieval/router/multi_dataset_react_route.py b/api/core/rag/retrieval/router/multi_dataset_react_route.py index dd280cdf6a..9b223075d8 100644 --- a/api/core/rag/retrieval/router/multi_dataset_react_route.py +++ b/api/core/rag/retrieval/router/multi_dataset_react_route.py @@ -1,5 +1,5 @@ from collections.abc import Generator, Sequence -from typing import Union +from typing import Any, Union from graphon.model_runtime.entities.llm_entities import LLMResult, LLMUsage from graphon.model_runtime.entities.message_entities import PromptMessage, PromptMessageRole, PromptMessageTool @@ -139,7 +139,7 @@ class ReactMultiDatasetRouter: def _invoke_llm( self, - completion_param: dict, + completion_param: dict[str, Any], model_instance: ModelInstance, prompt_messages: list[PromptMessage], stop: list[str], diff --git a/api/core/rag/splitter/text_splitter.py b/api/core/rag/splitter/text_splitter.py index 8977611f93..7f2117e2dd 100644 --- a/api/core/rag/splitter/text_splitter.py +++ b/api/core/rag/splitter/text_splitter.py @@ -63,7 +63,7 @@ class TextSplitter(BaseDocumentTransformer, ABC): def split_text(self, text: str) -> list[str]: """Split text into multiple components.""" - def create_documents(self, texts: list[str], metadatas: list[dict] | None = None) -> list[Document]: + def 
create_documents(self, texts: list[str], metadatas: list[dict[str, Any]] | None = None) -> list[Document]: """Create documents from a list of texts.""" _metadatas = metadatas or [{}] * len(texts) documents = [] From 711fe6ba2ce4c776641e6548b3ad8e7d73e8c5bd Mon Sep 17 00:00:00 2001 From: aether <144865106+aether-png@users.noreply.github.com> Date: Tue, 14 Apr 2026 12:33:53 +0530 Subject: [PATCH 07/47] refactor: convert plugin permission if/elif to match/case (#30001) (#35140) --- api/controllers/console/workspace/__init__.py | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/api/controllers/console/workspace/__init__.py b/api/controllers/console/workspace/__init__.py index 60f712e476..59dd29fdac 100644 --- a/api/controllers/console/workspace/__init__.py +++ b/api/controllers/console/workspace/__init__.py @@ -35,22 +35,24 @@ def plugin_permission_required( return view(*args, **kwargs) if install_required: - if permission.install_permission == TenantPluginPermission.InstallPermission.NOBODY: - raise Forbidden() - if permission.install_permission == TenantPluginPermission.InstallPermission.ADMINS: - if not user.is_admin_or_owner: + match permission.install_permission: + case TenantPluginPermission.InstallPermission.NOBODY: raise Forbidden() - if permission.install_permission == TenantPluginPermission.InstallPermission.EVERYONE: - pass + case TenantPluginPermission.InstallPermission.ADMINS: + if not user.is_admin_or_owner: + raise Forbidden() + case TenantPluginPermission.InstallPermission.EVERYONE: + pass if debug_required: - if permission.debug_permission == TenantPluginPermission.DebugPermission.NOBODY: - raise Forbidden() - if permission.debug_permission == TenantPluginPermission.DebugPermission.ADMINS: - if not user.is_admin_or_owner: + match permission.debug_permission: + case TenantPluginPermission.DebugPermission.NOBODY: raise Forbidden() - if permission.debug_permission == TenantPluginPermission.DebugPermission.EVERYONE: - pass + case TenantPluginPermission.DebugPermission.ADMINS: + if not user.is_admin_or_owner: + raise Forbidden() + case TenantPluginPermission.DebugPermission.EVERYONE: + pass return view(*args, **kwargs) From f7c6270f7400d2de52fae911234c56e19c959fc4 Mon Sep 17 00:00:00 2001 From: HeYinKazune <70251095+HeYin-OS@users.noreply.github.com> Date: Tue, 14 Apr 2026 16:23:29 +0900 Subject: [PATCH 08/47] refactor: use sessionmaker in tool_label_manager.py (#34895) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/core/tools/tool_label_manager.py | 73 +++++++++--- .../tools/workflow_tools_manage_service.py | 90 ++++++++------ .../core/tools/test_tool_label_manager.py | 111 ++++++++++++++++-- 3 files changed, 209 insertions(+), 65 deletions(-) diff --git a/api/core/tools/tool_label_manager.py b/api/core/tools/tool_label_manager.py index 58190d1089..d8969a3391 100644 --- a/api/core/tools/tool_label_manager.py +++ b/api/core/tools/tool_label_manager.py @@ -1,4 +1,5 @@ from sqlalchemy import delete, select +from sqlalchemy.orm import Session, sessionmaker from core.tools.__base.tool_provider import ToolProviderController from core.tools.builtin_tool.provider import BuiltinToolProviderController @@ -19,10 +20,18 @@ class ToolLabelManager: return list(set(tool_labels)) @classmethod - def update_tool_labels(cls, controller: ToolProviderController, labels: list[str]): + def update_tool_labels( + cls, controller: ToolProviderController, labels: list[str], session: Session | None = None + ) -> None: """ 
Update tool labels + + :param controller: tool provider controller + :param labels: list of tool labels + :param session: database session, if None, a new session will be created + :return: None """ + labels = cls.filter_tool_labels(labels) if isinstance(controller, ApiToolProviderController | WorkflowToolProviderController): @@ -30,26 +39,46 @@ class ToolLabelManager: else: raise ValueError("Unsupported tool type") + if session is not None: + cls._update_tool_labels_logics(session, provider_id, controller, labels) + else: + with sessionmaker(db.engine).begin() as _session: + cls._update_tool_labels_logics(_session, provider_id, controller, labels) + + @classmethod + def _update_tool_labels_logics( + cls, session: Session, provider_id: str, controller: ToolProviderController, labels: list[str] + ) -> None: + """ + Update tool labels logics + + :param session: database session + :param provider_id: tool provider ID + :param controller: tool provider controller + :param labels: list of tool labels + :return: None + """ + # delete old labels - db.session.execute(delete(ToolLabelBinding).where(ToolLabelBinding.tool_id == provider_id)) + _ = session.execute( + delete(ToolLabelBinding).where( + ToolLabelBinding.tool_id == provider_id, ToolLabelBinding.tool_type == controller.provider_type + ) + ) # insert new labels for label in labels: - db.session.add( - ToolLabelBinding( - tool_id=provider_id, - tool_type=controller.provider_type, - label_name=label, - ) - ) - - db.session.commit() + session.add(ToolLabelBinding(tool_id=provider_id, tool_type=controller.provider_type, label_name=label)) @classmethod def get_tool_labels(cls, controller: ToolProviderController) -> list[str]: """ Get tool labels + + :param controller: tool provider controller + :return: list of tool labels (str) """ + if isinstance(controller, ApiToolProviderController | WorkflowToolProviderController): provider_id = controller.provider_id elif isinstance(controller, BuiltinToolProviderController): @@ -60,9 +89,11 @@ class ToolLabelManager: ToolLabelBinding.tool_id == provider_id, ToolLabelBinding.tool_type == controller.provider_type, ) - labels = db.session.scalars(stmt).all() - return list(labels) + with sessionmaker(db.engine, expire_on_commit=False).begin() as _session: + labels: list[str] = list(_session.scalars(stmt).all()) + + return labels @classmethod def get_tools_labels(cls, tool_providers: list[ToolProviderController]) -> dict[str, list[str]]: @@ -78,16 +109,22 @@ class ToolLabelManager: if not tool_providers: return {} + provider_ids: list[str] = [] + provider_types: set[str] = set() + for controller in tool_providers: if not isinstance(controller, ApiToolProviderController | WorkflowToolProviderController): raise ValueError("Unsupported tool type") - - provider_ids = [] - for controller in tool_providers: - assert isinstance(controller, ApiToolProviderController | WorkflowToolProviderController) provider_ids.append(controller.provider_id) + provider_types.add(controller.provider_type) - labels = db.session.scalars(select(ToolLabelBinding).where(ToolLabelBinding.tool_id.in_(provider_ids))).all() + labels: list[ToolLabelBinding] = [] + + with sessionmaker(db.engine, expire_on_commit=False).begin() as _session: + stmt = select(ToolLabelBinding).where( + ToolLabelBinding.tool_id.in_(provider_ids), ToolLabelBinding.tool_type.in_(list(provider_types)) + ) + labels = list(_session.scalars(stmt).all()) tool_labels: dict[str, list[str]] = {label.tool_id: [] for label in labels} diff --git 
a/api/services/tools/workflow_tools_manage_service.py b/api/services/tools/workflow_tools_manage_service.py index 779f7c4511..be2572b592 100644 --- a/api/services/tools/workflow_tools_manage_service.py +++ b/api/services/tools/workflow_tools_manage_service.py @@ -139,62 +139,82 @@ class WorkflowToolManageService: :param labels: labels :return: the updated tool """ - # check if the name is unique - existing_workflow_tool_provider = db.session.scalar( - select(WorkflowToolProvider) - .where( - WorkflowToolProvider.tenant_id == tenant_id, - WorkflowToolProvider.name == name, - WorkflowToolProvider.id != workflow_tool_id, - ) - .limit(1) - ) + existing_workflow_tool_provider: WorkflowToolProvider | None = None + with sessionmaker(db.engine, expire_on_commit=False).begin() as _session: + # query if the name exists for other tools + existing_workflow_tool_provider = _session.scalar( + select(WorkflowToolProvider) + .where( + WorkflowToolProvider.tenant_id == tenant_id, + WorkflowToolProvider.name == name, + WorkflowToolProvider.id != workflow_tool_id, + ) + .limit(1) + ) + + # if the name exists raise error if existing_workflow_tool_provider is not None: raise ValueError(f"Tool with name {name} already exists") - workflow_tool_provider: WorkflowToolProvider | None = db.session.scalar( - select(WorkflowToolProvider) - .where(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id) - .limit(1) - ) + # query the workflow tool provider + workflow_tool_provider: WorkflowToolProvider | None = None + with sessionmaker(db.engine, expire_on_commit=False).begin() as _session: + workflow_tool_provider = _session.scalar( + select(WorkflowToolProvider) + .where(WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == workflow_tool_id) + .limit(1) + ) + # if not found raise error if workflow_tool_provider is None: raise ValueError(f"Tool {workflow_tool_id} not found") - app: App | None = db.session.scalar( - select(App).where(App.id == workflow_tool_provider.app_id, App.tenant_id == tenant_id).limit(1) - ) + # query the app + app: App | None = None + with sessionmaker(db.engine, expire_on_commit=False).begin() as _session: + app = _session.scalar( + select(App).where(App.id == workflow_tool_provider.app_id, App.tenant_id == tenant_id).limit(1) + ) + # if not found raise error if app is None: raise ValueError(f"App {workflow_tool_provider.app_id} not found") + # query the workflow workflow: Workflow | None = app.workflow + + # if not found raise error if workflow is None: raise ValueError(f"Workflow not found for app {workflow_tool_provider.app_id}") + # check if workflow configuration is synced WorkflowToolConfigurationUtils.ensure_no_human_input_nodes(workflow.graph_dict) - workflow_tool_provider.name = name - workflow_tool_provider.label = label - workflow_tool_provider.icon = json.dumps(icon) - workflow_tool_provider.description = description - workflow_tool_provider.parameter_configuration = json.dumps([p.model_dump() for p in parameters]) - workflow_tool_provider.privacy_policy = privacy_policy - workflow_tool_provider.version = workflow.version - workflow_tool_provider.updated_at = datetime.now() + with sessionmaker(db.engine).begin() as _session: + _session.add(workflow_tool_provider) - try: - WorkflowToolProviderController.from_db(workflow_tool_provider) - except Exception as e: - raise ValueError(str(e)) + # update workflow tool provider + workflow_tool_provider.name = name + workflow_tool_provider.label = label + workflow_tool_provider.icon = 
json.dumps(icon) + workflow_tool_provider.description = description + workflow_tool_provider.parameter_configuration = json.dumps([p.model_dump() for p in parameters]) + workflow_tool_provider.privacy_policy = privacy_policy + workflow_tool_provider.version = workflow.version + workflow_tool_provider.updated_at = datetime.now() - db.session.commit() + try: + WorkflowToolProviderController.from_db(workflow_tool_provider) + except Exception as e: + raise ValueError(str(e)) - if labels is not None: - ToolLabelManager.update_tool_labels( - ToolTransformService.workflow_provider_to_controller(workflow_tool_provider), labels - ) + if labels is not None: + ToolLabelManager.update_tool_labels( + ToolTransformService.workflow_provider_to_controller(workflow_tool_provider), + labels, + session=_session, + ) return {"result": "success"} diff --git a/api/tests/unit_tests/core/tools/test_tool_label_manager.py b/api/tests/unit_tests/core/tools/test_tool_label_manager.py index 8c0e7e9419..e13f430f9b 100644 --- a/api/tests/unit_tests/core/tools/test_tool_label_manager.py +++ b/api/tests/unit_tests/core/tools/test_tool_label_manager.py @@ -2,7 +2,7 @@ from __future__ import annotations from types import SimpleNamespace from typing import Any -from unittest.mock import PropertyMock, patch +from unittest.mock import MagicMock, PropertyMock, patch import pytest @@ -12,11 +12,13 @@ from core.tools.tool_label_manager import ToolLabelManager from core.tools.workflow_as_tool.provider import WorkflowToolProviderController +# Create a mock class for testing abstract/base classes class _ConcreteBuiltinToolProviderController(BuiltinToolProviderController): def _validate_credentials(self, user_id: str, credentials: dict[str, Any]): return None +# Factory function to create a "lightweight" controller for testing def _api_controller(provider_id: str = "api-1") -> ApiToolProviderController: controller = object.__new__(ApiToolProviderController) controller.provider_id = provider_id @@ -29,6 +31,7 @@ def _workflow_controller(provider_id: str = "wf-1") -> WorkflowToolProviderContr return controller +# Test pure logic: filtering and deduplication def test_tool_label_manager_filter_tool_labels(): filtered = ToolLabelManager.filter_tool_labels(["search", "search", "invalid", "news"]) assert set(filtered) == {"search", "news"} @@ -36,22 +39,68 @@ def test_tool_label_manager_filter_tool_labels(): def test_tool_label_manager_update_tool_labels_db(): + """ + Test the database update logic for tool labels. + Focus: Verify that labels are filtered, de-duplicated, and safely handled within a database session. + """ + # 1. Setup expected data from the controller controller = _api_controller("api-1") - with patch("core.tools.tool_label_manager.db") as mock_db: + expected_id = controller.provider_id + expected_type = controller.provider_type + + # 2. Patching External Dependencies + # - We patch 'db' to prevent Flask from trying to access a real database. + # - We patch 'sessionmaker' to intercept and control the creation of SQLAlchemy sessions. + with ( + patch("core.tools.tool_label_manager.db"), + patch("core.tools.tool_label_manager.sessionmaker") as mock_sessionmaker, + ): + # 3. 
Constructing the "Mocking Chain" + # In the business logic, we use: with sessionmaker(db.engine).begin() as _session: + # We need to link our 'mock_session' to the end of this complex context manager chain: + # Step A: sessionmaker(db.engine) -> returns an object (mock_sessionmaker.return_value) + # Step B: .begin() -> returns a context manager (begin.return_value) + # Step C: with ... as _session: -> calls __enter__(), and _session gets the __enter__.return_value + mock_session = MagicMock() + mock_sessionmaker.return_value.begin.return_value.__enter__.return_value = mock_session + + # 4. Trigger the logic under test + # Input: ["search", "search", "invalid"] + # Logic: + # - "invalid" should be filtered out (not in default_tool_label_name_list). + # - The duplicate "search" should be merged (unique labels). ToolLabelManager.update_tool_labels(controller, ["search", "search", "invalid"]) - mock_db.session.execute.assert_called_once() - # only one valid unique label should be inserted. - assert mock_db.session.add.call_count == 1 - mock_db.session.commit.assert_called_once() + # 5. Behavior Assertion: DELETE operation + # Verify that the manager first attempts to clear existing labels for this specific tool. + # This ensures the update is idempotent. + mock_session.execute.assert_called_once() + + # 6. Behavior Assertion: INSERT operation + # Verify that only ONE valid label ("search") was added after filtering and deduplication. + # If call_count == 1, it proves filter_tool_labels() worked as expected. + assert mock_session.add.call_count == 1 + + # 7. State Assertion: Data Integrity & Isolation + # Inspect the actual object passed to session.add() to ensure it has correct properties. + # This confirms that the data isolation (tool_id + tool_type) we refactored is active. + call_args = mock_session.add.call_args + added_label = call_args[0][0] # Retrieve the ToolLabelBinding instance + + assert added_label.label_name == "search", "The label name should be 'search' after filtering." + assert added_label.tool_id == expected_id, "The tool_id must match the provider_id for correct binding." + assert added_label.tool_type == expected_type, "Isolation failed: tool_type must be verified during update." 
+# Test error handling def test_tool_label_manager_update_tool_labels_unsupported(): with pytest.raises(ValueError, match="Unsupported tool type"): ToolLabelManager.update_tool_labels(object(), ["search"]) # type: ignore[arg-type] +# Test retrieval logic def test_tool_label_manager_get_tool_labels_for_builtin_and_db(): + # Mocking a property (@property) using PropertyMock with patch.object( _ConcreteBuiltinToolProviderController, "tool_labels", @@ -62,29 +111,67 @@ def test_tool_label_manager_get_tool_labels_for_builtin_and_db(): assert ToolLabelManager.get_tool_labels(builtin) == ["search", "news"] api = _api_controller("api-1") - with patch("core.tools.tool_label_manager.db") as mock_db: - mock_db.session.scalars.return_value.all.return_value = ["search", "news"] - labels = ToolLabelManager.get_tool_labels(api) - assert labels == ["search", "news"] + with ( + patch("core.tools.tool_label_manager.db"), + patch("core.tools.tool_label_manager.sessionmaker") as mock_sessionmaker, + ): + mock_session = MagicMock() + mock_sessionmaker.return_value.begin.return_value.__enter__.return_value = mock_session + # Inject mock data into the query result: session.scalars(stmt).all() + mock_session.scalars.return_value.all.return_value = ["search", "news"] + + labels = ToolLabelManager.get_tool_labels(api) + assert labels == ["search", "news"] + + +def test_tool_label_manager_get_tool_labels_unsupported(): + """ + Negative Test: Ensure get_tool_labels raises ValueError for unsupported controller types. + This protects the internal API contract against accidental regressions during refactoring. + """ + # Passing a generic object() which doesn't match Api, Workflow, or Builtin controllers. with pytest.raises(ValueError, match="Unsupported tool type"): ToolLabelManager.get_tool_labels(object()) # type: ignore[arg-type] +# Test batch processing and mapping def test_tool_label_manager_get_tools_labels_batch(): assert ToolLabelManager.get_tools_labels([]) == {} api = _api_controller("api-1") wf = _workflow_controller("wf-1") + + # SimpleNamespace is a quick way to simulate SQLAlchemy row objects records = [ SimpleNamespace(tool_id="api-1", label_name="search"), SimpleNamespace(tool_id="api-1", label_name="news"), SimpleNamespace(tool_id="wf-1", label_name="utilities"), ] - with patch("core.tools.tool_label_manager.db") as mock_db: - mock_db.session.scalars.return_value.all.return_value = records + + with ( + patch("core.tools.tool_label_manager.db"), + patch("core.tools.tool_label_manager.sessionmaker") as mock_sessionmaker, + ): + mock_session = MagicMock() + mock_sessionmaker.return_value.begin.return_value.__enter__.return_value = mock_session + + # Simulating the batch query result + mock_session.scalars.return_value.all.return_value = records + labels = ToolLabelManager.get_tools_labels([api, wf]) + + # Verify the final dictionary mapping assert labels == {"api-1": ["search", "news"], "wf-1": ["utilities"]} + +def test_tool_label_manager_get_tools_labels_unsupported(): + """ + Negative Test: Ensure get_tools_labels raises ValueError if the list contains + unsupported controller types, even alongside valid ones. 
+ """ + api = _api_controller("api-1") + + # Passing a list with one valid controller and one invalid object() with pytest.raises(ValueError, match="Unsupported tool type"): ToolLabelManager.get_tools_labels([api, object()]) # type: ignore[list-item] From 62bb830338adaf14f1acde10758f5f0027294f0d Mon Sep 17 00:00:00 2001 From: aether <144865106+aether-png@users.noreply.github.com> Date: Tue, 14 Apr 2026 13:16:58 +0530 Subject: [PATCH 09/47] refactor: convert InvokeFrom if/elif to match/case (#35143) --- .../apps/workflow/generate_task_pipeline.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index f1b8b08eaa..96387133b1 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -682,15 +682,16 @@ class WorkflowAppGenerateTaskPipeline(GraphRuntimeStateSupport): def _save_workflow_app_log(self, *, session: Session, workflow_run_id: str | None): invoke_from = self._application_generate_entity.invoke_from - if invoke_from == InvokeFrom.SERVICE_API: - created_from = WorkflowAppLogCreatedFrom.SERVICE_API - elif invoke_from == InvokeFrom.EXPLORE: - created_from = WorkflowAppLogCreatedFrom.INSTALLED_APP - elif invoke_from == InvokeFrom.WEB_APP: - created_from = WorkflowAppLogCreatedFrom.WEB_APP - else: - # not save log for debugging - return + match invoke_from: + case InvokeFrom.SERVICE_API: + created_from = WorkflowAppLogCreatedFrom.SERVICE_API + case InvokeFrom.EXPLORE: + created_from = WorkflowAppLogCreatedFrom.INSTALLED_APP + case InvokeFrom.WEB_APP: + created_from = WorkflowAppLogCreatedFrom.WEB_APP + case InvokeFrom.DEBUGGER | InvokeFrom.TRIGGER | InvokeFrom.PUBLISHED_PIPELINE | InvokeFrom.VALIDATION: + # not save log for debugging + return if not workflow_run_id: return From 173e0d6f35f37eaad58e7d9beeb6ce571dfd1b4b Mon Sep 17 00:00:00 2001 From: bohdansolovie <153934212+bohdansolovie@users.noreply.github.com> Date: Tue, 14 Apr 2026 03:56:07 -0400 Subject: [PATCH 10/47] test: migrate clean_dataset integration tests to SQLAlchemy 2.0 APIs (#35146) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../tasks/test_clean_dataset_task.py | 147 ++++++++++++------ 1 file changed, 98 insertions(+), 49 deletions(-) diff --git a/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py b/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py index 1dd37fbc92..32bc2fc0bd 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py @@ -16,6 +16,7 @@ from unittest.mock import MagicMock, patch import pytest from faker import Faker +from sqlalchemy import delete, select from sqlalchemy.orm import Session from core.rag.index_processor.constant.index_type import IndexStructureType, IndexTechniqueType @@ -52,18 +53,18 @@ class TestCleanDatasetTask: from extensions.ext_redis import redis_client # Clear all test data using the provided session fixture - db_session_with_containers.query(DatasetMetadataBinding).delete() - db_session_with_containers.query(DatasetMetadata).delete() - db_session_with_containers.query(AppDatasetJoin).delete() - db_session_with_containers.query(DatasetQuery).delete() - db_session_with_containers.query(DatasetProcessRule).delete() - 
db_session_with_containers.query(DocumentSegment).delete() - db_session_with_containers.query(Document).delete() - db_session_with_containers.query(Dataset).delete() - db_session_with_containers.query(UploadFile).delete() - db_session_with_containers.query(TenantAccountJoin).delete() - db_session_with_containers.query(Tenant).delete() - db_session_with_containers.query(Account).delete() + db_session_with_containers.execute(delete(DatasetMetadataBinding)) + db_session_with_containers.execute(delete(DatasetMetadata)) + db_session_with_containers.execute(delete(AppDatasetJoin)) + db_session_with_containers.execute(delete(DatasetQuery)) + db_session_with_containers.execute(delete(DatasetProcessRule)) + db_session_with_containers.execute(delete(DocumentSegment)) + db_session_with_containers.execute(delete(Document)) + db_session_with_containers.execute(delete(Dataset)) + db_session_with_containers.execute(delete(UploadFile)) + db_session_with_containers.execute(delete(TenantAccountJoin)) + db_session_with_containers.execute(delete(Tenant)) + db_session_with_containers.execute(delete(Account)) db_session_with_containers.commit() # Clear Redis cache @@ -302,28 +303,40 @@ class TestCleanDatasetTask: # Verify results # Check that dataset-related data was cleaned up - documents = db_session_with_containers.query(Document).filter_by(dataset_id=dataset.id).all() + documents = db_session_with_containers.scalars(select(Document).where(Document.dataset_id == dataset.id)).all() assert len(documents) == 0 - segments = db_session_with_containers.query(DocumentSegment).filter_by(dataset_id=dataset.id).all() + segments = db_session_with_containers.scalars( + select(DocumentSegment).where(DocumentSegment.dataset_id == dataset.id) + ).all() assert len(segments) == 0 # Check that metadata and bindings were cleaned up - metadata = db_session_with_containers.query(DatasetMetadata).filter_by(dataset_id=dataset.id).all() + metadata = db_session_with_containers.scalars( + select(DatasetMetadata).where(DatasetMetadata.dataset_id == dataset.id) + ).all() assert len(metadata) == 0 - bindings = db_session_with_containers.query(DatasetMetadataBinding).filter_by(dataset_id=dataset.id).all() + bindings = db_session_with_containers.scalars( + select(DatasetMetadataBinding).where(DatasetMetadataBinding.dataset_id == dataset.id) + ).all() assert len(bindings) == 0 # Check that process rules and queries were cleaned up - process_rules = db_session_with_containers.query(DatasetProcessRule).filter_by(dataset_id=dataset.id).all() + process_rules = db_session_with_containers.scalars( + select(DatasetProcessRule).where(DatasetProcessRule.dataset_id == dataset.id) + ).all() assert len(process_rules) == 0 - queries = db_session_with_containers.query(DatasetQuery).filter_by(dataset_id=dataset.id).all() + queries = db_session_with_containers.scalars( + select(DatasetQuery).where(DatasetQuery.dataset_id == dataset.id) + ).all() assert len(queries) == 0 # Check that app dataset joins were cleaned up - app_joins = db_session_with_containers.query(AppDatasetJoin).filter_by(dataset_id=dataset.id).all() + app_joins = db_session_with_containers.scalars( + select(AppDatasetJoin).where(AppDatasetJoin.dataset_id == dataset.id) + ).all() assert len(app_joins) == 0 # Verify index processor was called @@ -414,24 +427,32 @@ class TestCleanDatasetTask: # Verify results # Check that all documents were deleted - remaining_documents = db_session_with_containers.query(Document).filter_by(dataset_id=dataset.id).all() + remaining_documents = 
db_session_with_containers.scalars( + select(Document).where(Document.dataset_id == dataset.id) + ).all() assert len(remaining_documents) == 0 # Check that all segments were deleted - remaining_segments = db_session_with_containers.query(DocumentSegment).filter_by(dataset_id=dataset.id).all() + remaining_segments = db_session_with_containers.scalars( + select(DocumentSegment).where(DocumentSegment.dataset_id == dataset.id) + ).all() assert len(remaining_segments) == 0 # Check that all upload files were deleted - remaining_files = db_session_with_containers.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).all() + remaining_files = db_session_with_containers.scalars( + select(UploadFile).where(UploadFile.id.in_(upload_file_ids)) + ).all() assert len(remaining_files) == 0 # Check that metadata and bindings were cleaned up - remaining_metadata = db_session_with_containers.query(DatasetMetadata).filter_by(dataset_id=dataset.id).all() + remaining_metadata = db_session_with_containers.scalars( + select(DatasetMetadata).where(DatasetMetadata.dataset_id == dataset.id) + ).all() assert len(remaining_metadata) == 0 - remaining_bindings = ( - db_session_with_containers.query(DatasetMetadataBinding).filter_by(dataset_id=dataset.id).all() - ) + remaining_bindings = db_session_with_containers.scalars( + select(DatasetMetadataBinding).where(DatasetMetadataBinding.dataset_id == dataset.id) + ).all() assert len(remaining_bindings) == 0 # Verify index processor was called @@ -485,12 +506,14 @@ class TestCleanDatasetTask: # Check that all data was cleaned up - remaining_documents = db_session_with_containers.query(Document).filter_by(dataset_id=dataset.id).all() + remaining_documents = db_session_with_containers.scalars( + select(Document).where(Document.dataset_id == dataset.id) + ).all() assert len(remaining_documents) == 0 - remaining_segments = ( - db_session_with_containers.query(DocumentSegment).filter_by(dataset_id=dataset.id).all() - ) + remaining_segments = db_session_with_containers.scalars( + select(DocumentSegment).where(DocumentSegment.dataset_id == dataset.id) + ).all() assert len(remaining_segments) == 0 # Recreate data for next test case @@ -538,11 +561,15 @@ class TestCleanDatasetTask: # Verify results - even with vector cleanup failure, documents and segments should be deleted # Check that documents were still deleted despite vector cleanup failure - remaining_documents = db_session_with_containers.query(Document).filter_by(dataset_id=dataset.id).all() + remaining_documents = db_session_with_containers.scalars( + select(Document).where(Document.dataset_id == dataset.id) + ).all() assert len(remaining_documents) == 0 # Check that segments were still deleted despite vector cleanup failure - remaining_segments = db_session_with_containers.query(DocumentSegment).filter_by(dataset_id=dataset.id).all() + remaining_segments = db_session_with_containers.scalars( + select(DocumentSegment).where(DocumentSegment.dataset_id == dataset.id) + ).all() assert len(remaining_segments) == 0 # Verify that index processor was called and failed @@ -622,18 +649,22 @@ class TestCleanDatasetTask: # Verify results # Check that all documents were deleted - remaining_documents = db_session_with_containers.query(Document).filter_by(dataset_id=dataset.id).all() + remaining_documents = db_session_with_containers.scalars( + select(Document).where(Document.dataset_id == dataset.id) + ).all() assert len(remaining_documents) == 0 # Check that all segments were deleted - remaining_segments = 
db_session_with_containers.query(DocumentSegment).filter_by(dataset_id=dataset.id).all() + remaining_segments = db_session_with_containers.scalars( + select(DocumentSegment).where(DocumentSegment.dataset_id == dataset.id) + ).all() assert len(remaining_segments) == 0 # Check that all image files were deleted from database image_file_ids = [f.id for f in image_files] - remaining_image_files = ( - db_session_with_containers.query(UploadFile).where(UploadFile.id.in_(image_file_ids)).all() - ) + remaining_image_files = db_session_with_containers.scalars( + select(UploadFile).where(UploadFile.id.in_(image_file_ids)) + ).all() assert len(remaining_image_files) == 0 # Verify that storage.delete was called for each image file @@ -738,24 +769,32 @@ class TestCleanDatasetTask: # Verify results # Check that all documents were deleted - remaining_documents = db_session_with_containers.query(Document).filter_by(dataset_id=dataset.id).all() + remaining_documents = db_session_with_containers.scalars( + select(Document).where(Document.dataset_id == dataset.id) + ).all() assert len(remaining_documents) == 0 # Check that all segments were deleted - remaining_segments = db_session_with_containers.query(DocumentSegment).filter_by(dataset_id=dataset.id).all() + remaining_segments = db_session_with_containers.scalars( + select(DocumentSegment).where(DocumentSegment.dataset_id == dataset.id) + ).all() assert len(remaining_segments) == 0 # Check that all upload files were deleted - remaining_files = db_session_with_containers.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).all() + remaining_files = db_session_with_containers.scalars( + select(UploadFile).where(UploadFile.id.in_(upload_file_ids)) + ).all() assert len(remaining_files) == 0 # Check that all metadata and bindings were deleted - remaining_metadata = db_session_with_containers.query(DatasetMetadata).filter_by(dataset_id=dataset.id).all() + remaining_metadata = db_session_with_containers.scalars( + select(DatasetMetadata).where(DatasetMetadata.dataset_id == dataset.id) + ).all() assert len(remaining_metadata) == 0 - remaining_bindings = ( - db_session_with_containers.query(DatasetMetadataBinding).filter_by(dataset_id=dataset.id).all() - ) + remaining_bindings = db_session_with_containers.scalars( + select(DatasetMetadataBinding).where(DatasetMetadataBinding.dataset_id == dataset.id) + ).all() assert len(remaining_bindings) == 0 # Verify performance expectations @@ -826,7 +865,9 @@ class TestCleanDatasetTask: # Check that upload file was still deleted from database despite storage failure # Note: When storage operations fail, the upload file may not be deleted # This demonstrates that the cleanup process continues even with storage errors - remaining_files = db_session_with_containers.query(UploadFile).filter_by(id=upload_file.id).all() + remaining_files = db_session_with_containers.scalars( + select(UploadFile).where(UploadFile.id == upload_file.id) + ).all() # The upload file should still be deleted from the database even if storage cleanup fails # However, this depends on the specific implementation of clean_dataset_task if len(remaining_files) > 0: @@ -976,19 +1017,27 @@ class TestCleanDatasetTask: # Verify results # Check that all documents were deleted - remaining_documents = db_session_with_containers.query(Document).filter_by(dataset_id=dataset.id).all() + remaining_documents = db_session_with_containers.scalars( + select(Document).where(Document.dataset_id == dataset.id) + ).all() assert len(remaining_documents) == 0 # Check that all 
segments were deleted - remaining_segments = db_session_with_containers.query(DocumentSegment).filter_by(dataset_id=dataset.id).all() + remaining_segments = db_session_with_containers.scalars( + select(DocumentSegment).where(DocumentSegment.dataset_id == dataset.id) + ).all() assert len(remaining_segments) == 0 # Check that all upload files were deleted - remaining_files = db_session_with_containers.query(UploadFile).filter_by(id=upload_file_id).all() + remaining_files = db_session_with_containers.scalars( + select(UploadFile).where(UploadFile.id == upload_file_id) + ).all() assert len(remaining_files) == 0 # Check that all metadata was deleted - remaining_metadata = db_session_with_containers.query(DatasetMetadata).filter_by(dataset_id=dataset.id).all() + remaining_metadata = db_session_with_containers.scalars( + select(DatasetMetadata).where(DatasetMetadata.dataset_id == dataset.id) + ).all() assert len(remaining_metadata) == 0 # Verify that storage.delete was called From a951cc996b234660c2498d2c0a093a589ab0c1ff Mon Sep 17 00:00:00 2001 From: bohdansolovie <153934212+bohdansolovie@users.noreply.github.com> Date: Tue, 14 Apr 2026 03:56:11 -0400 Subject: [PATCH 11/47] test: migrate document indexing task tests to SQLAlchemy 2.0 select API (#35145) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../tasks/test_disable_segments_from_index_task.py | 7 ++++--- .../tasks/test_document_indexing_update_task.py | 11 ++++++----- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py index 3e9a0c8f7f..6e03bd9351 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py @@ -9,6 +9,7 @@ The task is responsible for removing document segments from the search index whe from unittest.mock import MagicMock, patch from faker import Faker +from sqlalchemy import select from sqlalchemy.orm import Session from core.rag.index_processor.constant.index_type import IndexStructureType, IndexTechniqueType @@ -471,9 +472,9 @@ class TestDisableSegmentsFromIndexTask: db_session_with_containers.refresh(segments[1]) # Check that segments are re-enabled after error - updated_segments = ( - db_session_with_containers.query(DocumentSegment).where(DocumentSegment.id.in_(segment_ids)).all() - ) + updated_segments = db_session_with_containers.scalars( + select(DocumentSegment).where(DocumentSegment.id.in_(segment_ids)) + ).all() for segment in updated_segments: assert segment.enabled is True diff --git a/api/tests/test_containers_integration_tests/tasks/test_document_indexing_update_task.py b/api/tests/test_containers_integration_tests/tasks/test_document_indexing_update_task.py index d94abf2b40..a9a8c0f30c 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_document_indexing_update_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_document_indexing_update_task.py @@ -2,6 +2,7 @@ from unittest.mock import MagicMock, patch import pytest from faker import Faker +from sqlalchemy import func, select from core.rag.index_processor.constant.index_type import IndexStructureType, IndexTechniqueType from models import Account, Tenant, TenantAccountJoin, TenantAccountRole @@ -123,13 +124,13 @@ class TestDocumentIndexingUpdateTask: 
db_session_with_containers.expire_all() # Assert document status updated before reindex - updated = db_session_with_containers.query(Document).where(Document.id == document.id).first() + updated = db_session_with_containers.scalar(select(Document).where(Document.id == document.id).limit(1)) assert updated.indexing_status == IndexingStatus.PARSING assert updated.processing_started_at is not None # Segments should be deleted - remaining = ( - db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count() + remaining = db_session_with_containers.scalar( + select(func.count()).select_from(DocumentSegment).where(DocumentSegment.document_id == document.id) ) assert remaining == 0 @@ -167,8 +168,8 @@ class TestDocumentIndexingUpdateTask: mock_external_dependencies["runner_instance"].run.assert_called_once() # Segments should remain (since clean failed before DB delete) - remaining = ( - db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count() + remaining = db_session_with_containers.scalar( + select(func.count()).select_from(DocumentSegment).where(DocumentSegment.document_id == document.id) ) assert remaining > 0 From d7ad2baf7958992199aa69648a46cc00cb863784 Mon Sep 17 00:00:00 2001 From: Joel Date: Tue, 14 Apr 2026 16:15:07 +0800 Subject: [PATCH 12/47] chore: clarify tracing error copy to direct users to the Tracing tab (#35153) --- pnpm-lock.yaml | 12 ---- .../base/ui/select/__tests__/index.spec.tsx | 13 ++-- .../panel/__tests__/workflow-preview.spec.tsx | 40 +++++++++++- .../workflow/panel/workflow-preview.tsx | 5 ++ .../workflow/run/__tests__/status.spec.tsx | 65 ++++++++++++++++++- web/app/components/workflow/run/index.tsx | 1 + .../components/workflow/run/result-panel.tsx | 3 + web/app/components/workflow/run/status.tsx | 32 ++++++++- web/i18n/ar-TN/workflow.json | 2 +- web/i18n/de-DE/workflow.json | 2 +- web/i18n/en-US/workflow.json | 2 +- web/i18n/es-ES/workflow.json | 2 +- web/i18n/fa-IR/workflow.json | 2 +- web/i18n/fr-FR/workflow.json | 2 +- web/i18n/hi-IN/workflow.json | 2 +- web/i18n/id-ID/workflow.json | 2 +- web/i18n/it-IT/workflow.json | 2 +- web/i18n/ja-JP/workflow.json | 2 +- web/i18n/ko-KR/workflow.json | 2 +- web/i18n/nl-NL/workflow.json | 2 +- web/i18n/pl-PL/workflow.json | 2 +- web/i18n/pt-BR/workflow.json | 2 +- web/i18n/ro-RO/workflow.json | 2 +- web/i18n/ru-RU/workflow.json | 2 +- web/i18n/sl-SI/workflow.json | 2 +- web/i18n/th-TH/workflow.json | 2 +- web/i18n/tr-TR/workflow.json | 2 +- web/i18n/uk-UA/workflow.json | 2 +- web/i18n/vi-VN/workflow.json | 2 +- web/i18n/zh-Hans/workflow.json | 2 +- web/i18n/zh-Hant/workflow.json | 2 +- web/package.json | 2 - 32 files changed, 167 insertions(+), 52 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8901c7948f..4444981601 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -24,9 +24,6 @@ catalogs: '@cucumber/cucumber': specifier: 12.8.0 version: 12.8.0 - '@date-fns/tz': - specifier: 1.4.1 - version: 1.4.1 '@egoist/tailwindcss-icons': specifier: 1.9.2 version: 1.9.2 @@ -270,9 +267,6 @@ catalogs: cron-parser: specifier: 5.5.0 version: 5.5.0 - date-fns: - specifier: 4.1.0 - version: 4.1.0 dayjs: specifier: 1.11.20 version: 1.11.20 @@ -655,9 +649,6 @@ importers: '@base-ui/react': specifier: 'catalog:' version: 1.4.0(@date-fns/tz@1.4.1)(@types/react@19.2.14)(date-fns@4.1.0)(react-dom@19.2.5(react@19.2.5))(react@19.2.5) - '@date-fns/tz': - specifier: 'catalog:' - version: 1.4.1 '@emoji-mart/data': specifier: 'catalog:' 
version: 1.2.1 @@ -760,9 +751,6 @@ importers: cron-parser: specifier: 'catalog:' version: 5.5.0 - date-fns: - specifier: 'catalog:' - version: 4.1.0 dayjs: specifier: 'catalog:' version: 1.11.20 diff --git a/web/app/components/base/ui/select/__tests__/index.spec.tsx b/web/app/components/base/ui/select/__tests__/index.spec.tsx index f33b1eb650..124eb4d60e 100644 --- a/web/app/components/base/ui/select/__tests__/index.spec.tsx +++ b/web/app/components/base/ui/select/__tests__/index.spec.tsx @@ -42,12 +42,10 @@ const renderOpenSelect = ({ describe('Select wrappers', () => { describe('Select root integration', () => { - it('should associate the hidden input with an external form and preserve autocomplete hints', () => { - const formId = 'profile-form' + it('should submit the hidden input value and preserve autocomplete hints inside a form', () => { const { container } = render( - <> -
- @@ -56,13 +54,12 @@ describe('Select wrappers', () => { New York - , +
, ) const hiddenInput = container.querySelector('input[name="city"]') - const form = container.querySelector(`#${formId}`) as HTMLFormElement + const form = screen.getByRole('form', { name: 'profile form' }) as HTMLFormElement - expect(hiddenInput).toHaveAttribute('form', formId) expect(hiddenInput).toHaveAttribute('autocomplete', 'address-level2') expect(new FormData(form).get('city')).toBe('seattle') }) diff --git a/web/app/components/workflow/panel/__tests__/workflow-preview.spec.tsx b/web/app/components/workflow/panel/__tests__/workflow-preview.spec.tsx index 860322d729..e3c85bd2ad 100644 --- a/web/app/components/workflow/panel/__tests__/workflow-preview.spec.tsx +++ b/web/app/components/workflow/panel/__tests__/workflow-preview.spec.tsx @@ -33,7 +33,18 @@ vi.mock('@/app/components/workflow/hooks', () => ({ })) vi.mock('@/app/components/workflow/run/result-panel', () => ({ - default: ({ status }: { status?: string }) =>
{status}
, + default: ({ + status, + onOpenTracingTab, + }: { + status?: string + onOpenTracingTab?: () => void + }) => ( +
+
{status}
+ +
+ ), })) vi.mock('@/app/components/workflow/run/result-text', () => ({ @@ -329,6 +340,33 @@ describe('WorkflowPreview', () => { expect(screen.getByTestId('result-panel')).toBeInTheDocument() }) + it('should switch to the tracing tab when result panel requests it', async () => { + const user = userEvent.setup() + + renderWorkflowComponent( + , + { + initialStoreState: { + workflowRunningData: { + ...createWorkflowRunningData({ + result: createWorkflowResult({ + status: 'partial-succeeded', + files: [], + }), + tracing: [createNodeTracing()], + }), + resultText: 'ready', + } as NonNullable, + }, + }, + ) + + await user.click(screen.getByText('runLog.detail')) + await user.click(screen.getByRole('button', { name: 'open-tracing' })) + + expect(screen.getByTestId('tracing-panel')).toHaveTextContent('1') + }) + it('should resize the preview panel within the allowed workflow canvas bounds', async () => { const { container, store } = renderWorkflowComponent( , diff --git a/web/app/components/workflow/panel/workflow-preview.tsx b/web/app/components/workflow/panel/workflow-preview.tsx index d3950dcbca..e1e442c0cb 100644 --- a/web/app/components/workflow/panel/workflow-preview.tsx +++ b/web/app/components/workflow/panel/workflow-preview.tsx @@ -101,6 +101,10 @@ const WorkflowPreview = () => { await submitHumanInputForm(formToken, formData) }, []) + const handleOpenTracingTab = useCallback(() => { + switchTab('TRACING') + }, []) + return (
{ created_by={(workflowRunningData?.result?.created_by as any)?.name} steps={workflowRunningData?.result?.total_steps} exceptionCounts={workflowRunningData?.result?.exceptions_count} + onOpenTracingTab={handleOpenTracingTab} /> )} {currentTab === 'DETAIL' && !workflowRunningData?.result && ( diff --git a/web/app/components/workflow/run/__tests__/status.spec.tsx b/web/app/components/workflow/run/__tests__/status.spec.tsx index 01f32c4c47..24682aa47f 100644 --- a/web/app/components/workflow/run/__tests__/status.spec.tsx +++ b/web/app/components/workflow/run/__tests__/status.spec.tsx @@ -1,11 +1,60 @@ import type { WorkflowPausedDetailsResponse } from '@/models/log' -import { render, screen } from '@testing-library/react' +import { fireEvent, render, screen } from '@testing-library/react' +import { cloneElement, isValidElement } from 'react' import { createDocLinkMock, resolveDocLink } from '../../__tests__/i18n' import Status from '../status' const mockDocLink = createDocLinkMock() const mockUseWorkflowPausedDetails = vi.fn() +vi.mock('react-i18next', () => ({ + useTranslation: () => ({ + t: (key: string, options?: Record) => { + const fullKey = options?.ns ? `${options.ns}.${key}` : key + if (fullKey === 'workflow.nodes.common.errorHandle.partialSucceeded.tip') + return 'There are {{num}} nodes in the process running abnormally, please go to TRACING to check the logs.' + + const params = { ...options } + delete params.ns + const suffix = Object.keys(params).length > 0 ? `:${JSON.stringify(params)}` : '' + return `${fullKey}${suffix}` + }, + }), + Trans: ({ + i18nKey, + values, + components, + }: { + i18nKey: string + values?: { + num?: string | number + } + components?: Record + }) => { + if (i18nKey !== 'nodes.common.errorHandle.partialSucceeded.tip') + return {i18nKey} + + const tracingLink = components?.tracingLink + const tracingNode = isValidElement(tracingLink) + ? cloneElement(tracingLink, undefined, 'TRACING') + : 'TRACING' + + return ( + + There are + {' '} + {values?.num} + {' '} + nodes in the process running abnormally, please go to + {' '} + {tracingNode} + {' '} + to check the logs. 
+ + ) + }, +})) + vi.mock('@/context/i18n', () => ({ useDocLink: () => mockDocLink, })) @@ -64,14 +113,24 @@ describe('Status', () => { expect(screen.getByText('FAIL')).toBeInTheDocument() expect(screen.getByText('Something broke')).toBeInTheDocument() - expect(screen.getByText('workflow.nodes.common.errorHandle.partialSucceeded.tip:{"num":2}')).toBeInTheDocument() + expect(screen.getAllByText((_, element) => element?.textContent === 'There are 2 nodes in the process running abnormally, please go to TRACING to check the logs.')).toHaveLength(2) }) it('renders the partial-succeeded warning summary', () => { render() expect(screen.getByText('PARTIAL SUCCESS')).toBeInTheDocument() - expect(screen.getByText('workflow.nodes.common.errorHandle.partialSucceeded.tip:{"num":3}')).toBeInTheDocument() + expect(screen.getAllByText((_, element) => element?.textContent === 'There are 3 nodes in the process running abnormally, please go to TRACING to check the logs.')).toHaveLength(2) + }) + + it('opens the tracing tab when clicking the TRACING link', () => { + const onOpenTracingTab = vi.fn() + + render() + + fireEvent.click(screen.getByRole('link', { name: 'TRACING' })) + + expect(onOpenTracingTab).toHaveBeenCalledTimes(1) }) it('renders the exception learn-more link', () => { diff --git a/web/app/components/workflow/run/index.tsx b/web/app/components/workflow/run/index.tsx index d39e6d43c3..417d38657e 100644 --- a/web/app/components/workflow/run/index.tsx +++ b/web/app/components/workflow/run/index.tsx @@ -174,6 +174,7 @@ const RunPanel: FC = ({ exceptionCounts={runDetail.exceptions_count} isListening={isListening} workflowRunId={runDetail.id} + onOpenTracingTab={() => switchTab('TRACING')} /> )} {!loading && currentTab === 'DETAIL' && !runDetail && isListening && ( diff --git a/web/app/components/workflow/run/result-panel.tsx b/web/app/components/workflow/run/result-panel.tsx index 58f783e6c4..c7f4a45540 100644 --- a/web/app/components/workflow/run/result-panel.tsx +++ b/web/app/components/workflow/run/result-panel.tsx @@ -42,6 +42,7 @@ export type ResultPanelProps = { execution_metadata?: any isListening?: boolean workflowRunId?: string + onOpenTracingTab?: () => void handleShowIterationResultList?: (detail: NodeTracing[][], iterDurationMap: any) => void handleShowLoopResultList?: (detail: NodeTracing[][], loopDurationMap: any) => void onShowRetryDetail?: (detail: NodeTracing[]) => void @@ -69,6 +70,7 @@ const ResultPanel: FC = ({ execution_metadata, isListening = false, workflowRunId, + onOpenTracingTab, handleShowIterationResultList, handleShowLoopResultList, onShowRetryDetail, @@ -92,6 +94,7 @@ const ResultPanel: FC = ({ exceptionCounts={exceptionCounts} isListening={isListening} workflowRunId={workflowRunId} + onOpenTracingTab={onOpenTracingTab} />
diff --git a/web/app/components/workflow/run/status.tsx b/web/app/components/workflow/run/status.tsx index d4ecfcc0fd..233c6207a9 100644 --- a/web/app/components/workflow/run/status.tsx +++ b/web/app/components/workflow/run/status.tsx @@ -1,7 +1,7 @@ 'use client' import type { FC } from 'react' import { useMemo } from 'react' -import { useTranslation } from 'react-i18next' +import { Trans, useTranslation } from 'react-i18next' import Indicator from '@/app/components/header/indicator' import StatusContainer from '@/app/components/workflow/run/status-container' import { useDocLink } from '@/context/i18n' @@ -16,6 +16,7 @@ type ResultProps = { exceptionCounts?: number isListening?: boolean workflowRunId?: string + onOpenTracingTab?: () => void } const StatusPanel: FC = ({ @@ -26,6 +27,7 @@ const StatusPanel: FC = ({ exceptionCounts, isListening = false, workflowRunId, + onOpenTracingTab, }) => { const { t } = useTranslation() const docLink = useDocLink() @@ -65,6 +67,30 @@ const StatusPanel: FC = ({ return inputURLs }, [pausedDetails]) + const partialSucceededTip = exceptionCounts + ? ( + { + e.preventDefault() + onOpenTracingTab() + }} + /> + ) + : , + }} + /> + ) + : null + return (
@@ -160,7 +186,7 @@ const StatusPanel: FC = ({ <>
- {t('nodes.common.errorHandle.partialSucceeded.tip', { ns: 'workflow', num: exceptionCounts })} + {partialSucceededTip}
) @@ -172,7 +198,7 @@ const StatusPanel: FC = ({ <>
- {t('nodes.common.errorHandle.partialSucceeded.tip', { ns: 'workflow', num: exceptionCounts })} + {partialSucceededTip}
) diff --git a/web/i18n/ar-TN/workflow.json b/web/i18n/ar-TN/workflow.json index e0e498e8d4..56192350c3 100644 --- a/web/i18n/ar-TN/workflow.json +++ b/web/i18n/ar-TN/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "فرع الفشل", "nodes.common.errorHandle.none.desc": "ستتوقف العقدة عن العمل في حالة حدوث استثناء ولم يتم التعامل معه", "nodes.common.errorHandle.none.title": "لا شيء", - "nodes.common.errorHandle.partialSucceeded.tip": "هناك {{num}} عقد في العملية تعمل بشكل غير طبيعي، يرجى الانتقال إلى التتبع للتحقق من السجلات.", + "nodes.common.errorHandle.partialSucceeded.tip": "هناك {{num}} عقد في العملية تعمل بشكل غير طبيعي، يرجى الانتقال إلى التتبع للتحقق من السجلات.", "nodes.common.errorHandle.tip": "استراتيجية التعامل مع الاستثناءات، يتم تشغيلها عندما تواجه العقدة استثناءً.", "nodes.common.errorHandle.title": "معالجة الأخطاء", "nodes.common.inputVars": "متغيرات الإدخال", diff --git a/web/i18n/de-DE/workflow.json b/web/i18n/de-DE/workflow.json index 7a1ddeeb1c..75cb0d30ca 100644 --- a/web/i18n/de-DE/workflow.json +++ b/web/i18n/de-DE/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Fehlgeschlagener Zweig", "nodes.common.errorHandle.none.desc": "Der Knoten wird nicht mehr ausgeführt, wenn eine Ausnahme auftritt und nicht behandelt wird", "nodes.common.errorHandle.none.title": "Nichts", - "nodes.common.errorHandle.partialSucceeded.tip": "Es gibt {{num}} Knoten im Prozess, die nicht normal laufen, bitte gehen Sie zur Ablaufverfolgung, um die Protokolle zu überprüfen.", + "nodes.common.errorHandle.partialSucceeded.tip": "Es gibt {{num}} Knoten im Prozess, die nicht normal laufen, bitte gehen Sie zur Ablaufverfolgung, um die Protokolle zu überprüfen.", "nodes.common.errorHandle.tip": "Ausnahmebehandlungsstrategie, die ausgelöst wird, wenn ein Knoten auf eine Ausnahme stößt.", "nodes.common.errorHandle.title": "Fehlerbehandlung", "nodes.common.inputVars": "Eingabevariablen", diff --git a/web/i18n/en-US/workflow.json b/web/i18n/en-US/workflow.json index 6f6e5de2cc..42522950b8 100644 --- a/web/i18n/en-US/workflow.json +++ b/web/i18n/en-US/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Fail Branch", "nodes.common.errorHandle.none.desc": "The node will stop running if an exception occurs and is not handled", "nodes.common.errorHandle.none.title": "None", - "nodes.common.errorHandle.partialSucceeded.tip": "There are {{num}} nodes in the process running abnormally, please go to tracing to check the logs.", + "nodes.common.errorHandle.partialSucceeded.tip": "There are {{num}} nodes in the process running abnormally, please go to TRACING to check the logs.", "nodes.common.errorHandle.tip": "Exception handling strategy, triggered when a node encounters an exception.", "nodes.common.errorHandle.title": "Error Handling", "nodes.common.inputVars": "Input Variables", diff --git a/web/i18n/es-ES/workflow.json b/web/i18n/es-ES/workflow.json index 1955357c45..5545707b4f 100644 --- a/web/i18n/es-ES/workflow.json +++ b/web/i18n/es-ES/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Rama de error", "nodes.common.errorHandle.none.desc": "El nodo dejará de ejecutarse si se produce una excepción y no se controla", "nodes.common.errorHandle.none.title": "Ninguno", - "nodes.common.errorHandle.partialSucceeded.tip": "Hay nodos {{num}} en el proceso que se ejecutan de manera anormal, vaya a rastreo para verificar los registros.", + "nodes.common.errorHandle.partialSucceeded.tip": "Hay nodos {{num}} 
en el proceso que se ejecutan de manera anormal, vaya a rastreo para verificar los registros.", "nodes.common.errorHandle.tip": "Estrategia de control de excepciones, que se desencadena cuando un nodo encuentra una excepción.", "nodes.common.errorHandle.title": "Manejo de errores", "nodes.common.inputVars": "Variables de entrada", diff --git a/web/i18n/fa-IR/workflow.json b/web/i18n/fa-IR/workflow.json index e957d45267..fbe6685681 100644 --- a/web/i18n/fa-IR/workflow.json +++ b/web/i18n/fa-IR/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "شاخه شکست", "nodes.common.errorHandle.none.desc": "اگر استثنایی رخ دهد و مدیریت نشود، گره از کار می‌افتد", "nodes.common.errorHandle.none.title": "هیچ‌کدام", - "nodes.common.errorHandle.partialSucceeded.tip": "{{num}} گره با خطا مواجه شدند؛ برای بررسی لاگ‌ها به ردیابی مراجعه کنید.", + "nodes.common.errorHandle.partialSucceeded.tip": "{{num}} گره با خطا مواجه شدند؛ برای بررسی لاگ‌ها به ردیابی مراجعه کنید.", "nodes.common.errorHandle.tip": "استراتژی مدیریت استثنا؛ زمانی که گره با خطا مواجه شود فعال می‌شود.", "nodes.common.errorHandle.title": "مدیریت خطا", "nodes.common.inputVars": "متغیرهای ورودی", diff --git a/web/i18n/fr-FR/workflow.json b/web/i18n/fr-FR/workflow.json index ec23ac5c98..b7f7048ad2 100644 --- a/web/i18n/fr-FR/workflow.json +++ b/web/i18n/fr-FR/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Branche d’échec", "nodes.common.errorHandle.none.desc": "Le nœud cessera de s’exécuter si une exception se produit et n’est pas gérée", "nodes.common.errorHandle.none.title": "Aucun", - "nodes.common.errorHandle.partialSucceeded.tip": "Il y a des nœuds {{num}} dans le processus qui fonctionnent anormalement, veuillez aller dans le traçage pour vérifier les journaux.", + "nodes.common.errorHandle.partialSucceeded.tip": "Il y a des nœuds {{num}} dans le processus qui fonctionnent anormalement, veuillez aller dans le traçage pour vérifier les journaux.", "nodes.common.errorHandle.tip": "Stratégie de gestion des exceptions, déclenchée lorsqu’un nœud rencontre une exception.", "nodes.common.errorHandle.title": "Gestion des erreurs", "nodes.common.inputVars": "Variables d’entrée", diff --git a/web/i18n/hi-IN/workflow.json b/web/i18n/hi-IN/workflow.json index f18de6aa3c..9166eb43d8 100644 --- a/web/i18n/hi-IN/workflow.json +++ b/web/i18n/hi-IN/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "असफल शाखा", "nodes.common.errorHandle.none.desc": "यदि कोई अपवाद होता है और हैंडल नहीं किया जाता है, तो नोड चलना बंद कर देगा", "nodes.common.errorHandle.none.title": "कोई नहीं", - "nodes.common.errorHandle.partialSucceeded.tip": "प्रक्रिया में {{num}} नोड्स असामान्य रूप से चल रहे हैं, कृपया लॉग की जांच करने के लिए ट्रेसिंग पर जाएं।", + "nodes.common.errorHandle.partialSucceeded.tip": "प्रक्रिया में {{num}} नोड्स असामान्य रूप से चल रहे हैं, कृपया लॉग की जांच करने के लिए ट्रेसिंग पर जाएं।", "nodes.common.errorHandle.tip": "अपवाद हैंडलिंग रणनीति, ट्रिगर जब एक नोड एक अपवाद का सामना करता है।", "nodes.common.errorHandle.title": "त्रुटि हैंडलिंग", "nodes.common.inputVars": "इनपुट चर", diff --git a/web/i18n/id-ID/workflow.json b/web/i18n/id-ID/workflow.json index 5030489cb1..7e72577e3b 100644 --- a/web/i18n/id-ID/workflow.json +++ b/web/i18n/id-ID/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Cabang Gagal", "nodes.common.errorHandle.none.desc": "Node akan berhenti berjalan jika pengecualian terjadi dan tidak ditangani", 
"nodes.common.errorHandle.none.title": "Tidak", - "nodes.common.errorHandle.partialSucceeded.tip": "Ada {{num}} node dalam proses yang berjalan tidak normal, silakan pergi ke tracing untuk memeriksa log.", + "nodes.common.errorHandle.partialSucceeded.tip": "Ada {{num}} node dalam proses yang berjalan tidak normal, silakan pergi ke tracing untuk memeriksa log.", "nodes.common.errorHandle.tip": "Strategi penanganan pengecualian, dipicu ketika simpul menemukan pengecualian.", "nodes.common.errorHandle.title": "Penanganan Kesalahan", "nodes.common.inputVars": "Variabel Masukan", diff --git a/web/i18n/it-IT/workflow.json b/web/i18n/it-IT/workflow.json index a39e00c5d8..2fde43f694 100644 --- a/web/i18n/it-IT/workflow.json +++ b/web/i18n/it-IT/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Ramo fallito", "nodes.common.errorHandle.none.desc": "L'esecuzione del nodo verrà interrotta se si verifica un'eccezione e non viene gestita", "nodes.common.errorHandle.none.title": "Nessuno", - "nodes.common.errorHandle.partialSucceeded.tip": "Ci sono {{num}} nodi nel processo che funzionano in modo anomalo, si prega di andare su tracing per controllare i log.", + "nodes.common.errorHandle.partialSucceeded.tip": "Ci sono {{num}} nodi nel processo che funzionano in modo anomalo, si prega di andare su tracing per controllare i log.", "nodes.common.errorHandle.tip": "Strategia di gestione delle eccezioni, attivata quando un nodo rileva un'eccezione.", "nodes.common.errorHandle.title": "Gestione degli errori", "nodes.common.inputVars": "Variabili di input", diff --git a/web/i18n/ja-JP/workflow.json b/web/i18n/ja-JP/workflow.json index 089468053a..11cf9caa34 100644 --- a/web/i18n/ja-JP/workflow.json +++ b/web/i18n/ja-JP/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "例外分岐", "nodes.common.errorHandle.none.desc": "例外発生時に処理を停止", "nodes.common.errorHandle.none.title": "処理なし", - "nodes.common.errorHandle.partialSucceeded.tip": "{{num}}個のノードで異常発生。ログはトレース画面で確認可能", + "nodes.common.errorHandle.partialSucceeded.tip": "{{num}}個のノードで異常発生。ログはトレース画面で確認可能", "nodes.common.errorHandle.tip": "ノード例外発生時の処理ポリシーを設定", "nodes.common.errorHandle.title": "例外処理", "nodes.common.inputVars": "入力変数", diff --git a/web/i18n/ko-KR/workflow.json b/web/i18n/ko-KR/workflow.json index ab602c391c..c93f417361 100644 --- a/web/i18n/ko-KR/workflow.json +++ b/web/i18n/ko-KR/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "실패 분기", "nodes.common.errorHandle.none.desc": "예외가 발생하고 처리되지 않으면 노드 실행이 중지됩니다", "nodes.common.errorHandle.none.title": "없음", - "nodes.common.errorHandle.partialSucceeded.tip": "프로세스에 {{num}} 노드가 비정상적으로 실행 중입니다. 추적으로 이동하여 로그를 확인하십시오.", + "nodes.common.errorHandle.partialSucceeded.tip": "프로세스에 {{num}} 노드가 비정상적으로 실행 중입니다. 
추적으로 이동하여 로그를 확인하십시오.", "nodes.common.errorHandle.tip": "노드에 예외가 발생할 때 트리거되는 예외 처리 전략입니다.", "nodes.common.errorHandle.title": "오류 처리", "nodes.common.inputVars": "입력 변수", diff --git a/web/i18n/nl-NL/workflow.json b/web/i18n/nl-NL/workflow.json index b706c42962..4a6ccd8937 100644 --- a/web/i18n/nl-NL/workflow.json +++ b/web/i18n/nl-NL/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Fail Branch", "nodes.common.errorHandle.none.desc": "The node will stop running if an exception occurs and is not handled", "nodes.common.errorHandle.none.title": "None", - "nodes.common.errorHandle.partialSucceeded.tip": "There are {{num}} nodes in the process running abnormally, please go to tracing to check the logs.", + "nodes.common.errorHandle.partialSucceeded.tip": "There are {{num}} nodes in the process running abnormally, please go to tracing to check the logs.", "nodes.common.errorHandle.tip": "Exception handling strategy, triggered when a node encounters an exception.", "nodes.common.errorHandle.title": "Error Handling", "nodes.common.inputVars": "Input Variables", diff --git a/web/i18n/pl-PL/workflow.json b/web/i18n/pl-PL/workflow.json index 1f54fe6437..57aa50dd4e 100644 --- a/web/i18n/pl-PL/workflow.json +++ b/web/i18n/pl-PL/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Gałąź Fail (Gałąź Niepowodzenia", "nodes.common.errorHandle.none.desc": "Węzeł przestanie działać, jeśli wystąpi wyjątek i nie zostanie obsłużony", "nodes.common.errorHandle.none.title": "Żaden", - "nodes.common.errorHandle.partialSucceeded.tip": "W procesie {{num}} węzły działają nieprawidłowo, przejdź do śledzenia, aby sprawdzić dzienniki.", + "nodes.common.errorHandle.partialSucceeded.tip": "W procesie {{num}} węzły działają nieprawidłowo, przejdź do śledzenia, aby sprawdzić dzienniki.", "nodes.common.errorHandle.tip": "Strategia obsługi wyjątków, wyzwalana, gdy węzeł napotka wyjątek.", "nodes.common.errorHandle.title": "Obsługa błędów", "nodes.common.inputVars": "Zmienne wejściowe", diff --git a/web/i18n/pt-BR/workflow.json b/web/i18n/pt-BR/workflow.json index ab1fea2990..a0635ad814 100644 --- a/web/i18n/pt-BR/workflow.json +++ b/web/i18n/pt-BR/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Ramificação com falha", "nodes.common.errorHandle.none.desc": "O nó deixará de ser executado se ocorrer uma exceção e não for tratada", "nodes.common.errorHandle.none.title": "Nenhum", - "nodes.common.errorHandle.partialSucceeded.tip": "Existem {{num}} nós no processo em execução anormal, vá para rastreamento para verificar os logs.", + "nodes.common.errorHandle.partialSucceeded.tip": "Existem {{num}} nós no processo em execução anormal, vá para rastreamento para verificar os logs.", "nodes.common.errorHandle.tip": "Estratégia de tratamento de exceções, disparada quando um nó encontra uma exceção.", "nodes.common.errorHandle.title": "Tratamento de erros", "nodes.common.inputVars": "Variáveis de entrada", diff --git a/web/i18n/ro-RO/workflow.json b/web/i18n/ro-RO/workflow.json index dbe331e316..58a0786894 100644 --- a/web/i18n/ro-RO/workflow.json +++ b/web/i18n/ro-RO/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Ramură Fail", "nodes.common.errorHandle.none.desc": "Nodul se va opri din rulare dacă apare o excepție și nu este gestionat", "nodes.common.errorHandle.none.title": "Niciunul", - "nodes.common.errorHandle.partialSucceeded.tip": "Există {{num}} noduri în proces care rulează anormal, vă rugăm să mergeți la 
urmărire pentru a verifica jurnalele.", + "nodes.common.errorHandle.partialSucceeded.tip": "Există {{num}} noduri în proces care rulează anormal, vă rugăm să mergeți la urmărire pentru a verifica jurnalele.", "nodes.common.errorHandle.tip": "Strategie de gestionare a excepțiilor, declanșată atunci când un nod întâlnește o excepție.", "nodes.common.errorHandle.title": "Gestionarea erorilor", "nodes.common.inputVars": "Variabile de intrare", diff --git a/web/i18n/ru-RU/workflow.json b/web/i18n/ru-RU/workflow.json index 48a253b31b..585c9fae93 100644 --- a/web/i18n/ru-RU/workflow.json +++ b/web/i18n/ru-RU/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Неудачная ветвь", "nodes.common.errorHandle.none.desc": "Узел перестанет работать, если произойдет исключение и оно не будет обработано", "nodes.common.errorHandle.none.title": "Никакой", - "nodes.common.errorHandle.partialSucceeded.tip": "В процессе есть {{num}} узлов, которые работают ненормально, пожалуйста, перейдите к трассировке, чтобы проверить логи.", + "nodes.common.errorHandle.partialSucceeded.tip": "В процессе есть {{num}} узлов, которые работают ненормально, пожалуйста, перейдите к трассировке, чтобы проверить логи.", "nodes.common.errorHandle.tip": "Стратегия обработки исключений, запускаемая при обнаружении исключения на узле.", "nodes.common.errorHandle.title": "Обработка ошибок", "nodes.common.inputVars": "Входные переменные", diff --git a/web/i18n/sl-SI/workflow.json b/web/i18n/sl-SI/workflow.json index f2b032cfaa..f29c211ed8 100644 --- a/web/i18n/sl-SI/workflow.json +++ b/web/i18n/sl-SI/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Napaka veja", "nodes.common.errorHandle.none.desc": "Vozlišče se bo prenehalo izvajati, če pride do izjeme, ki ni obravnavana.", "nodes.common.errorHandle.none.title": "Noben", - "nodes.common.errorHandle.partialSucceeded.tip": "V procesu je {{num}} vozlišč, ki delujejo nenormalno, prosim, pojdite na sledenje, da preverite dnevnike.", + "nodes.common.errorHandle.partialSucceeded.tip": "V procesu je {{num}} vozlišč, ki delujejo nenormalno, prosim, pojdite na sledenje, da preverite dnevnike.", "nodes.common.errorHandle.tip": "Strategija ravnanja z izjemo, ki se sproži, ko vozlišče naleti na izjemo.", "nodes.common.errorHandle.title": "Obvladovanje napak", "nodes.common.inputVars": "Vhodne spremenljivke", diff --git a/web/i18n/th-TH/workflow.json b/web/i18n/th-TH/workflow.json index fb49264d71..130e2c0269 100644 --- a/web/i18n/th-TH/workflow.json +++ b/web/i18n/th-TH/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "สาขาล้มเหลว", "nodes.common.errorHandle.none.desc": "โหนดจะหยุดทํางานหากเกิดข้อยกเว้นและไม่ได้รับการจัดการ", "nodes.common.errorHandle.none.title": "ไม่มีใคร", - "nodes.common.errorHandle.partialSucceeded.tip": "มีโหนด {{num}} ในกระบวนการที่ทํางานผิดปกติ โปรดไปที่การติดตามเพื่อตรวจสอบบันทึก", + "nodes.common.errorHandle.partialSucceeded.tip": "มีโหนด {{num}} ในกระบวนการที่ทํางานผิดปกติ โปรดไปที่การติดตามเพื่อตรวจสอบบันทึก", "nodes.common.errorHandle.tip": "กลยุทธ์การจัดการข้อยกเว้น ทริกเกอร์เมื่อโหนดพบข้อยกเว้น", "nodes.common.errorHandle.title": "การจัดการข้อผิดพลาด", "nodes.common.inputVars": "ตัวแปรอินพุต", diff --git a/web/i18n/tr-TR/workflow.json b/web/i18n/tr-TR/workflow.json index 6b87b4e1e8..c6893cfb84 100644 --- a/web/i18n/tr-TR/workflow.json +++ b/web/i18n/tr-TR/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Başarısız Dal", 
"nodes.common.errorHandle.none.desc": "Bir özel durum oluşursa ve işlenmezse düğüm çalışmayı durdurur", "nodes.common.errorHandle.none.title": "Hiç kimse", - "nodes.common.errorHandle.partialSucceeded.tip": "İşlemde anormal şekilde çalışan {{num}} düğümleri var, lütfen günlükleri kontrol etmek için izlemeye gidin.", + "nodes.common.errorHandle.partialSucceeded.tip": "İşlemde anormal şekilde çalışan {{num}} düğümleri var, lütfen günlükleri kontrol etmek için izlemeye gidin.", "nodes.common.errorHandle.tip": "Bir düğüm bir özel durumla karşılaştığında tetiklenen özel durum işleme stratejisi.", "nodes.common.errorHandle.title": "Hata İşleme", "nodes.common.inputVars": "Giriş Değişkenleri", diff --git a/web/i18n/uk-UA/workflow.json b/web/i18n/uk-UA/workflow.json index 70d5378d22..90ee940ae7 100644 --- a/web/i18n/uk-UA/workflow.json +++ b/web/i18n/uk-UA/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Гілка невдачі", "nodes.common.errorHandle.none.desc": "Вузол припинить роботу, якщо виникне виняток і не буде оброблений", "nodes.common.errorHandle.none.title": "Ніхто", - "nodes.common.errorHandle.partialSucceeded.tip": "У процесі є вузли {{num}}, які працюють ненормально, будь ласка, перейдіть до трасування, щоб перевірити логи.", + "nodes.common.errorHandle.partialSucceeded.tip": "У процесі є вузли {{num}}, які працюють ненормально, будь ласка, перейдіть до трасування, щоб перевірити логи.", "nodes.common.errorHandle.tip": "Стратегія обробки винятків, що спрацьовує, коли вузол стикається з винятком.", "nodes.common.errorHandle.title": "Обробка помилок", "nodes.common.inputVars": "Вхідні змінні", diff --git a/web/i18n/vi-VN/workflow.json b/web/i18n/vi-VN/workflow.json index 9bf9b4d61c..6ba72b8b3d 100644 --- a/web/i18n/vi-VN/workflow.json +++ b/web/i18n/vi-VN/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "Chi nhánh thất bại", "nodes.common.errorHandle.none.desc": "Nút sẽ ngừng chạy nếu xảy ra ngoại lệ và không được xử lý", "nodes.common.errorHandle.none.title": "Không ai", - "nodes.common.errorHandle.partialSucceeded.tip": "Có {{num}} node trong quá trình chạy bất thường, vui lòng truy tìm để kiểm tra nhật ký.", + "nodes.common.errorHandle.partialSucceeded.tip": "Có {{num}} node trong quá trình chạy bất thường, vui lòng vào truy tìm để kiểm tra nhật ký.", "nodes.common.errorHandle.tip": "Chiến lược xử lý ngoại lệ, được kích hoạt khi một nút gặp phải ngoại lệ.", "nodes.common.errorHandle.title": "Xử lý lỗi", "nodes.common.inputVars": "Biến đầu vào", diff --git a/web/i18n/zh-Hans/workflow.json b/web/i18n/zh-Hans/workflow.json index 6bb832f925..c02cad5145 100644 --- a/web/i18n/zh-Hans/workflow.json +++ b/web/i18n/zh-Hans/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "异常分支", "nodes.common.errorHandle.none.desc": "当发生异常且未处理时,节点将停止运行", "nodes.common.errorHandle.none.title": "无", - "nodes.common.errorHandle.partialSucceeded.tip": "流程中有 {{num}} 个节点运行异常,请前往追踪查看日志。", + "nodes.common.errorHandle.partialSucceeded.tip": "流程中有 {{num}} 个节点运行异常,请前往追踪查看日志。", "nodes.common.errorHandle.tip": "配置异常处理策略,当节点发生异常时触发。", "nodes.common.errorHandle.title": "异常处理", "nodes.common.inputVars": "输入变量", diff --git a/web/i18n/zh-Hant/workflow.json b/web/i18n/zh-Hant/workflow.json index 865d8b66ae..5c7c9cfc95 100644 --- a/web/i18n/zh-Hant/workflow.json +++ b/web/i18n/zh-Hant/workflow.json @@ -433,7 +433,7 @@ "nodes.common.errorHandle.failBranch.title": "失敗分支", "nodes.common.errorHandle.none.desc": "如果發生異常且未得到處理,節點將停止運行", 
"nodes.common.errorHandle.none.title": "沒有", - "nodes.common.errorHandle.partialSucceeded.tip": "進程中有 {{num}} 個節點運行異常,請前往 tracing 查看日誌。", + "nodes.common.errorHandle.partialSucceeded.tip": "進程中有 {{num}} 個節點運行異常,請前往tracing查看日誌。", "nodes.common.errorHandle.tip": "異常處理策略,當節點遇到異常時觸發。", "nodes.common.errorHandle.title": "錯誤處理", "nodes.common.inputVars": "輸入變數", diff --git a/web/package.json b/web/package.json index 3ce16d8fb0..8bc31dce31 100644 --- a/web/package.json +++ b/web/package.json @@ -56,7 +56,6 @@ "@amplitude/analytics-browser": "catalog:", "@amplitude/plugin-session-replay-browser": "catalog:", "@base-ui/react": "catalog:", - "@date-fns/tz": "catalog:", "@emoji-mart/data": "catalog:", "@floating-ui/react": "catalog:", "@formatjs/intl-localematcher": "catalog:", @@ -91,7 +90,6 @@ "cmdk": "catalog:", "copy-to-clipboard": "catalog:", "cron-parser": "catalog:", - "date-fns": "catalog:", "dayjs": "catalog:", "decimal.js": "catalog:", "dompurify": "catalog:", From 9a47bb2f80946a14838cc4906379860713c4836b Mon Sep 17 00:00:00 2001 From: Joel Date: Tue, 14 Apr 2026 16:16:19 +0800 Subject: [PATCH 13/47] fix: doc modal hidden by config modal (#35157) --- web/app/components/plugins/readme-panel/index.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/web/app/components/plugins/readme-panel/index.tsx b/web/app/components/plugins/readme-panel/index.tsx index 516ef63415..182da360c4 100644 --- a/web/app/components/plugins/readme-panel/index.tsx +++ b/web/app/components/plugins/readme-panel/index.tsx @@ -87,7 +87,7 @@ const ReadmePanel: FC = () => { const portalContent = showType === ReadmeShowType.drawer ? ( -
+
{
) : ( -
+
Date: Tue, 14 Apr 2026 11:18:30 +0200 Subject: [PATCH 14/47] refactor: replace bare dict with dict[str, Any] in model provider service and core modules (#35122) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: Crazywoola <100913391+crazywoola@users.noreply.github.com> Co-authored-by: Asuka Minato --- .../agent/output_parser/cot_output_parser.py | 4 +-- api/core/schemas/resolver.py | 2 +- api/services/model_provider_service.py | 27 +++++++++++++------ 3 files changed, 22 insertions(+), 11 deletions(-) diff --git a/api/core/agent/output_parser/cot_output_parser.py b/api/core/agent/output_parser/cot_output_parser.py index 46c1f1230d..8cccd2be6d 100644 --- a/api/core/agent/output_parser/cot_output_parser.py +++ b/api/core/agent/output_parser/cot_output_parser.py @@ -1,7 +1,7 @@ import json import re from collections.abc import Generator -from typing import Union +from typing import Any, Union from graphon.model_runtime.entities.llm_entities import LLMResultChunk @@ -11,7 +11,7 @@ from core.agent.entities import AgentScratchpadUnit class CotAgentOutputParser: @classmethod def handle_react_stream_output( - cls, llm_response: Generator[LLMResultChunk, None, None], usage_dict: dict + cls, llm_response: Generator[LLMResultChunk, None, None], usage_dict: dict[str, Any] ) -> Generator[Union[str, AgentScratchpadUnit.Action], None, None]: def parse_action(action) -> Union[str, AgentScratchpadUnit.Action]: action_name = None diff --git a/api/core/schemas/resolver.py b/api/core/schemas/resolver.py index 6e26664ac2..e267c1abd9 100644 --- a/api/core/schemas/resolver.py +++ b/api/core/schemas/resolver.py @@ -254,7 +254,7 @@ def resolve_dify_schema_refs( return resolver.resolve(schema) -def _remove_metadata_fields(schema: dict) -> dict: +def _remove_metadata_fields(schema: dict[str, Any]) -> dict[str, Any]: """ Remove metadata fields from schema that shouldn't be included in resolved output diff --git a/api/services/model_provider_service.py b/api/services/model_provider_service.py index 3f37c9b176..bf208c9bc7 100644 --- a/api/services/model_provider_service.py +++ b/api/services/model_provider_service.py @@ -1,4 +1,5 @@ import logging +from typing import Any from graphon.model_runtime.entities.model_entities import ModelType, ParameterRule @@ -168,7 +169,9 @@ class ModelProviderService: model_name=model, ) - def get_provider_credential(self, tenant_id: str, provider: str, credential_id: str | None = None) -> dict | None: + def get_provider_credential( + self, tenant_id: str, provider: str, credential_id: str | None = None + ) -> dict[str, Any] | None: """ get provider credentials. @@ -180,7 +183,7 @@ class ModelProviderService: provider_configuration = self._get_provider_configuration(tenant_id, provider) return provider_configuration.get_provider_credential(credential_id=credential_id) - def validate_provider_credentials(self, tenant_id: str, provider: str, credentials: dict): + def validate_provider_credentials(self, tenant_id: str, provider: str, credentials: dict[str, Any]): """ validate provider credentials before saving. @@ -192,7 +195,7 @@ class ModelProviderService: provider_configuration.validate_provider_credentials(credentials) def create_provider_credential( - self, tenant_id: str, provider: str, credentials: dict, credential_name: str | None + self, tenant_id: str, provider: str, credentials: dict[str, Any], credential_name: str | None ) -> None: """ Create and save new provider credentials. 
@@ -210,7 +213,7 @@ class ModelProviderService: self, tenant_id: str, provider: str, - credentials: dict, + credentials: dict[str, Any], credential_id: str, credential_name: str | None, ) -> None: @@ -254,7 +257,7 @@ class ModelProviderService: def get_model_credential( self, tenant_id: str, provider: str, model_type: str, model: str, credential_id: str | None - ) -> dict | None: + ) -> dict[str, Any] | None: """ Retrieve model-specific credentials. @@ -270,7 +273,9 @@ class ModelProviderService: model_type=ModelType.value_of(model_type), model=model, credential_id=credential_id ) - def validate_model_credentials(self, tenant_id: str, provider: str, model_type: str, model: str, credentials: dict): + def validate_model_credentials( + self, tenant_id: str, provider: str, model_type: str, model: str, credentials: dict[str, Any] + ): """ validate model credentials. @@ -287,7 +292,13 @@ class ModelProviderService: ) def create_model_credential( - self, tenant_id: str, provider: str, model_type: str, model: str, credentials: dict, credential_name: str | None + self, + tenant_id: str, + provider: str, + model_type: str, + model: str, + credentials: dict[str, Any], + credential_name: str | None, ) -> None: """ create and save model credentials. @@ -314,7 +325,7 @@ class ModelProviderService: provider: str, model_type: str, model: str, - credentials: dict, + credentials: dict[str, Any], credential_id: str, credential_name: str | None, ) -> None: From 736880e046581ca5b0b65441cee1c1e96cc5caac Mon Sep 17 00:00:00 2001 From: Blackoutta <37723456+Blackoutta@users.noreply.github.com> Date: Tue, 14 Apr 2026 17:31:41 +0800 Subject: [PATCH 15/47] feat: support configurable redis key prefix (#35139) --- api/.env.example | 3 + api/configs/middleware/cache/redis_config.py | 5 + api/extensions/ext_celery.py | 24 +- api/extensions/ext_redis.py | 217 ++++++++++++------ api/extensions/redis_names.py | 32 +++ api/libs/broadcast_channel/redis/channel.py | 6 +- .../redis/sharded_channel.py | 6 +- .../redis/streams_channel.py | 3 +- api/libs/db_migration_lock.py | 5 +- api/tests/integration_tests/.env.example | 1 + .../unit_tests/configs/test_dify_config.py | 35 +++ .../unit_tests/extensions/test_celery_ssl.py | 87 +++++++ .../extensions/test_pubsub_channel.py | 2 + api/tests/unit_tests/extensions/test_redis.py | 101 +++++++- .../redis/test_channel_unit_tests.py | 44 ++++ .../redis/test_streams_channel_unit_tests.py | 20 ++ docker/.env.example | 3 + docker/README.md | 1 + docker/docker-compose.yaml | 1 + 19 files changed, 522 insertions(+), 74 deletions(-) create mode 100644 api/extensions/redis_names.py diff --git a/api/.env.example b/api/.env.example index a04a18944a..beb820e797 100644 --- a/api/.env.example +++ b/api/.env.example @@ -57,6 +57,9 @@ REDIS_SSL_CERTFILE= REDIS_SSL_KEYFILE= # Path to client private key file for SSL authentication REDIS_DB=0 +# Optional global prefix for Redis keys, topics, streams, and Celery Redis transport artifacts. +# Leave empty to preserve current unprefixed behavior. +REDIS_KEY_PREFIX= # redis Sentinel configuration. 
REDIS_USE_SENTINEL=false diff --git a/api/configs/middleware/cache/redis_config.py b/api/configs/middleware/cache/redis_config.py index b49275758a..2def0a0d4e 100644 --- a/api/configs/middleware/cache/redis_config.py +++ b/api/configs/middleware/cache/redis_config.py @@ -32,6 +32,11 @@ class RedisConfig(BaseSettings): default=0, ) + REDIS_KEY_PREFIX: str = Field( + description="Optional global prefix for Redis keys, topics, and transport artifacts", + default="", + ) + REDIS_USE_SSL: bool = Field( description="Enable SSL/TLS for the Redis connection", default=False, diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index 86b0550187..340f514fcc 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -9,6 +9,7 @@ from typing_extensions import TypedDict from configs import dify_config from dify_app import DifyApp +from extensions.redis_names import normalize_redis_key_prefix class _CelerySentinelKwargsDict(TypedDict): @@ -16,9 +17,10 @@ class _CelerySentinelKwargsDict(TypedDict): password: str | None -class CelerySentinelTransportDict(TypedDict): +class CelerySentinelTransportDict(TypedDict, total=False): master_name: str | None sentinel_kwargs: _CelerySentinelKwargsDict + global_keyprefix: str class CelerySSLOptionsDict(TypedDict): @@ -61,15 +63,31 @@ def get_celery_ssl_options() -> CelerySSLOptionsDict | None: def get_celery_broker_transport_options() -> CelerySentinelTransportDict | dict[str, Any]: """Get broker transport options (e.g. Redis Sentinel) for Celery connections.""" + transport_options: CelerySentinelTransportDict | dict[str, Any] if dify_config.CELERY_USE_SENTINEL: - return CelerySentinelTransportDict( + transport_options = CelerySentinelTransportDict( master_name=dify_config.CELERY_SENTINEL_MASTER_NAME, sentinel_kwargs=_CelerySentinelKwargsDict( socket_timeout=dify_config.CELERY_SENTINEL_SOCKET_TIMEOUT, password=dify_config.CELERY_SENTINEL_PASSWORD, ), ) - return {} + else: + transport_options = {} + + global_keyprefix = get_celery_redis_global_keyprefix() + if global_keyprefix: + transport_options["global_keyprefix"] = global_keyprefix + + return transport_options + + +def get_celery_redis_global_keyprefix() -> str | None: + """Return the Redis transport prefix for Celery when namespace isolation is enabled.""" + normalized_prefix = normalize_redis_key_prefix(dify_config.REDIS_KEY_PREFIX) + if not normalized_prefix: + return None + return f"{normalized_prefix}:" def init_app(app: DifyApp) -> Celery: diff --git a/api/extensions/ext_redis.py b/api/extensions/ext_redis.py index 20f05b8b9e..9f7f73765e 100644 --- a/api/extensions/ext_redis.py +++ b/api/extensions/ext_redis.py @@ -3,7 +3,7 @@ import logging import ssl from collections.abc import Callable from datetime import timedelta -from typing import TYPE_CHECKING, Any, Union +from typing import Any, Union, cast import redis from redis import RedisError @@ -18,17 +18,26 @@ from typing_extensions import TypedDict from configs import dify_config from dify_app import DifyApp +from extensions.redis_names import ( + normalize_redis_key_prefix, + serialize_redis_name, + serialize_redis_name_arg, + serialize_redis_name_args, +) from libs.broadcast_channel.channel import BroadcastChannel as BroadcastChannelProtocol from libs.broadcast_channel.redis.channel import BroadcastChannel as RedisBroadcastChannel from libs.broadcast_channel.redis.sharded_channel import ShardedRedisBroadcastChannel from libs.broadcast_channel.redis.streams_channel import StreamsBroadcastChannel -if TYPE_CHECKING: - 
from redis.lock import Lock - logger = logging.getLogger(__name__) +_normalize_redis_key_prefix = normalize_redis_key_prefix +_serialize_redis_name = serialize_redis_name +_serialize_redis_name_arg = serialize_redis_name_arg +_serialize_redis_name_args = serialize_redis_name_args + + class RedisClientWrapper: """ A wrapper class for the Redis client that addresses the issue where the global @@ -59,68 +68,148 @@ class RedisClientWrapper: if self._client is None: self._client = client - if TYPE_CHECKING: - # Type hints for IDE support and static analysis - # These are not executed at runtime but provide type information - def get(self, name: str | bytes) -> Any: ... - - def set( - self, - name: str | bytes, - value: Any, - ex: int | None = None, - px: int | None = None, - nx: bool = False, - xx: bool = False, - keepttl: bool = False, - get: bool = False, - exat: int | None = None, - pxat: int | None = None, - ) -> Any: ... - - def setex(self, name: str | bytes, time: int | timedelta, value: Any) -> Any: ... - def setnx(self, name: str | bytes, value: Any) -> Any: ... - def delete(self, *names: str | bytes) -> Any: ... - def incr(self, name: str | bytes, amount: int = 1) -> Any: ... - def expire( - self, - name: str | bytes, - time: int | timedelta, - nx: bool = False, - xx: bool = False, - gt: bool = False, - lt: bool = False, - ) -> Any: ... - def lock( - self, - name: str, - timeout: float | None = None, - sleep: float = 0.1, - blocking: bool = True, - blocking_timeout: float | None = None, - thread_local: bool = True, - ) -> Lock: ... - def zadd( - self, - name: str | bytes, - mapping: dict[str | bytes | int | float, float | int | str | bytes], - nx: bool = False, - xx: bool = False, - ch: bool = False, - incr: bool = False, - gt: bool = False, - lt: bool = False, - ) -> Any: ... - def zremrangebyscore(self, name: str | bytes, min: float | str, max: float | str) -> Any: ... - def zcard(self, name: str | bytes) -> Any: ... - def getdel(self, name: str | bytes) -> Any: ... - def pubsub(self) -> PubSub: ... - def pipeline(self, transaction: bool = True, shard_hint: str | None = None) -> Any: ... - - def __getattr__(self, item: str) -> Any: + def _require_client(self) -> redis.Redis | RedisCluster: if self._client is None: raise RuntimeError("Redis client is not initialized. 
Call init_app first.") - return getattr(self._client, item) + return self._client + + def _get_prefix(self) -> str: + return dify_config.REDIS_KEY_PREFIX + + def get(self, name: str | bytes) -> Any: + return self._require_client().get(_serialize_redis_name_arg(name, self._get_prefix())) + + def set( + self, + name: str | bytes, + value: Any, + ex: int | None = None, + px: int | None = None, + nx: bool = False, + xx: bool = False, + keepttl: bool = False, + get: bool = False, + exat: int | None = None, + pxat: int | None = None, + ) -> Any: + return self._require_client().set( + _serialize_redis_name_arg(name, self._get_prefix()), + value, + ex=ex, + px=px, + nx=nx, + xx=xx, + keepttl=keepttl, + get=get, + exat=exat, + pxat=pxat, + ) + + def setex(self, name: str | bytes, time: int | timedelta, value: Any) -> Any: + return self._require_client().setex(_serialize_redis_name_arg(name, self._get_prefix()), time, value) + + def setnx(self, name: str | bytes, value: Any) -> Any: + return self._require_client().setnx(_serialize_redis_name_arg(name, self._get_prefix()), value) + + def delete(self, *names: str | bytes) -> Any: + return self._require_client().delete(*_serialize_redis_name_args(names, self._get_prefix())) + + def incr(self, name: str | bytes, amount: int = 1) -> Any: + return self._require_client().incr(_serialize_redis_name_arg(name, self._get_prefix()), amount) + + def expire( + self, + name: str | bytes, + time: int | timedelta, + nx: bool = False, + xx: bool = False, + gt: bool = False, + lt: bool = False, + ) -> Any: + return self._require_client().expire( + _serialize_redis_name_arg(name, self._get_prefix()), + time, + nx=nx, + xx=xx, + gt=gt, + lt=lt, + ) + + def exists(self, *names: str | bytes) -> Any: + return self._require_client().exists(*_serialize_redis_name_args(names, self._get_prefix())) + + def ttl(self, name: str | bytes) -> Any: + return self._require_client().ttl(_serialize_redis_name_arg(name, self._get_prefix())) + + def getdel(self, name: str | bytes) -> Any: + return self._require_client().getdel(_serialize_redis_name_arg(name, self._get_prefix())) + + def lock( + self, + name: str, + timeout: float | None = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: float | None = None, + thread_local: bool = True, + ) -> Any: + return self._require_client().lock( + _serialize_redis_name(name, self._get_prefix()), + timeout=timeout, + sleep=sleep, + blocking=blocking, + blocking_timeout=blocking_timeout, + thread_local=thread_local, + ) + + def hset(self, name: str | bytes, *args: Any, **kwargs: Any) -> Any: + return self._require_client().hset(_serialize_redis_name_arg(name, self._get_prefix()), *args, **kwargs) + + def hgetall(self, name: str | bytes) -> Any: + return self._require_client().hgetall(_serialize_redis_name_arg(name, self._get_prefix())) + + def hdel(self, name: str | bytes, *keys: str | bytes) -> Any: + return self._require_client().hdel(_serialize_redis_name_arg(name, self._get_prefix()), *keys) + + def hlen(self, name: str | bytes) -> Any: + return self._require_client().hlen(_serialize_redis_name_arg(name, self._get_prefix())) + + def zadd( + self, + name: str | bytes, + mapping: dict[str | bytes | int | float, float | int | str | bytes], + nx: bool = False, + xx: bool = False, + ch: bool = False, + incr: bool = False, + gt: bool = False, + lt: bool = False, + ) -> Any: + return self._require_client().zadd( + _serialize_redis_name_arg(name, self._get_prefix()), + cast(Any, mapping), + nx=nx, + xx=xx, + ch=ch, + incr=incr, + 
gt=gt, + lt=lt, + ) + + def zremrangebyscore(self, name: str | bytes, min: float | str, max: float | str) -> Any: + return self._require_client().zremrangebyscore(_serialize_redis_name_arg(name, self._get_prefix()), min, max) + + def zcard(self, name: str | bytes) -> Any: + return self._require_client().zcard(_serialize_redis_name_arg(name, self._get_prefix())) + + def pubsub(self) -> PubSub: + return self._require_client().pubsub() + + def pipeline(self, transaction: bool = True, shard_hint: str | None = None) -> Any: + return self._require_client().pipeline(transaction=transaction, shard_hint=shard_hint) + + def __getattr__(self, item: str) -> Any: + return getattr(self._require_client(), item) redis_client: RedisClientWrapper = RedisClientWrapper() diff --git a/api/extensions/redis_names.py b/api/extensions/redis_names.py new file mode 100644 index 0000000000..9e63416daf --- /dev/null +++ b/api/extensions/redis_names.py @@ -0,0 +1,32 @@ +from configs import dify_config + + +def normalize_redis_key_prefix(prefix: str | None) -> str: + """Normalize the configured Redis key prefix for consistent runtime use.""" + if prefix is None: + return "" + return prefix.strip() + + +def get_redis_key_prefix() -> str: + """Read and normalize the current Redis key prefix from config.""" + return normalize_redis_key_prefix(dify_config.REDIS_KEY_PREFIX) + + +def serialize_redis_name(name: str, prefix: str | None = None) -> str: + """Convert a logical Redis name into the physical name used in Redis.""" + normalized_prefix = get_redis_key_prefix() if prefix is None else normalize_redis_key_prefix(prefix) + if not normalized_prefix: + return name + return f"{normalized_prefix}:{name}" + + +def serialize_redis_name_arg(name: str | bytes, prefix: str | None = None) -> str | bytes: + """Prefix string Redis names while preserving bytes inputs unchanged.""" + if isinstance(name, bytes): + return name + return serialize_redis_name(name, prefix) + + +def serialize_redis_name_args(names: tuple[str | bytes, ...], prefix: str | None = None) -> tuple[str | bytes, ...]: + return tuple(serialize_redis_name_arg(name, prefix) for name in names) diff --git a/api/libs/broadcast_channel/redis/channel.py b/api/libs/broadcast_channel/redis/channel.py index 36aa1cd3e8..b76a23eb3c 100644 --- a/api/libs/broadcast_channel/redis/channel.py +++ b/api/libs/broadcast_channel/redis/channel.py @@ -2,6 +2,7 @@ from __future__ import annotations from typing import Any +from extensions.redis_names import serialize_redis_name from libs.broadcast_channel.channel import Producer, Subscriber, Subscription from redis import Redis, RedisCluster @@ -32,12 +33,13 @@ class Topic: def __init__(self, redis_client: Redis | RedisCluster, topic: str): self._client = redis_client self._topic = topic + self._redis_topic = serialize_redis_name(topic) def as_producer(self) -> Producer: return self def publish(self, payload: bytes) -> None: - self._client.publish(self._topic, payload) + self._client.publish(self._redis_topic, payload) def as_subscriber(self) -> Subscriber: return self @@ -46,7 +48,7 @@ class Topic: return _RedisSubscription( client=self._client, pubsub=self._client.pubsub(), - topic=self._topic, + topic=self._redis_topic, ) diff --git a/api/libs/broadcast_channel/redis/sharded_channel.py b/api/libs/broadcast_channel/redis/sharded_channel.py index dddc92d099..919d8d622e 100644 --- a/api/libs/broadcast_channel/redis/sharded_channel.py +++ b/api/libs/broadcast_channel/redis/sharded_channel.py @@ -2,6 +2,7 @@ from __future__ import annotations 
from typing import Any +from extensions.redis_names import serialize_redis_name from libs.broadcast_channel.channel import Producer, Subscriber, Subscription from redis import Redis, RedisCluster @@ -30,12 +31,13 @@ class ShardedTopic: def __init__(self, redis_client: Redis | RedisCluster, topic: str): self._client = redis_client self._topic = topic + self._redis_topic = serialize_redis_name(topic) def as_producer(self) -> Producer: return self def publish(self, payload: bytes) -> None: - self._client.spublish(self._topic, payload) # type: ignore[attr-defined,union-attr] + self._client.spublish(self._redis_topic, payload) # type: ignore[attr-defined,union-attr] def as_subscriber(self) -> Subscriber: return self @@ -44,7 +46,7 @@ class ShardedTopic: return _RedisShardedSubscription( client=self._client, pubsub=self._client.pubsub(), - topic=self._topic, + topic=self._redis_topic, ) diff --git a/api/libs/broadcast_channel/redis/streams_channel.py b/api/libs/broadcast_channel/redis/streams_channel.py index 983f785027..55ff6cd4f9 100644 --- a/api/libs/broadcast_channel/redis/streams_channel.py +++ b/api/libs/broadcast_channel/redis/streams_channel.py @@ -6,6 +6,7 @@ import threading from collections.abc import Iterator from typing import Self +from extensions.redis_names import serialize_redis_name from libs.broadcast_channel.channel import Producer, Subscriber, Subscription from libs.broadcast_channel.exc import SubscriptionClosedError from redis import Redis, RedisCluster @@ -35,7 +36,7 @@ class StreamsTopic: def __init__(self, redis_client: Redis | RedisCluster, topic: str, *, retention_seconds: int = 600): self._client = redis_client self._topic = topic - self._key = f"stream:{topic}" + self._key = serialize_redis_name(f"stream:{topic}") self._retention_seconds = retention_seconds self.max_length = 5000 diff --git a/api/libs/db_migration_lock.py b/api/libs/db_migration_lock.py index ca8956e397..b5fe38342a 100644 --- a/api/libs/db_migration_lock.py +++ b/api/libs/db_migration_lock.py @@ -103,7 +103,10 @@ class DbMigrationAutoRenewLock: timeout=self._ttl_seconds, thread_local=False, ) - acquired = bool(self._lock.acquire(*args, **kwargs)) + lock = self._lock + if lock is None: + raise RuntimeError("Redis lock initialization failed.") + acquired = bool(lock.acquire(*args, **kwargs)) self._acquired = acquired if acquired: self._start_heartbeat() diff --git a/api/tests/integration_tests/.env.example b/api/tests/integration_tests/.env.example index f84d39aeb5..c07ab6d6bf 100644 --- a/api/tests/integration_tests/.env.example +++ b/api/tests/integration_tests/.env.example @@ -33,6 +33,7 @@ REDIS_USERNAME= REDIS_PASSWORD=difyai123456 REDIS_USE_SSL=false REDIS_DB=0 +REDIS_KEY_PREFIX= # PostgreSQL database configuration DB_USERNAME=postgres diff --git a/api/tests/unit_tests/configs/test_dify_config.py b/api/tests/unit_tests/configs/test_dify_config.py index 3089750c3e..bad246a4bb 100644 --- a/api/tests/unit_tests/configs/test_dify_config.py +++ b/api/tests/unit_tests/configs/test_dify_config.py @@ -236,6 +236,41 @@ def test_pubsub_redis_url_required_when_default_unavailable(monkeypatch: pytest. 
_ = DifyConfig().normalized_pubsub_redis_url +def test_dify_config_exposes_redis_key_prefix_default(monkeypatch: pytest.MonkeyPatch): + os.environ.clear() + + monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") + monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") + monkeypatch.setenv("DB_TYPE", "postgresql") + monkeypatch.setenv("DB_USERNAME", "postgres") + monkeypatch.setenv("DB_PASSWORD", "postgres") + monkeypatch.setenv("DB_HOST", "localhost") + monkeypatch.setenv("DB_PORT", "5432") + monkeypatch.setenv("DB_DATABASE", "dify") + + config = DifyConfig(_env_file=None) + + assert config.REDIS_KEY_PREFIX == "" + + +def test_dify_config_reads_redis_key_prefix_from_env(monkeypatch: pytest.MonkeyPatch): + os.environ.clear() + + monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") + monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") + monkeypatch.setenv("DB_TYPE", "postgresql") + monkeypatch.setenv("DB_USERNAME", "postgres") + monkeypatch.setenv("DB_PASSWORD", "postgres") + monkeypatch.setenv("DB_HOST", "localhost") + monkeypatch.setenv("DB_PORT", "5432") + monkeypatch.setenv("DB_DATABASE", "dify") + monkeypatch.setenv("REDIS_KEY_PREFIX", "enterprise-a") + + config = DifyConfig(_env_file=None) + + assert config.REDIS_KEY_PREFIX == "enterprise-a" + + @pytest.mark.parametrize( ("broker_url", "expected_host", "expected_port", "expected_username", "expected_password", "expected_db"), [ diff --git a/api/tests/unit_tests/extensions/test_celery_ssl.py b/api/tests/unit_tests/extensions/test_celery_ssl.py index 81687ce5f8..366e45d86d 100644 --- a/api/tests/unit_tests/extensions/test_celery_ssl.py +++ b/api/tests/unit_tests/extensions/test_celery_ssl.py @@ -7,6 +7,47 @@ from unittest.mock import MagicMock, patch class TestCelerySSLConfiguration: """Test suite for Celery SSL configuration.""" + def test_get_celery_broker_transport_options_includes_global_keyprefix_for_redis(self): + mock_config = MagicMock() + mock_config.CELERY_USE_SENTINEL = False + mock_config.REDIS_KEY_PREFIX = "enterprise-a" + + with patch("extensions.ext_celery.dify_config", mock_config): + from extensions.ext_celery import get_celery_broker_transport_options + + result = get_celery_broker_transport_options() + + assert result["global_keyprefix"] == "enterprise-a:" + + def test_get_celery_broker_transport_options_omits_global_keyprefix_when_prefix_empty(self): + mock_config = MagicMock() + mock_config.CELERY_USE_SENTINEL = False + mock_config.REDIS_KEY_PREFIX = " " + + with patch("extensions.ext_celery.dify_config", mock_config): + from extensions.ext_celery import get_celery_broker_transport_options + + result = get_celery_broker_transport_options() + + assert "global_keyprefix" not in result + + def test_get_celery_broker_transport_options_keeps_sentinel_and_adds_global_keyprefix(self): + mock_config = MagicMock() + mock_config.CELERY_USE_SENTINEL = True + mock_config.CELERY_SENTINEL_MASTER_NAME = "mymaster" + mock_config.CELERY_SENTINEL_SOCKET_TIMEOUT = 0.1 + mock_config.CELERY_SENTINEL_PASSWORD = "secret" + mock_config.REDIS_KEY_PREFIX = "enterprise-a" + + with patch("extensions.ext_celery.dify_config", mock_config): + from extensions.ext_celery import get_celery_broker_transport_options + + result = get_celery_broker_transport_options() + + assert result["master_name"] == "mymaster" + assert result["sentinel_kwargs"]["password"] == "secret" + assert result["global_keyprefix"] == "enterprise-a:" + def test_get_celery_ssl_options_when_ssl_disabled(self): """Test SSL options when BROKER_USE_SSL 
is False.""" from configs import DifyConfig @@ -151,3 +192,49 @@ class TestCelerySSLConfiguration: # Check that SSL is also applied to Redis backend assert "redis_backend_use_ssl" in celery_app.conf assert celery_app.conf["redis_backend_use_ssl"] is not None + + def test_celery_init_applies_global_keyprefix_to_broker_and_backend_transport(self): + mock_config = MagicMock() + mock_config.BROKER_USE_SSL = False + mock_config.REDIS_KEY_PREFIX = "enterprise-a" + mock_config.HUMAN_INPUT_TIMEOUT_TASK_INTERVAL = 1 + mock_config.CELERY_BROKER_URL = "redis://localhost:6379/0" + mock_config.CELERY_BACKEND = "redis" + mock_config.CELERY_RESULT_BACKEND = "redis://localhost:6379/0" + mock_config.CELERY_USE_SENTINEL = False + mock_config.LOG_FORMAT = "%(message)s" + mock_config.LOG_TZ = "UTC" + mock_config.LOG_FILE = None + mock_config.CELERY_TASK_ANNOTATIONS = {} + + mock_config.CELERY_BEAT_SCHEDULER_TIME = 1 + mock_config.ENABLE_CLEAN_EMBEDDING_CACHE_TASK = False + mock_config.ENABLE_CLEAN_UNUSED_DATASETS_TASK = False + mock_config.ENABLE_CREATE_TIDB_SERVERLESS_TASK = False + mock_config.ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK = False + mock_config.ENABLE_CLEAN_MESSAGES = False + mock_config.ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK = False + mock_config.ENABLE_DATASETS_QUEUE_MONITOR = False + mock_config.ENABLE_HUMAN_INPUT_TIMEOUT_TASK = False + mock_config.ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK = False + mock_config.MARKETPLACE_ENABLED = False + mock_config.WORKFLOW_LOG_CLEANUP_ENABLED = False + mock_config.ENABLE_WORKFLOW_RUN_CLEANUP_TASK = False + mock_config.ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK = False + mock_config.WORKFLOW_SCHEDULE_POLLER_INTERVAL = 1 + mock_config.ENABLE_TRIGGER_PROVIDER_REFRESH_TASK = False + mock_config.TRIGGER_PROVIDER_REFRESH_INTERVAL = 15 + mock_config.ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK = False + mock_config.API_TOKEN_LAST_USED_UPDATE_INTERVAL = 30 + mock_config.ENTERPRISE_ENABLED = False + mock_config.ENTERPRISE_TELEMETRY_ENABLED = False + + with patch("extensions.ext_celery.dify_config", mock_config): + from dify_app import DifyApp + from extensions.ext_celery import init_app + + app = DifyApp(__name__) + celery_app = init_app(app) + + assert celery_app.conf["broker_transport_options"]["global_keyprefix"] == "enterprise-a:" + assert celery_app.conf["result_backend_transport_options"]["global_keyprefix"] == "enterprise-a:" diff --git a/api/tests/unit_tests/extensions/test_pubsub_channel.py b/api/tests/unit_tests/extensions/test_pubsub_channel.py index a5b41a7266..926c406ad4 100644 --- a/api/tests/unit_tests/extensions/test_pubsub_channel.py +++ b/api/tests/unit_tests/extensions/test_pubsub_channel.py @@ -6,6 +6,7 @@ from libs.broadcast_channel.redis.sharded_channel import ShardedRedisBroadcastCh def test_get_pubsub_broadcast_channel_defaults_to_pubsub(monkeypatch): monkeypatch.setattr(dify_config, "PUBSUB_REDIS_CHANNEL_TYPE", "pubsub") + monkeypatch.setattr(ext_redis, "_pubsub_redis_client", object()) channel = ext_redis.get_pubsub_broadcast_channel() @@ -14,6 +15,7 @@ def test_get_pubsub_broadcast_channel_defaults_to_pubsub(monkeypatch): def test_get_pubsub_broadcast_channel_sharded(monkeypatch): monkeypatch.setattr(dify_config, "PUBSUB_REDIS_CHANNEL_TYPE", "sharded") + monkeypatch.setattr(ext_redis, "_pubsub_redis_client", object()) channel = ext_redis.get_pubsub_broadcast_channel() diff --git a/api/tests/unit_tests/extensions/test_redis.py b/api/tests/unit_tests/extensions/test_redis.py index 5e9be4ab9b..21248439bf 100644 --- 
a/api/tests/unit_tests/extensions/test_redis.py +++ b/api/tests/unit_tests/extensions/test_redis.py @@ -1,12 +1,15 @@ -from unittest.mock import patch +from unittest.mock import MagicMock, patch from redis import RedisError from redis.retry import Retry from extensions.ext_redis import ( + RedisClientWrapper, _get_base_redis_params, _get_cluster_connection_health_params, _get_connection_health_params, + _normalize_redis_key_prefix, + _serialize_redis_name, redis_fallback, ) @@ -123,3 +126,99 @@ class TestRedisFallback: assert test_func.__name__ == "test_func" assert test_func.__doc__ == "Test function docstring" + + +class TestRedisKeyPrefixHelpers: + def test_normalize_redis_key_prefix_trims_whitespace(self): + assert _normalize_redis_key_prefix(" enterprise-a ") == "enterprise-a" + + def test_normalize_redis_key_prefix_treats_whitespace_only_as_empty(self): + assert _normalize_redis_key_prefix(" ") == "" + + def test_serialize_redis_name_returns_original_when_prefix_empty(self): + assert _serialize_redis_name("model_lb_index:test", "") == "model_lb_index:test" + + def test_serialize_redis_name_adds_single_colon_separator(self): + assert _serialize_redis_name("model_lb_index:test", "enterprise-a") == "enterprise-a:model_lb_index:test" + + +class TestRedisClientWrapperKeyPrefix: + def test_wrapper_get_prefixes_string_keys(self): + mock_client = MagicMock() + wrapper = RedisClientWrapper() + wrapper.initialize(mock_client) + + with patch("extensions.ext_redis.dify_config") as mock_config: + mock_config.REDIS_KEY_PREFIX = "enterprise-a" + + wrapper.get("oauth_state:abc") + + mock_client.get.assert_called_once_with("enterprise-a:oauth_state:abc") + + def test_wrapper_delete_prefixes_multiple_keys(self): + mock_client = MagicMock() + wrapper = RedisClientWrapper() + wrapper.initialize(mock_client) + + with patch("extensions.ext_redis.dify_config") as mock_config: + mock_config.REDIS_KEY_PREFIX = "enterprise-a" + + wrapper.delete("key:a", "key:b") + + mock_client.delete.assert_called_once_with("enterprise-a:key:a", "enterprise-a:key:b") + + def test_wrapper_lock_prefixes_lock_name(self): + mock_client = MagicMock() + wrapper = RedisClientWrapper() + wrapper.initialize(mock_client) + + with patch("extensions.ext_redis.dify_config") as mock_config: + mock_config.REDIS_KEY_PREFIX = "enterprise-a" + + wrapper.lock("resource-lock", timeout=10) + + mock_client.lock.assert_called_once() + args, kwargs = mock_client.lock.call_args + assert args == ("enterprise-a:resource-lock",) + assert kwargs["timeout"] == 10 + + def test_wrapper_hash_operations_prefix_key_name(self): + mock_client = MagicMock() + wrapper = RedisClientWrapper() + wrapper.initialize(mock_client) + + with patch("extensions.ext_redis.dify_config") as mock_config: + mock_config.REDIS_KEY_PREFIX = "enterprise-a" + + wrapper.hset("hash:key", "field", "value") + wrapper.hgetall("hash:key") + + mock_client.hset.assert_called_once_with("enterprise-a:hash:key", "field", "value") + mock_client.hgetall.assert_called_once_with("enterprise-a:hash:key") + + def test_wrapper_zadd_prefixes_sorted_set_name(self): + mock_client = MagicMock() + wrapper = RedisClientWrapper() + wrapper.initialize(mock_client) + + with patch("extensions.ext_redis.dify_config") as mock_config: + mock_config.REDIS_KEY_PREFIX = "enterprise-a" + + wrapper.zadd("zset:key", {"member": 1}) + + mock_client.zadd.assert_called_once() + args, kwargs = mock_client.zadd.call_args + assert args == ("enterprise-a:zset:key", {"member": 1}) + assert kwargs["nx"] is False + + def 
test_wrapper_preserves_keys_when_prefix_is_empty(self): + mock_client = MagicMock() + wrapper = RedisClientWrapper() + wrapper.initialize(mock_client) + + with patch("extensions.ext_redis.dify_config") as mock_config: + mock_config.REDIS_KEY_PREFIX = " " + + wrapper.get("plain:key") + + mock_client.get.assert_called_once_with("plain:key") diff --git a/api/tests/unit_tests/libs/broadcast_channel/redis/test_channel_unit_tests.py b/api/tests/unit_tests/libs/broadcast_channel/redis/test_channel_unit_tests.py index 460374b6f6..8bef01c1ed 100644 --- a/api/tests/unit_tests/libs/broadcast_channel/redis/test_channel_unit_tests.py +++ b/api/tests/unit_tests/libs/broadcast_channel/redis/test_channel_unit_tests.py @@ -139,6 +139,28 @@ class TestTopic: mock_redis_client.publish.assert_called_once_with("test-topic", payload) + def test_publish_prefixes_regular_topic(self, mock_redis_client: MagicMock): + with patch("extensions.redis_names.dify_config") as mock_config: + mock_config.REDIS_KEY_PREFIX = "enterprise-a" + topic = Topic(mock_redis_client, "test-topic") + + topic.publish(b"test message") + + mock_redis_client.publish.assert_called_once_with("enterprise-a:test-topic", b"test message") + + def test_subscribe_prefixes_regular_topic(self, mock_redis_client: MagicMock): + with patch("extensions.redis_names.dify_config") as mock_config: + mock_config.REDIS_KEY_PREFIX = "enterprise-a" + topic = Topic(mock_redis_client, "test-topic") + + subscription = topic.subscribe() + try: + subscription._start_if_needed() + finally: + subscription.close() + + mock_redis_client.pubsub.return_value.subscribe.assert_called_once_with("enterprise-a:test-topic") + class TestShardedTopic: """Test cases for the ShardedTopic class.""" @@ -176,6 +198,15 @@ class TestShardedTopic: mock_redis_client.spublish.assert_called_once_with("test-sharded-topic", payload) + def test_publish_prefixes_sharded_topic(self, mock_redis_client: MagicMock): + with patch("extensions.redis_names.dify_config") as mock_config: + mock_config.REDIS_KEY_PREFIX = "enterprise-a" + sharded_topic = ShardedTopic(mock_redis_client, "test-sharded-topic") + + sharded_topic.publish(b"test sharded message") + + mock_redis_client.spublish.assert_called_once_with("enterprise-a:test-sharded-topic", b"test sharded message") + def test_subscribe_returns_sharded_subscription(self, sharded_topic: ShardedTopic, mock_redis_client: MagicMock): """Test that subscribe() returns a _RedisShardedSubscription instance.""" subscription = sharded_topic.subscribe() @@ -185,6 +216,19 @@ class TestShardedTopic: assert subscription._pubsub is mock_redis_client.pubsub.return_value assert subscription._topic == "test-sharded-topic" + def test_subscribe_prefixes_sharded_topic(self, mock_redis_client: MagicMock): + with patch("extensions.redis_names.dify_config") as mock_config: + mock_config.REDIS_KEY_PREFIX = "enterprise-a" + sharded_topic = ShardedTopic(mock_redis_client, "test-sharded-topic") + + subscription = sharded_topic.subscribe() + try: + subscription._start_if_needed() + finally: + subscription.close() + + mock_redis_client.pubsub.return_value.ssubscribe.assert_called_once_with("enterprise-a:test-sharded-topic") + @dataclasses.dataclass(frozen=True) class SubscriptionTestCase: diff --git a/api/tests/unit_tests/libs/broadcast_channel/redis/test_streams_channel_unit_tests.py b/api/tests/unit_tests/libs/broadcast_channel/redis/test_streams_channel_unit_tests.py index 0886b70ee5..fd9e5ca5b3 100644 --- 
a/api/tests/unit_tests/libs/broadcast_channel/redis/test_streams_channel_unit_tests.py +++ b/api/tests/unit_tests/libs/broadcast_channel/redis/test_streams_channel_unit_tests.py @@ -2,6 +2,7 @@ import threading import time from dataclasses import dataclass from typing import cast +from unittest.mock import patch import pytest @@ -150,6 +151,25 @@ class TestStreamsBroadcastChannel: # Expire called after publish assert fake_redis._expire_calls.get("stream:beta", 0) >= 1 + def test_topic_uses_prefixed_stream_key(self, fake_redis: FakeStreamsRedis): + with patch("extensions.redis_names.dify_config") as mock_config: + mock_config.REDIS_KEY_PREFIX = "enterprise-a" + + topic = StreamsBroadcastChannel(fake_redis, retention_seconds=60).topic("alpha") + + assert topic._topic == "alpha" + assert topic._key == "enterprise-a:stream:alpha" + + def test_publish_uses_prefixed_stream_key(self, fake_redis: FakeStreamsRedis): + with patch("extensions.redis_names.dify_config") as mock_config: + mock_config.REDIS_KEY_PREFIX = "enterprise-a" + topic = StreamsBroadcastChannel(fake_redis, retention_seconds=60).topic("beta") + + topic.publish(b"hello") + + assert fake_redis._store["enterprise-a:stream:beta"][0][1] == {b"data": b"hello"} + assert fake_redis._expire_calls.get("enterprise-a:stream:beta", 0) >= 1 + def test_topic_exposes_self_as_producer_and_subscriber(self, streams_channel: StreamsBroadcastChannel): topic = streams_channel.topic("producer-subscriber") diff --git a/docker/.env.example b/docker/.env.example index 4426a882f1..856b04a3df 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -351,6 +351,9 @@ REDIS_SSL_CERTFILE= REDIS_SSL_KEYFILE= # Path to client private key file for SSL authentication REDIS_DB=0 +# Optional global prefix for Redis keys, topics, streams, and Celery Redis transport artifacts. +# Leave empty to preserve current unprefixed behavior. +REDIS_KEY_PREFIX= # Optional: limit total Redis connections used by API/Worker (unset for default) # Align with API's REDIS_MAX_CONNECTIONS in configs REDIS_MAX_CONNECTIONS= diff --git a/docker/README.md b/docker/README.md index 4c40317f37..3130fa9886 100644 --- a/docker/README.md +++ b/docker/README.md @@ -88,6 +88,7 @@ The `.env.example` file provided in the Docker setup is extensive and covers a w 1. **Redis Configuration**: - `REDIS_HOST`, `REDIS_PORT`, `REDIS_PASSWORD`: Redis server connection settings. + - `REDIS_KEY_PREFIX`: Optional global namespace prefix for Redis keys, topics, streams, and Celery Redis transport artifacts. 1. 
**Celery Configuration**: diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 1fc1cfdf9e..c1ddba4f80 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -90,6 +90,7 @@ x-shared-env: &shared-api-worker-env REDIS_SSL_CERTFILE: ${REDIS_SSL_CERTFILE:-} REDIS_SSL_KEYFILE: ${REDIS_SSL_KEYFILE:-} REDIS_DB: ${REDIS_DB:-0} + REDIS_KEY_PREFIX: ${REDIS_KEY_PREFIX:-} REDIS_MAX_CONNECTIONS: ${REDIS_MAX_CONNECTIONS:-} REDIS_USE_SENTINEL: ${REDIS_USE_SENTINEL:-false} REDIS_SENTINELS: ${REDIS_SENTINELS:-} From d4783e8c1427975f8b90aa4dd9a01d6fd55e43d7 Mon Sep 17 00:00:00 2001 From: Joel Date: Tue, 14 Apr 2026 17:55:55 +0800 Subject: [PATCH 16/47] chore: url in tool description support clicking jump directly (#35163) --- .../tool-form/__tests__/item.spec.tsx | 26 +++++++++++ .../nodes/tool/components/tool-form/item.tsx | 45 ++++++++++++++++++- 2 files changed, 69 insertions(+), 2 deletions(-) diff --git a/web/app/components/workflow/nodes/tool/components/tool-form/__tests__/item.spec.tsx b/web/app/components/workflow/nodes/tool/components/tool-form/__tests__/item.spec.tsx index e5760310a9..896897a777 100644 --- a/web/app/components/workflow/nodes/tool/components/tool-form/__tests__/item.spec.tsx +++ b/web/app/components/workflow/nodes/tool/components/tool-form/__tests__/item.spec.tsx @@ -78,6 +78,7 @@ describe('tool/tool-form/item', () => { mockUseLanguage.mockReturnValue('en_US') }) + // Text input fields render their descriptions inline above the input. it('should render text input labels and forward props to form input item', () => { const handleChange = vi.fn() const handleManageInputField = vi.fn() @@ -121,6 +122,31 @@ describe('tool/tool-form/item', () => { }) }) + // URL fragments inside descriptions should be rendered as external links. + it('should render URLs in descriptions as external links', () => { + render( + , + ) + + const link = screen.getByRole('link', { name: 'https://docs.dify.ai/tools' }) + expect(link).toHaveAttribute('href', 'https://docs.dify.ai/tools') + expect(link).toHaveAttribute('target', '_blank') + expect(link).toHaveAttribute('rel', 'noopener noreferrer') + expect(link.parentElement).toHaveTextContent('Visit https://docs.dify.ai/tools for docs') + }) + + // Non-text fields keep their descriptions inside the tooltip and support JSON schema preview. 
it('should show tooltip for non-description fields and open the schema modal', () => { const objectSchema = createSchema({ name: 'tool_config', diff --git a/web/app/components/workflow/nodes/tool/components/tool-form/item.tsx b/web/app/components/workflow/nodes/tool/components/tool-form/item.tsx index d83f445c2c..5011cf9486 100644 --- a/web/app/components/workflow/nodes/tool/components/tool-form/item.tsx +++ b/web/app/components/workflow/nodes/tool/components/tool-form/item.tsx @@ -1,5 +1,5 @@ 'use client' -import type { FC } from 'react' +import type { FC, ReactNode } from 'react' import type { ToolVarInputs } from '../../types' import type { CredentialFormSchema } from '@/app/components/header/account-setting/model-provider-page/declarations' import type { Tool } from '@/app/components/tools/types' @@ -15,6 +15,45 @@ import { useLanguage } from '@/app/components/header/account-setting/model-provi import { SchemaModal } from '@/app/components/plugins/plugin-detail-panel/tool-selector/components' import FormInputItem from '@/app/components/workflow/nodes/_base/components/form-input-item' +const URL_REGEX = /(https?:\/\/\S+)/g + +const renderDescriptionWithLinks = (description: string): ReactNode => { + const matches = [...description.matchAll(URL_REGEX)] + + if (!matches.length) + return description + + const parts: ReactNode[] = [] + let currentIndex = 0 + + matches.forEach((match, index) => { + const [url] = match + const start = match.index ?? 0 + + if (start > currentIndex) + parts.push(description.slice(currentIndex, start)) + + parts.push( + + {url} + , + ) + + currentIndex = start + url.length + }) + + if (currentIndex < description.length) + parts.push(description.slice(currentIndex)) + + return parts +} + type Props = { readOnly: boolean nodeId: string @@ -87,7 +126,9 @@ const ToolFormItem: FC = ({ )}
      {showDescription && tooltip && (
-       {tooltip[language] || tooltip.en_US}
+       {renderDescriptionWithLinks(tooltip[language] || tooltip.en_US)}
)}
Date: Tue, 14 Apr 2026 21:22:23 +0800 Subject: [PATCH 17/47] refactor(web): re-design button api (#35166) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../tracing/provider-config-modal.tsx | 4 +- .../(humanInputLayout)/form/[token]/form.tsx | 22 +- .../webapp-reset-password/check-code/page.tsx | 10 +- .../webapp-reset-password/page.tsx | 10 +- .../set-password/page.tsx | 10 +- .../webapp-signin/check-code/page.tsx | 10 +- .../components/mail-and-code-auth.tsx | 4 +- .../components/mail-and-password-auth.tsx | 4 +- .../webapp-signin/components/sso-auth.tsx | 2 +- .../account-page/AvatarWithEdit.tsx | 6 +- .../account-page/email-change-modal.tsx | 54 +- .../(commonLayout)/account-page/index.tsx | 32 +- .../delete-account/components/check-email.tsx | 8 +- .../delete-account/components/feed-back.tsx | 4 +- .../components/verify-email.tsx | 10 +- web/app/account/(commonLayout)/header.tsx | 6 +- web/app/account/oauth/authorize/page.tsx | 12 +- web/app/activate/activateForm.tsx | 2 +- .../__tests__/app-info-detail-panel.spec.tsx | 4 +- .../__tests__/app-operations.spec.tsx | 4 +- .../app-info/app-info-detail-panel.tsx | 12 +- .../app-sidebar/app-info/app-operations.tsx | 12 +- .../components/app-sidebar/toggle-button.tsx | 4 +- .../annotation/add-annotation-modal/index.tsx | 6 +- .../csv-uploader.tsx | 4 +- .../batch-add-annotation-modal/index.tsx | 8 +- .../edit-annotation-modal/edit-item/index.tsx | 12 +- .../app/annotation/header-opts/index.tsx | 16 +- .../add-member-or-group-pop.tsx | 8 +- .../app/app-access-control/index.tsx | 6 +- .../components/app/app-publisher/index.tsx | 4 +- .../publish-with-multiple-model.tsx | 2 +- .../components/app/app-publisher/sections.tsx | 2 +- .../app/app-publisher/version-info-modal.tsx | 10 +- .../warning-mask/cannot-query-dataset.tsx | 2 +- .../base/warning-mask/formatting-changed.tsx | 2 +- .../config-prompt/advanced-prompt-input.tsx | 12 +- .../config-prompt/confirm-add-var/index.tsx | 4 +- .../conversation-history/edit-modal.tsx | 6 +- .../app/configuration/config-prompt/index.tsx | 2 +- .../configuration/config-var/modal-foot.tsx | 2 +- .../config-vision/param-config.tsx | 2 +- .../config/agent-setting-button.tsx | 2 +- .../config/agent/agent-setting/index.tsx | 16 +- .../config/agent/agent-tools/index.tsx | 12 +- .../agent-tools/setting-built-in-tool.tsx | 18 +- .../config/automatic/automatic-btn.tsx | 2 +- .../config/automatic/get-automatic-res.tsx | 10 +- .../configuration/config/automatic/result.tsx | 4 +- .../code-generator/get-code-generator-res.tsx | 6 +- .../app/configuration/configuration-view.tsx | 2 +- .../configuration/ctrl-btn-group/index.tsx | 4 +- .../dataset-config/params-config/index.tsx | 2 +- .../dataset-config/select-dataset/index.tsx | 4 +- .../dataset-config/settings-modal/index.tsx | 16 +- .../app/configuration/debug/index.tsx | 16 +- .../prompt-value-panel/index.tsx | 8 +- .../tools/external-data-tool-modal.tsx | 8 +- .../app/create-app-dialog/app-card/index.tsx | 8 +- .../components/app/create-app-modal/index.tsx | 38 +- .../dsl-confirm-modal.tsx | 4 +- .../app/create-from-dsl-modal/index.tsx | 12 +- .../components/app/duplicate-modal/index.tsx | 10 +- .../components/app/in-site-message/index.tsx | 12 +- .../app/overview/apikey-info-panel/index.tsx | 8 +- .../app/overview/app-card-sections.tsx | 2 +- .../app/overview/customize/index.tsx | 20 +- .../app/overview/settings/index.tsx | 46 +- .../components/app/switch-app-modal/index.tsx | 10 +- .../saved-items/no-data/index.tsx | 6 +- 
.../components/base/app-icon-picker/index.tsx | 4 +- .../base/button/__tests__/add-button.spec.tsx | 49 - .../button/__tests__/sync-button.spec.tsx | 52 - .../base/button/add-button.stories.tsx | 52 - web/app/components/base/button/add-button.tsx | 21 - .../base/button/sync-button.stories.tsx | 57 - .../components/base/button/sync-button.tsx | 26 - .../chat-with-history/inputs-form/index.tsx | 8 +- .../sidebar/__tests__/rename-modal.spec.tsx | 10 +- .../chat/chat-with-history/sidebar/index.tsx | 4 +- .../sidebar/rename-modal.tsx | 4 +- .../human-input-content/human-input-form.tsx | 4 +- .../chat/chat/chat-input-area/operation.tsx | 2 +- web/app/components/base/chat/chat/index.tsx | 2 +- .../components/base/chat/chat/question.tsx | 6 +- .../components/base/chat/chat/try-to-ask.tsx | 6 +- .../embedded-chatbot/inputs-form/index.tsx | 8 +- .../components/base/checkbox-list/index.tsx | 14 +- .../components/base/confirm/index.stories.tsx | 2 +- web/app/components/base/confirm/index.tsx | 12 +- .../date-picker/footer.tsx | 8 +- .../time-picker/footer.tsx | 2 +- .../year-and-month-picker/footer.tsx | 2 +- web/app/components/base/drawer/index.tsx | 4 +- .../components/base/emoji-picker/index.tsx | 4 +- .../components/base/error-boundary/index.tsx | 6 +- .../annotation-reply/config-param-modal.tsx | 4 +- .../annotation-reply/index.tsx | 12 +- .../conversation-opener/index.tsx | 6 +- .../conversation-opener/modal.tsx | 12 +- .../new-feature-panel/feature-bar.tsx | 6 +- .../new-feature-panel/file-upload/index.tsx | 8 +- .../file-upload/setting-content.tsx | 4 +- .../new-feature-panel/image-upload/index.tsx | 8 +- .../new-feature-panel/moderation/index.tsx | 12 +- .../moderation/moderation-setting-modal.tsx | 18 +- .../text-to-speech/index.tsx | 10 +- .../file-from-link-or-local/index.tsx | 8 +- .../file-uploader-in-attachment/index.tsx | 2 +- .../file-image-item.tsx | 8 +- .../file-uploader-in-chat-input/file-item.tsx | 10 +- .../base/form/components/form/actions.tsx | 2 +- .../components/base/form/index.stories.tsx | 2 +- .../base/image-uploader/image-link-input.tsx | 4 +- .../base/inline-delete-confirm/index.tsx | 4 +- .../base/markdown-blocks/button.tsx | 4 +- .../components/base/markdown-blocks/form.tsx | 6 +- .../components/base/modal-like-wrap/index.tsx | 6 +- .../components/base/modal/modal.stories.tsx | 4 +- web/app/components/base/modal/modal.tsx | 12 +- .../base/notion-connector/index.tsx | 6 +- web/app/components/base/pagination/index.tsx | 14 +- .../plugins/hitl-input-block/input-field.tsx | 12 +- .../base/tag-management/tag-remove-modal.tsx | 8 +- .../components/base/ui/alert-dialog/index.tsx | 6 +- .../{ => ui}/button/__tests__/index.spec.tsx | 9 +- .../components/base/{ => ui}/button/index.css | 185 +-- .../base/{ => ui}/button/index.stories.tsx | 11 +- .../components/base/{ => ui}/button/index.tsx | 47 +- .../billing/apps-full-in-dialog/index.tsx | 8 +- .../billing/plan-upgrade-modal/index.tsx | 4 +- web/app/components/billing/plan/index.tsx | 2 +- web/app/components/billing/pricing/header.tsx | 6 +- .../components/billing/upgrade-btn/index.tsx | 2 +- .../components/chat-preview-card.tsx | 12 +- .../components/workflow-preview-card.tsx | 10 +- .../custom/custom-web-app-brand/index.tsx | 16 +- .../datasets/common/image-previewer/index.tsx | 12 +- .../image-uploader-in-chunk/image-item.tsx | 4 +- .../image-item.tsx | 4 +- .../dsl-confirm-modal.tsx | 4 +- .../create-from-dsl-modal/index.tsx | 4 +- .../datasets/create-from-pipeline/header.tsx | 6 +- .../list/template-card/actions.tsx | 2 
+- .../list/template-card/details/index.tsx | 12 +- .../list/template-card/edit-pipeline-info.tsx | 10 +- .../create/embedding-process/index.tsx | 4 +- .../empty-dataset-creation-modal/index.tsx | 2 +- .../step-one/components/next-step-button.tsx | 2 +- .../components/general-chunking-options.tsx | 10 +- .../components/indexing-mode-section.tsx | 20 +- .../components/parent-child-options.tsx | 6 +- .../step-two/components/step-two-footer.tsx | 2 +- .../create/stop-embedding-modal/index.tsx | 2 +- .../website/base/__tests__/url-input.spec.tsx | 9 +- .../website/base/crawled-result-item.tsx | 4 +- .../datasets/create/website/base/header.tsx | 4 +- .../create/website/base/url-input.tsx | 3 +- .../base/__tests__/url-input.spec.tsx | 8 +- .../website/jina-reader/base/url-input.tsx | 2 +- .../datasets/create/website/no-data.tsx | 6 +- .../documents/components/documents-header.tsx | 2 +- .../documents/components/empty-element.tsx | 4 +- .../documents/components/rename-modal.tsx | 4 +- .../create-from-pipeline/actions/index.tsx | 6 +- .../base/__tests__/header.spec.tsx | 4 +- .../data-source/base/header.tsx | 4 +- .../online-drive/connect/index.tsx | 6 +- .../file-list/list/empty-search-result.tsx | 2 +- .../data-source/online-drive/header.tsx | 4 +- .../__tests__/crawled-result-item.spec.tsx | 4 +- .../base/crawled-result-item.tsx | 8 +- .../website-crawl/base/options/index.tsx | 5 +- .../create-from-pipeline/left-header.tsx | 12 +- .../preview/chunk-preview.tsx | 2 +- .../__tests__/header.spec.tsx | 4 +- .../process-documents/actions.tsx | 2 +- .../process-documents/header.tsx | 4 +- .../processing/embedding-process/index.tsx | 12 +- .../detail/batch-modal/csv-uploader.tsx | 4 +- .../documents/detail/batch-modal/index.tsx | 6 +- .../completed/common/action-buttons.tsx | 2 +- .../detail/completed/common/batch-action.tsx | 6 +- .../completed/common/regeneration-modal.tsx | 6 +- .../metadata/components/doc-type-selector.tsx | 2 +- .../documents/detail/metadata/index.tsx | 2 +- .../pipeline-settings/left-header.tsx | 12 +- .../process-documents/actions.tsx | 2 +- .../external-api/external-api-modal/index.tsx | 18 +- .../external-api/external-api-panel/index.tsx | 10 +- .../create/ExternalApiSelection.tsx | 2 +- .../external-knowledge-base/create/index.tsx | 10 +- .../datasets/extra-info/service-api/card.tsx | 10 +- .../modify-external-retrieval-modal.spec.tsx | 4 +- .../__tests__/modify-retrieval-modal.spec.tsx | 4 +- .../query-input/__tests__/index.spec.tsx | 6 +- .../components/query-input/index.tsx | 8 +- .../modify-external-retrieval-modal.tsx | 6 +- .../hit-testing/modify-retrieval-modal.tsx | 8 +- web/app/components/datasets/list/index.tsx | 6 +- .../datasets/metadata/add-metadata-button.tsx | 2 +- .../metadata/edit-metadata-batch/modal.tsx | 12 +- .../dataset-metadata-drawer.tsx | 12 +- .../metadata/metadata-document/index.tsx | 2 +- .../metadata/metadata-document/no-data.tsx | 6 +- .../datasets/rename-modal/index.tsx | 8 +- .../datasets/settings/form/index.tsx | 2 +- .../develop/secret-key/secret-key-button.tsx | 4 +- .../secret-key/secret-key-generate.tsx | 6 +- .../develop/secret-key/secret-key-modal.tsx | 6 +- web/app/components/explore/app-card/index.tsx | 12 +- web/app/components/explore/app-list/index.tsx | 6 +- .../explore/create-app-modal/index.tsx | 18 +- .../explore/try-app/app-info/index.tsx | 14 +- web/app/components/explore/try-app/index.tsx | 4 +- .../components/header/account-about/index.tsx | 4 +- .../__tests__/compliance.spec.tsx | 32 +- 
.../header/account-dropdown/compliance.tsx | 23 +- .../api-based-extension-page/index.tsx | 2 +- .../api-based-extension-page/item.tsx | 2 +- .../api-based-extension-page/modal.tsx | 6 +- .../data-source-page-new/configure.tsx | 4 +- .../data-source-page-new/item.tsx | 8 +- .../header/account-setting/index.tsx | 20 +- .../edit-workspace-modal/index.tsx | 4 +- .../members-page/invite-button.tsx | 2 +- .../members-page/invite-modal/index.tsx | 10 +- .../members-page/invited-modal/index.tsx | 4 +- .../transfer-ownership-modal/index.tsx | 44 +- .../model-auth/add-custom-model.tsx | 10 +- .../model-auth/authorized/index.tsx | 6 +- .../model-auth/config-model.tsx | 6 +- .../model-auth/config-provider.tsx | 4 +- .../manage-custom-model-credentials.tsx | 2 +- .../switch-credential-in-load-balancing.tsx | 2 +- .../model-provider-page/model-modal/index.tsx | 23 +- .../configuration-button.tsx | 2 +- .../presets-parameter.tsx | 4 +- .../model-selector/popup.tsx | 28 +- .../model-auth-dropdown/api-key-section.tsx | 8 +- .../model-auth-dropdown/index.tsx | 4 +- .../model-load-balancing-modal.tsx | 6 +- .../provider-added-card/priority-selector.tsx | 6 +- .../provider-card-actions.tsx | 4 +- .../system-model-selector/index.tsx | 14 +- .../plugins/install-plugin/base/installed.tsx | 2 +- .../install-bundle/steps/install.tsx | 4 +- .../install-bundle/steps/installed.tsx | 2 +- .../install-from-github/steps/loaded.tsx | 2 +- .../steps/selectPackage.tsx | 2 +- .../install-from-github/steps/setURL.tsx | 8 +- .../steps/install.tsx | 6 +- .../steps/uploading.tsx | 2 +- .../steps/install.tsx | 6 +- .../plugins/marketplace/list/card-wrapper.tsx | 6 +- .../authorize/add-api-key-button.tsx | 4 +- .../authorize/add-oauth-button.tsx | 12 +- .../authorize/oauth-client-settings.tsx | 2 +- .../authorized-in-data-source-node.tsx | 2 +- .../plugin-auth/authorized-in-node.tsx | 2 +- .../plugins/plugin-auth/authorized/index.tsx | 6 +- .../plugins/plugin-auth/authorized/item.tsx | 6 +- .../plugin-auth/plugin-auth-in-agent.tsx | 2 +- .../plugin-auth-in-datasource-node.tsx | 2 +- .../datasource-action-list.tsx | 6 +- .../detail-header/__tests__/index.spec.tsx | 4 +- .../detail-header/index.tsx | 6 +- .../plugin-detail-panel/endpoint-modal.tsx | 10 +- .../subscription-list/create/index.tsx | 6 +- .../subscription-list/create/oauth-client.tsx | 8 +- .../components/tool-credentials-form.tsx | 2 +- .../tool-selector/components/tool-item.tsx | 6 +- .../plugins/plugin-mutation-model/index.tsx | 4 +- .../plugin-page/__tests__/debug-info.spec.tsx | 4 +- .../install-plugin-dropdown.spec.tsx | 4 +- .../plugins/plugin-page/debug-info.tsx | 4 +- .../plugins/plugin-page/empty/index.tsx | 10 +- .../components/plugins/plugin-page/index.tsx | 4 +- .../plugin-page/install-plugin-dropdown.tsx | 6 +- .../components/error-plugin-item.tsx | 2 +- .../components/plugin-task-list.tsx | 4 +- .../plugins/plugin-page/plugins-panel.tsx | 4 +- web/app/components/plugins/provider-card.tsx | 6 +- .../__tests__/plugins-picker.spec.tsx | 4 +- .../__tests__/strategy-picker.spec.tsx | 4 +- .../auto-update-setting/plugins-picker.tsx | 4 +- .../auto-update-setting/strategy-picker.tsx | 4 +- .../plugins/reference-setting-modal/index.tsx | 6 +- .../__tests__/from-market-place.spec.tsx | 4 +- .../update-plugin/downgrade-warning.tsx | 6 +- .../update-plugin/from-market-place.tsx | 6 +- .../components/__tests__/conversion.spec.tsx | 4 +- ...blish-as-knowledge-pipeline-modal.spec.tsx | 4 +- .../__tests__/update-dsl-modal.spec.tsx | 8 +- 
.../rag-pipeline/components/conversion.tsx | 4 +- .../panel/input-field/editor/form/index.tsx | 2 +- .../components/panel/input-field/index.tsx | 6 +- .../test-run/preparation/actions/index.tsx | 2 +- .../document-processing/actions.tsx | 2 +- .../test-run/result/result-preview/index.tsx | 4 +- .../publish-as-knowledge-pipeline-modal.tsx | 10 +- .../input-field-button.tsx | 2 +- .../publisher/__tests__/popup.spec.tsx | 4 +- .../rag-pipeline-header/publisher/index.tsx | 2 +- .../rag-pipeline-header/publisher/popup.tsx | 16 +- .../components/update-dsl-modal.tsx | 11 +- .../components/version-mismatch-modal.tsx | 4 +- .../share/text-generation/result/index.tsx | 6 +- .../share/text-generation/run-batch/index.tsx | 4 +- .../run-batch/res-download/index.tsx | 2 +- .../share/text-generation/run-once/index.tsx | 8 +- .../config-credentials.tsx | 14 +- .../get-schema.tsx | 12 +- .../edit-custom-collection-modal/index.tsx | 26 +- .../edit-custom-collection-modal/test-api.tsx | 16 +- .../components/tools/mcp/detail/content.tsx | 20 +- .../components/tools/mcp/headers-input.tsx | 6 +- .../components/tools/mcp/mcp-server-modal.tsx | 10 +- .../components/tools/mcp/mcp-service-card.tsx | 12 +- web/app/components/tools/mcp/modal.tsx | 8 +- web/app/components/tools/provider/detail.tsx | 8 +- .../setting/build-in/config-credentials.tsx | 4 +- .../tools/workflow-tool/configure-button.tsx | 8 +- .../confirm-modal/__tests__/index.spec.tsx | 3 +- .../workflow-tool/confirm-modal/index.tsx | 8 +- .../components/tools/workflow-tool/index.tsx | 4 +- .../workflow-header/features-trigger.tsx | 2 +- .../block-selector/all-start-blocks.tsx | 6 +- .../workflow/block-selector/all-tools.tsx | 4 +- .../market-place-plugin/action.tsx | 6 +- .../workflow/dsl-export-confirm-modal.tsx | 16 +- .../workflow/header/chat-variable-button.tsx | 2 +- .../header/checklist/plugin-group.tsx | 4 +- .../components/workflow/header/env-button.tsx | 2 +- .../header/global-variable-button.tsx | 2 +- .../workflow/header/header-in-restoring.tsx | 4 +- .../header/header-in-view-history.tsx | 2 +- .../header/version-history-button.tsx | 4 +- .../nodes/_base/components/add-button.tsx | 2 +- .../_base/components/before-run-form/form.tsx | 7 +- .../components/before-run-form/index.tsx | 2 +- .../error-handle-type-selector.tsx | 4 +- .../components/install-plugin-button.tsx | 2 +- .../nodes/_base/components/next-step/item.tsx | 4 +- .../_base/components/next-step/operator.tsx | 4 +- .../variable/var-reference-picker.trigger.tsx | 23 +- .../workflow-panel/last-run/no-data.tsx | 4 +- .../components/workflow/nodes/code/panel.tsx | 28 +- .../nodes/data-source-empty/index.tsx | 4 +- .../nodes/data-source/before-run-form.tsx | 2 +- .../components/workflow/nodes/end/panel.tsx | 9 +- .../http/components/authorization/index.tsx | 4 +- .../nodes/http/components/curl-panel.tsx | 2 +- .../__tests__/button-style-dropdown.spec.tsx | 5 +- .../__tests__/form-content-preview.spec.tsx | 5 +- .../components/__tests__/user-action.spec.tsx | 5 +- .../components/button-style-dropdown.tsx | 2 +- .../delivery-method/email-configure-modal.tsx | 10 +- .../delivery-method/method-item.tsx | 6 +- .../recipient/member-selector.tsx | 2 +- .../delivery-method/test-email-sender.tsx | 12 +- .../delivery-method/upgrade-modal.tsx | 6 +- .../components/form-content-preview.tsx | 6 +- .../components/single-run-form.tsx | 8 +- .../human-input/components/user-action.tsx | 2 +- .../workflow/nodes/human-input/panel.tsx | 8 +- .../if-else/components/condition-add.tsx | 2 +- 
.../condition-list/condition-operator.tsx | 2 +- .../components/condition-number-input.tsx | 6 +- .../if-else/components/condition-wrap.tsx | 10 +- .../workflow/nodes/if-else/panel.tsx | 4 +- .../components/chunk-structure/index.tsx | 4 +- .../components/chunk-structure/selector.tsx | 4 +- .../components/add-dataset.tsx | 5 +- .../components/metadata/add-condition.tsx | 6 +- .../condition-list/condition-operator.tsx | 2 +- .../condition-list/condition-value-method.tsx | 2 +- .../metadata-filter-selector.tsx | 2 +- .../components/metadata/metadata-trigger.tsx | 4 +- .../components/retrieval-config.tsx | 4 +- .../json-importer.tsx | 10 +- .../json-schema-config.tsx | 10 +- .../generated-result.tsx | 10 +- .../json-schema-generator/prompt-editor.tsx | 14 +- .../visual-editor/add-field.tsx | 2 +- .../edit-card/advanced-actions.tsx | 2 +- .../nodes/llm/components/structure-output.tsx | 6 +- .../components/workflow/nodes/llm/panel.tsx | 11 +- .../nodes/loop/components/condition-add.tsx | 2 +- .../condition-list/condition-operator.tsx | 2 +- .../components/condition-number-input.tsx | 6 +- .../nodes/loop/components/condition-wrap.tsx | 4 +- .../components/extract-parameter/update.tsx | 9 +- .../components/workflow/nodes/start/panel.tsx | 9 +- .../nodes/template-transform/panel.tsx | 9 +- .../nodes/tool/components/tool-form/item.tsx | 10 +- .../components/trigger-form/item.tsx | 10 +- .../plugins/link-editor-plugin/component.tsx | 4 +- .../components/array-bool-list.tsx | 2 +- .../components/array-value-list.tsx | 2 +- .../components/variable-modal-trigger.tsx | 2 +- .../components/variable-modal.sections.tsx | 12 +- .../components/variable-modal.tsx | 4 +- .../panel/env-panel/variable-modal.tsx | 24 +- .../panel/env-panel/variable-trigger.tsx | 2 +- .../workflow/panel/inputs-panel.tsx | 4 +- .../context-menu/index.tsx | 2 +- .../delete-confirm-modal.tsx | 6 +- .../panel/version-history-panel/empty.tsx | 4 +- .../restore-confirm-modal.tsx | 4 +- .../workflow/panel/workflow-preview.tsx | 12 +- .../workflow/run/agent-log/agent-log-item.tsx | 8 +- .../run/agent-log/agent-log-nav-more.tsx | 4 +- .../workflow/run/agent-log/agent-log-nav.tsx | 12 +- .../iteration-log/iteration-log-trigger.tsx | 4 +- .../run/loop-log/loop-log-trigger.tsx | 4 +- .../run/retry-log/retry-log-trigger.tsx | 2 +- .../components/workflow/update-dsl-modal.tsx | 19 +- .../workflow/variable-inspect/group.tsx | 10 +- .../workflow/variable-inspect/left.tsx | 6 +- .../workflow/variable-inspect/listening.tsx | 6 +- .../education-apply/education-apply-page.tsx | 24 +- .../education-apply/expire-notice-modal.tsx | 10 +- web/app/education-apply/user-info.tsx | 8 +- .../education-apply/verify-state-modal.tsx | 12 +- .../forgot-password/ChangePasswordForm.tsx | 2 +- .../forgot-password/ForgotPasswordForm.tsx | 2 +- web/app/init/InitPasswordPopup.tsx | 4 +- web/app/install/installForm.tsx | 2 +- web/app/reset-password/check-code/page.tsx | 10 +- web/app/reset-password/page.tsx | 10 +- web/app/reset-password/set-password/page.tsx | 10 +- web/app/signin/check-code/page.tsx | 10 +- .../signin/components/mail-and-code-auth.tsx | 4 +- .../components/mail-and-password-auth.tsx | 6 +- web/app/signin/components/social-auth.tsx | 2 +- web/app/signin/components/sso-auth.tsx | 2 +- web/app/signin/invite-settings/page.tsx | 18 +- web/app/signin/one-more-step.tsx | 10 +- web/app/signup/check-code/page.tsx | 10 +- web/app/signup/components/input-mail.tsx | 10 +- web/app/signup/set-password/page.tsx | 10 +- web/app/styles/globals.css | 2 +- 
web/eslint-suppressions.json | 1239 ----------------- 444 files changed, 1636 insertions(+), 3169 deletions(-) delete mode 100644 web/app/components/base/button/__tests__/add-button.spec.tsx delete mode 100644 web/app/components/base/button/__tests__/sync-button.spec.tsx delete mode 100644 web/app/components/base/button/add-button.stories.tsx delete mode 100644 web/app/components/base/button/add-button.tsx delete mode 100644 web/app/components/base/button/sync-button.stories.tsx delete mode 100644 web/app/components/base/button/sync-button.tsx rename web/app/components/base/{ => ui}/button/__tests__/index.spec.tsx (93%) rename web/app/components/base/{ => ui}/button/index.css (66%) rename web/app/components/base/{ => ui}/button/index.stories.tsx (88%) rename web/app/components/base/{ => ui}/button/index.tsx (57%) diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx index 72913b4934..caf6562a3e 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx @@ -5,7 +5,6 @@ import { useBoolean } from 'ahooks' import * as React from 'react' import { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' -import Button from '@/app/components/base/button' import Confirm from '@/app/components/base/confirm' import Divider from '@/app/components/base/divider' import { LinkExternal02 } from '@/app/components/base/icons/src/vender/line/general' @@ -14,6 +13,7 @@ import { PortalToFollowElem, PortalToFollowElemContent, } from '@/app/components/base/portal-to-follow-elem' +import { Button } from '@/app/components/base/ui/button' import { toast } from '@/app/components/base/ui/toast' import { addTracingConfig, removeTracingConfig, updateTracingConfig } from '@/service/apps' import { docURL } from './config' @@ -621,7 +621,7 @@ const ProviderConfigModal: FC = ({
diff --git a/web/app/(humanInputLayout)/form/[token]/form.tsx b/web/app/(humanInputLayout)/form/[token]/form.tsx index 221420aade..898dab8f4a 100644 --- a/web/app/(humanInputLayout)/form/[token]/form.tsx +++ b/web/app/(humanInputLayout)/form/[token]/form.tsx @@ -1,5 +1,5 @@ 'use client' -import type { ButtonProps } from '@/app/components/base/button' +import type { ButtonProps } from '@/app/components/base/ui/button' import type { FormInputItem, UserAction } from '@/app/components/workflow/nodes/human-input/types' import type { SiteInfo } from '@/models/share' import type { HumanInputFormError } from '@/service/use-share' @@ -13,12 +13,12 @@ import * as React from 'react' import { useEffect, useMemo, useState } from 'react' import { useTranslation } from 'react-i18next' import AppIcon from '@/app/components/base/app-icon' -import Button from '@/app/components/base/button' import ContentItem from '@/app/components/base/chat/chat/answer/human-input-content/content-item' import ExpirationTime from '@/app/components/base/chat/chat/answer/human-input-content/expiration-time' import { getButtonStyle } from '@/app/components/base/chat/chat/answer/human-input-content/utils' import Loading from '@/app/components/base/loading' import DifyLogo from '@/app/components/base/logo/dify-logo' +import { Button } from '@/app/components/base/ui/button' import useDocumentTitle from '@/hooks/use-document-title' import { useParams } from '@/next/navigation' import { useGetHumanInputForm, useSubmitHumanInputForm } from '@/service/use-share' @@ -100,7 +100,7 @@ const FormContent = () => { if (success) { return (
-
+
@@ -109,7 +109,7 @@ const FormContent = () => {
{t('humanInput.thanks', { ns: 'share' })}
{t('humanInput.recorded', { ns: 'share' })}
-
{t('humanInput.submissionID', { id: token, ns: 'share' })}
+
{t('humanInput.submissionID', { id: token, ns: 'share' })}
{ if (expired) { return (
-
+
@@ -137,7 +137,7 @@ const FormContent = () => {
{t('humanInput.sorry', { ns: 'share' })}
{t('humanInput.expired', { ns: 'share' })}
-
{t('humanInput.submissionID', { id: token, ns: 'share' })}
+
{t('humanInput.submissionID', { id: token, ns: 'share' })}
{ if (submitted) { return (
-
+
@@ -165,7 +165,7 @@ const FormContent = () => {
{t('humanInput.sorry', { ns: 'share' })}
{t('humanInput.completed', { ns: 'share' })}
-
{t('humanInput.submissionID', { id: token, ns: 'share' })}
+
{t('humanInput.submissionID', { id: token, ns: 'share' })}
{ if (rateLimitExceeded) { return (
-
+
@@ -210,7 +210,7 @@ const FormContent = () => { if (!formData) { return (
-
+
@@ -245,7 +245,7 @@ const FormContent = () => { background={site.icon_background} imageUrl={site.icon_url} /> -
{site.title}
+
{site.title}
diff --git a/web/app/(shareLayout)/webapp-reset-password/check-code/page.tsx b/web/app/(shareLayout)/webapp-reset-password/check-code/page.tsx index b71e6b4767..d19e5a7d2d 100644 --- a/web/app/(shareLayout)/webapp-reset-password/check-code/page.tsx +++ b/web/app/(shareLayout)/webapp-reset-password/check-code/page.tsx @@ -2,8 +2,8 @@ import { RiArrowLeftLine, RiMailSendFill } from '@remixicon/react' import { useState } from 'react' import { useTranslation } from 'react-i18next' -import Button from '@/app/components/base/button' import Input from '@/app/components/base/input' +import { Button } from '@/app/components/base/ui/button' import { toast } from '@/app/components/base/ui/toast' import Countdown from '@/app/components/signin/countdown' import { useLocale } from '@/context/i18n' @@ -62,9 +62,9 @@ export default function CheckCode() {
-
+

{t('checkCode.checkYourEmail', { ns: 'login' })}

-

+

{t('checkCode.tipsPrefix', { ns: 'login' })} {email} @@ -76,7 +76,7 @@ export default function CheckCode() {

- + setVerifyCode(e.target.value)} maxLength={6} className="mt-1" placeholder={t('checkCode.verificationCodePlaceholder', { ns: 'login' }) || ''} /> @@ -88,7 +88,7 @@ export default function CheckCode() {
- {t('back', { ns: 'login' })} + {t('back', { ns: 'login' })}
) diff --git a/web/app/(shareLayout)/webapp-reset-password/page.tsx b/web/app/(shareLayout)/webapp-reset-password/page.tsx index a25b4bb4ef..cb6ece219c 100644 --- a/web/app/(shareLayout)/webapp-reset-password/page.tsx +++ b/web/app/(shareLayout)/webapp-reset-password/page.tsx @@ -3,8 +3,8 @@ import { RiArrowLeftLine, RiLockPasswordLine } from '@remixicon/react' import { noop } from 'es-toolkit/function' import { useState } from 'react' import { useTranslation } from 'react-i18next' -import Button from '@/app/components/base/button' import Input from '@/app/components/base/input' +import { Button } from '@/app/components/base/ui/button' import { toast } from '@/app/components/base/ui/toast' import { COUNT_DOWN_KEY, COUNT_DOWN_TIME_MS } from '@/app/components/signin/countdown' import { emailRegex } from '@/config' @@ -64,9 +64,9 @@ export default function CheckCode() {
-
+

{t('resetPassword', { ns: 'login' })}

-

+

{t('resetPasswordDesc', { ns: 'login' })}

@@ -74,7 +74,7 @@ export default function CheckCode() {
- +
setEmail(e.target.value)} />
@@ -90,7 +90,7 @@ export default function CheckCode() {
- {t('backToLogin', { ns: 'login' })} + {t('backToLogin', { ns: 'login' })}
) diff --git a/web/app/(shareLayout)/webapp-reset-password/set-password/page.tsx b/web/app/(shareLayout)/webapp-reset-password/set-password/page.tsx index bc8f651d17..5b89084ea1 100644 --- a/web/app/(shareLayout)/webapp-reset-password/set-password/page.tsx +++ b/web/app/(shareLayout)/webapp-reset-password/set-password/page.tsx @@ -3,8 +3,8 @@ import { RiCheckboxCircleFill } from '@remixicon/react' import { useCountDown } from 'ahooks' import { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' -import Button from '@/app/components/base/button' import Input from '@/app/components/base/input' +import { Button } from '@/app/components/base/ui/button' import { toast } from '@/app/components/base/ui/toast' import { validPassword } from '@/config' import { useRouter, useSearchParams } from '@/next/navigation' @@ -91,7 +91,7 @@ const ChangePasswordForm = () => {

{t('changePassword', { ns: 'login' })}

-

+

{t('changePasswordTip', { ns: 'login' })}

@@ -100,7 +100,7 @@ const ChangePasswordForm = () => {
{/* Password */}
-
-
{t('error.passwordInvalid', { ns: 'login' })}
+
{t('error.passwordInvalid', { ns: 'login' })}
{/* Confirm Password */}
-
) diff --git a/web/app/(shareLayout)/webapp-signin/components/mail-and-code-auth.tsx b/web/app/(shareLayout)/webapp-signin/components/mail-and-code-auth.tsx index fbd6b216df..f600dba8b2 100644 --- a/web/app/(shareLayout)/webapp-signin/components/mail-and-code-auth.tsx +++ b/web/app/(shareLayout)/webapp-signin/components/mail-and-code-auth.tsx @@ -1,8 +1,8 @@ import { noop } from 'es-toolkit/function' import { useState } from 'react' import { useTranslation } from 'react-i18next' -import Button from '@/app/components/base/button' import Input from '@/app/components/base/input' +import { Button } from '@/app/components/base/ui/button' import { toast } from '@/app/components/base/ui/toast' import { COUNT_DOWN_KEY, COUNT_DOWN_TIME_MS } from '@/app/components/signin/countdown' import { emailRegex } from '@/config' @@ -52,7 +52,7 @@ export default function MailAndCodeAuth() {
- +
setEmail(e.target.value)} />
diff --git a/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx b/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx index 1e9355e7ba..7fe5363927 100644 --- a/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx +++ b/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx @@ -2,8 +2,8 @@ import { noop } from 'es-toolkit/function' import { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' -import Button from '@/app/components/base/button' import Input from '@/app/components/base/input' +import { Button } from '@/app/components/base/ui/button' import { toast } from '@/app/components/base/ui/toast' import { emailRegex } from '@/config' import { useLocale } from '@/context/i18n' @@ -103,7 +103,7 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut return (
-