Merge branch 'main' into wife

-LAN-
2025-09-29 18:18:55 +08:00
committed by GitHub
53 changed files with 1399 additions and 453 deletions

View File

@@ -80,10 +80,10 @@
1. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service.
```bash
uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation
uv run celery -A app.celery worker -P gevent -c 2 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation
```
Addition, if you want to debug the celery scheduled tasks, you can use the following command in another terminal:
Additionally, if you want to debug the celery scheduled tasks, you can run the following command in another terminal to start the beat service:
```bash
uv run celery -A app.celery beat
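# Note: the line below is an optional addition, not part of the original instructions.
# Like the worker command above, beat also accepts a log level for more verbose scheduling output:
#   uv run celery -A app.celery beat --loglevel INFO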

View File

@@ -4,7 +4,7 @@ from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
import services
from controllers.console import api
from controllers.console import console_ns
from controllers.console.datasets.error import DatasetNameDuplicateError
from controllers.console.wraps import (
account_initialization_required,
@@ -32,6 +32,7 @@ def _validate_description_length(description):
return description
@console_ns.route("/rag/pipeline/dataset")
class CreateRagPipelineDatasetApi(Resource):
@setup_required
@login_required
@@ -84,6 +85,7 @@ class CreateRagPipelineDatasetApi(Resource):
return import_info, 201
@console_ns.route("/rag/pipeline/empty-dataset")
class CreateEmptyRagPipelineDatasetApi(Resource):
@setup_required
@login_required
@@ -108,7 +110,3 @@ class CreateEmptyRagPipelineDatasetApi(Resource):
),
)
return marshal(dataset, dataset_detail_fields), 201
api.add_resource(CreateRagPipelineDatasetApi, "/rag/pipeline/dataset")
api.add_resource(CreateEmptyRagPipelineDatasetApi, "/rag/pipeline/empty-dataset")

View File

@@ -6,7 +6,7 @@ from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqpars
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from controllers.console import api
from controllers.console import console_ns
from controllers.console.app.error import (
DraftWorkflowNotExist,
)
@@ -111,6 +111,7 @@ def _api_prerequisite(f):
return wrapper
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables")
class RagPipelineVariableCollectionApi(Resource):
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
@@ -168,6 +169,7 @@ def validate_node_id(node_id: str) -> NoReturn | None:
return None
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/variables")
class RagPipelineNodeVariableCollectionApi(Resource):
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@@ -190,6 +192,7 @@ class RagPipelineNodeVariableCollectionApi(Resource):
return Response("", 204)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables/<uuid:variable_id>")
class RagPipelineVariableApi(Resource):
_PATCH_NAME_FIELD = "name"
_PATCH_VALUE_FIELD = "value"
@@ -284,6 +287,7 @@ class RagPipelineVariableApi(Resource):
return Response("", 204)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables/<uuid:variable_id>/reset")
class RagPipelineVariableResetApi(Resource):
@_api_prerequisite
def put(self, pipeline: Pipeline, variable_id: str):
@@ -325,6 +329,7 @@ def _get_variable_list(pipeline: Pipeline, node_id) -> WorkflowDraftVariableList
return draft_vars
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/system-variables")
class RagPipelineSystemVariableCollectionApi(Resource):
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@@ -332,6 +337,7 @@ class RagPipelineSystemVariableCollectionApi(Resource):
return _get_variable_list(pipeline, SYSTEM_VARIABLE_NODE_ID)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/environment-variables")
class RagPipelineEnvironmentVariableCollectionApi(Resource):
@_api_prerequisite
def get(self, pipeline: Pipeline):
@@ -364,26 +370,3 @@ class RagPipelineEnvironmentVariableCollectionApi(Resource):
)
return {"items": env_vars_list}
api.add_resource(
RagPipelineVariableCollectionApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables",
)
api.add_resource(
RagPipelineNodeVariableCollectionApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/variables",
)
api.add_resource(
RagPipelineVariableApi, "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables/<uuid:variable_id>"
)
api.add_resource(
RagPipelineVariableResetApi, "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/variables/<uuid:variable_id>/reset"
)
api.add_resource(
RagPipelineSystemVariableCollectionApi, "/rag/pipelines/<uuid:pipeline_id>/workflows/draft/system-variables"
)
api.add_resource(
RagPipelineEnvironmentVariableCollectionApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/environment-variables",
)

View File

@@ -5,7 +5,7 @@ from flask_restx import Resource, marshal_with, reqparse # type: ignore
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from controllers.console import api
from controllers.console import console_ns
from controllers.console.datasets.wraps import get_rag_pipeline
from controllers.console.wraps import (
account_initialization_required,
@@ -20,6 +20,7 @@ from services.app_dsl_service import ImportStatus
from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelineDslService
@console_ns.route("/rag/pipelines/imports")
class RagPipelineImportApi(Resource):
@setup_required
@login_required
@@ -66,6 +67,7 @@ class RagPipelineImportApi(Resource):
return result.model_dump(mode="json"), 200
@console_ns.route("/rag/pipelines/imports/<string:import_id>/confirm")
class RagPipelineImportConfirmApi(Resource):
@setup_required
@login_required
@@ -90,6 +92,7 @@ class RagPipelineImportConfirmApi(Resource):
return result.model_dump(mode="json"), 200
@console_ns.route("/rag/pipelines/imports/<string:pipeline_id>/check-dependencies")
class RagPipelineImportCheckDependenciesApi(Resource):
@setup_required
@login_required
@@ -107,6 +110,7 @@ class RagPipelineImportCheckDependenciesApi(Resource):
return result.model_dump(mode="json"), 200
@console_ns.route("/rag/pipelines/<string:pipeline_id>/exports")
class RagPipelineExportApi(Resource):
@setup_required
@login_required
@@ -128,22 +132,3 @@ class RagPipelineExportApi(Resource):
)
return {"data": result}, 200
# Import Rag Pipeline
api.add_resource(
RagPipelineImportApi,
"/rag/pipelines/imports",
)
api.add_resource(
RagPipelineImportConfirmApi,
"/rag/pipelines/imports/<string:import_id>/confirm",
)
api.add_resource(
RagPipelineImportCheckDependenciesApi,
"/rag/pipelines/imports/<string:pipeline_id>/check-dependencies",
)
api.add_resource(
RagPipelineExportApi,
"/rag/pipelines/<string:pipeline_id>/exports",
)

View File

@@ -9,7 +9,7 @@ from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from controllers.console import api
from controllers.console import console_ns
from controllers.console.app.error import (
ConversationCompletedError,
DraftWorkflowNotExist,
@@ -50,6 +50,7 @@ from services.rag_pipeline.rag_pipeline_transform_service import RagPipelineTran
logger = logging.getLogger(__name__)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft")
class DraftRagPipelineApi(Resource):
@setup_required
@login_required
@@ -147,6 +148,7 @@ class DraftRagPipelineApi(Resource):
}
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/iteration/nodes/<string:node_id>/run")
class RagPipelineDraftRunIterationNodeApi(Resource):
@setup_required
@login_required
@@ -181,6 +183,7 @@ class RagPipelineDraftRunIterationNodeApi(Resource):
raise InternalServerError()
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/loop/nodes/<string:node_id>/run")
class RagPipelineDraftRunLoopNodeApi(Resource):
@setup_required
@login_required
@@ -215,6 +218,7 @@ class RagPipelineDraftRunLoopNodeApi(Resource):
raise InternalServerError()
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/run")
class DraftRagPipelineRunApi(Resource):
@setup_required
@login_required
@@ -249,6 +253,7 @@ class DraftRagPipelineRunApi(Resource):
raise InvokeRateLimitHttpError(ex.description)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/run")
class PublishedRagPipelineRunApi(Resource):
@setup_required
@login_required
@@ -369,6 +374,7 @@ class PublishedRagPipelineRunApi(Resource):
#
# return result
#
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/datasource/nodes/<string:node_id>/run")
class RagPipelinePublishedDatasourceNodeRunApi(Resource):
@setup_required
@login_required
@@ -411,6 +417,7 @@ class RagPipelinePublishedDatasourceNodeRunApi(Resource):
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/nodes/<string:node_id>/run")
class RagPipelineDraftDatasourceNodeRunApi(Resource):
@setup_required
@login_required
@@ -453,6 +460,7 @@ class RagPipelineDraftDatasourceNodeRunApi(Resource):
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/run")
class RagPipelineDraftNodeRunApi(Resource):
@setup_required
@login_required
@@ -486,6 +494,7 @@ class RagPipelineDraftNodeRunApi(Resource):
return workflow_node_execution
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs/tasks/<string:task_id>/stop")
class RagPipelineTaskStopApi(Resource):
@setup_required
@login_required
@@ -504,6 +513,7 @@ class RagPipelineTaskStopApi(Resource):
return {"result": "success"}
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/publish")
class PublishedRagPipelineApi(Resource):
@setup_required
@login_required
@@ -559,6 +569,7 @@ class PublishedRagPipelineApi(Resource):
}
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/default-workflow-block-configs")
class DefaultRagPipelineBlockConfigsApi(Resource):
@setup_required
@login_required
@@ -577,6 +588,7 @@ class DefaultRagPipelineBlockConfigsApi(Resource):
return rag_pipeline_service.get_default_block_configs()
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/default-workflow-block-configs/<string:block_type>")
class DefaultRagPipelineBlockConfigApi(Resource):
@setup_required
@login_required
@@ -608,6 +620,7 @@ class DefaultRagPipelineBlockConfigApi(Resource):
return rag_pipeline_service.get_default_block_config(node_type=block_type, filters=filters)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows")
class PublishedAllRagPipelineApi(Resource):
@setup_required
@login_required
@@ -656,6 +669,7 @@ class PublishedAllRagPipelineApi(Resource):
}
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/<string:workflow_id>")
class RagPipelineByIdApi(Resource):
@setup_required
@login_required
@@ -713,6 +727,7 @@ class RagPipelineByIdApi(Resource):
return workflow
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/processing/parameters")
class PublishedRagPipelineSecondStepApi(Resource):
@setup_required
@login_required
@@ -738,6 +753,7 @@ class PublishedRagPipelineSecondStepApi(Resource):
}
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/pre-processing/parameters")
class PublishedRagPipelineFirstStepApi(Resource):
@setup_required
@login_required
@@ -763,6 +779,7 @@ class PublishedRagPipelineFirstStepApi(Resource):
}
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/pre-processing/parameters")
class DraftRagPipelineFirstStepApi(Resource):
@setup_required
@login_required
@@ -788,6 +805,7 @@ class DraftRagPipelineFirstStepApi(Resource):
}
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/processing/parameters")
class DraftRagPipelineSecondStepApi(Resource):
@setup_required
@login_required
@@ -814,6 +832,7 @@ class DraftRagPipelineSecondStepApi(Resource):
}
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs")
class RagPipelineWorkflowRunListApi(Resource):
@setup_required
@login_required
@@ -835,6 +854,7 @@ class RagPipelineWorkflowRunListApi(Resource):
return result
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs/<uuid:run_id>")
class RagPipelineWorkflowRunDetailApi(Resource):
@setup_required
@login_required
@@ -853,6 +873,7 @@ class RagPipelineWorkflowRunDetailApi(Resource):
return workflow_run
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs/<uuid:run_id>/node-executions")
class RagPipelineWorkflowRunNodeExecutionListApi(Resource):
@setup_required
@login_required
@@ -876,6 +897,7 @@ class RagPipelineWorkflowRunNodeExecutionListApi(Resource):
return {"data": node_executions}
@console_ns.route("/rag/pipelines/datasource-plugins")
class DatasourceListApi(Resource):
@setup_required
@login_required
@@ -891,6 +913,7 @@ class DatasourceListApi(Resource):
return jsonable_encoder(RagPipelineManageService.list_rag_pipeline_datasources(tenant_id))
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/last-run")
class RagPipelineWorkflowLastRunApi(Resource):
@setup_required
@login_required
@@ -912,6 +935,7 @@ class RagPipelineWorkflowLastRunApi(Resource):
return node_exec
@console_ns.route("/rag/pipelines/transform/datasets/<uuid:dataset_id>")
class RagPipelineTransformApi(Resource):
@setup_required
@login_required
@@ -929,6 +953,7 @@ class RagPipelineTransformApi(Resource):
return result
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/variables-inspect")
class RagPipelineDatasourceVariableApi(Resource):
@setup_required
@login_required
@@ -958,6 +983,7 @@ class RagPipelineDatasourceVariableApi(Resource):
return workflow_node_execution
@console_ns.route("/rag/pipelines/recommended-plugins")
class RagPipelineRecommendedPluginApi(Resource):
@setup_required
@login_required
@@ -966,114 +992,3 @@ class RagPipelineRecommendedPluginApi(Resource):
rag_pipeline_service = RagPipelineService()
recommended_plugins = rag_pipeline_service.get_recommended_plugins()
return recommended_plugins
api.add_resource(
DraftRagPipelineApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft",
)
api.add_resource(
DraftRagPipelineRunApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/run",
)
api.add_resource(
PublishedRagPipelineRunApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/published/run",
)
api.add_resource(
RagPipelineTaskStopApi,
"/rag/pipelines/<uuid:pipeline_id>/workflow-runs/tasks/<string:task_id>/stop",
)
api.add_resource(
RagPipelineDraftNodeRunApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/run",
)
api.add_resource(
RagPipelinePublishedDatasourceNodeRunApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/published/datasource/nodes/<string:node_id>/run",
)
api.add_resource(
RagPipelineDraftDatasourceNodeRunApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/nodes/<string:node_id>/run",
)
api.add_resource(
RagPipelineDraftRunIterationNodeApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/iteration/nodes/<string:node_id>/run",
)
api.add_resource(
RagPipelineDraftRunLoopNodeApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/loop/nodes/<string:node_id>/run",
)
api.add_resource(
PublishedRagPipelineApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/publish",
)
api.add_resource(
PublishedAllRagPipelineApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows",
)
api.add_resource(
DefaultRagPipelineBlockConfigsApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/default-workflow-block-configs",
)
api.add_resource(
DefaultRagPipelineBlockConfigApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/default-workflow-block-configs/<string:block_type>",
)
api.add_resource(
RagPipelineByIdApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/<string:workflow_id>",
)
api.add_resource(
RagPipelineWorkflowRunListApi,
"/rag/pipelines/<uuid:pipeline_id>/workflow-runs",
)
api.add_resource(
RagPipelineWorkflowRunDetailApi,
"/rag/pipelines/<uuid:pipeline_id>/workflow-runs/<uuid:run_id>",
)
api.add_resource(
RagPipelineWorkflowRunNodeExecutionListApi,
"/rag/pipelines/<uuid:pipeline_id>/workflow-runs/<uuid:run_id>/node-executions",
)
api.add_resource(
DatasourceListApi,
"/rag/pipelines/datasource-plugins",
)
api.add_resource(
PublishedRagPipelineSecondStepApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/published/processing/parameters",
)
api.add_resource(
PublishedRagPipelineFirstStepApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/published/pre-processing/parameters",
)
api.add_resource(
DraftRagPipelineSecondStepApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/processing/parameters",
)
api.add_resource(
DraftRagPipelineFirstStepApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/pre-processing/parameters",
)
api.add_resource(
RagPipelineWorkflowLastRunApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/last-run",
)
api.add_resource(
RagPipelineTransformApi,
"/rag/pipelines/transform/datasets/<uuid:dataset_id>",
)
api.add_resource(
RagPipelineDatasourceVariableApi,
"/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/variables-inspect",
)
api.add_resource(
RagPipelineRecommendedPluginApi,
"/rag/pipelines/recommended-plugins",
)

View File

@@ -1,10 +1,10 @@
from typing import Literal
from typing import Any, Literal, cast
from flask import request
from flask_restx import marshal, reqparse
from werkzeug.exceptions import Forbidden, NotFound
import services.dataset_service
import services
from controllers.service_api import service_api_ns
from controllers.service_api.dataset.error import DatasetInUseError, DatasetNameDuplicateError, InvalidActionError
from controllers.service_api.wraps import (
@@ -254,19 +254,21 @@ class DatasetListApi(DatasetApiResource):
"""Resource for creating datasets."""
args = dataset_create_parser.parse_args()
if args.get("embedding_model_provider"):
DatasetService.check_embedding_model_setting(
tenant_id, args.get("embedding_model_provider"), args.get("embedding_model")
)
embedding_model_provider = args.get("embedding_model_provider")
embedding_model = args.get("embedding_model")
if embedding_model_provider and embedding_model:
DatasetService.check_embedding_model_setting(tenant_id, embedding_model_provider, embedding_model)
retrieval_model = args.get("retrieval_model")
if (
args.get("retrieval_model")
and args.get("retrieval_model").get("reranking_model")
and args.get("retrieval_model").get("reranking_model").get("reranking_provider_name")
retrieval_model
and retrieval_model.get("reranking_model")
and retrieval_model.get("reranking_model").get("reranking_provider_name")
):
DatasetService.check_reranking_model_setting(
tenant_id,
args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"),
args.get("retrieval_model").get("reranking_model").get("reranking_model_name"),
retrieval_model.get("reranking_model").get("reranking_provider_name"),
retrieval_model.get("reranking_model").get("reranking_model_name"),
)
try:
@@ -317,7 +319,7 @@ class DatasetApi(DatasetApiResource):
DatasetService.check_dataset_permission(dataset, current_user)
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
data = marshal(dataset, dataset_detail_fields)
data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields))
# check embedding setting
provider_manager = ProviderManager()
assert isinstance(current_user, Account)
@@ -331,8 +333,8 @@ class DatasetApi(DatasetApiResource):
for embedding_model in embedding_models:
model_names.append(f"{embedding_model.model}:{embedding_model.provider.provider}")
if data["indexing_technique"] == "high_quality":
item_model = f"{data['embedding_model']}:{data['embedding_model_provider']}"
if data.get("indexing_technique") == "high_quality":
item_model = f"{data.get('embedding_model')}:{data.get('embedding_model_provider')}"
if item_model in model_names:
data["embedding_available"] = True
else:
@@ -341,7 +343,9 @@ class DatasetApi(DatasetApiResource):
data["embedding_available"] = True
# force update search method to keyword_search if indexing_technique is economic
data["retrieval_model_dict"]["search_method"] = "keyword_search"
retrieval_model_dict = data.get("retrieval_model_dict")
if retrieval_model_dict:
retrieval_model_dict["search_method"] = "keyword_search"
if data.get("permission") == "partial_members":
part_users_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
@@ -372,19 +376,24 @@ class DatasetApi(DatasetApiResource):
data = request.get_json()
# check embedding model setting
if data.get("indexing_technique") == "high_quality" or data.get("embedding_model_provider"):
DatasetService.check_embedding_model_setting(
dataset.tenant_id, data.get("embedding_model_provider"), data.get("embedding_model")
)
embedding_model_provider = data.get("embedding_model_provider")
embedding_model = data.get("embedding_model")
if data.get("indexing_technique") == "high_quality" or embedding_model_provider:
if embedding_model_provider and embedding_model:
DatasetService.check_embedding_model_setting(
dataset.tenant_id, embedding_model_provider, embedding_model
)
retrieval_model = data.get("retrieval_model")
if (
data.get("retrieval_model")
and data.get("retrieval_model").get("reranking_model")
and data.get("retrieval_model").get("reranking_model").get("reranking_provider_name")
retrieval_model
and retrieval_model.get("reranking_model")
and retrieval_model.get("reranking_model").get("reranking_provider_name")
):
DatasetService.check_reranking_model_setting(
dataset.tenant_id,
data.get("retrieval_model").get("reranking_model").get("reranking_provider_name"),
data.get("retrieval_model").get("reranking_model").get("reranking_model_name"),
retrieval_model.get("reranking_model").get("reranking_provider_name"),
retrieval_model.get("reranking_model").get("reranking_model_name"),
)
# The role of the current user in the ta table must be admin, owner, editor, or dataset_operator
@@ -397,7 +406,7 @@ class DatasetApi(DatasetApiResource):
if dataset is None:
raise NotFound("Dataset not found.")
result_data = marshal(dataset, dataset_detail_fields)
result_data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields))
assert isinstance(current_user, Account)
tenant_id = current_user.current_tenant_id
@@ -591,9 +600,10 @@ class DatasetTagsApi(DatasetApiResource):
args = tag_update_parser.parse_args()
args["type"] = "knowledge"
tag = TagService.update_tags(args, args.get("tag_id"))
tag_id = args["tag_id"]
tag = TagService.update_tags(args, tag_id)
binding_count = TagService.get_tag_binding_count(args.get("tag_id"))
binding_count = TagService.get_tag_binding_count(tag_id)
response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": binding_count}
@@ -616,7 +626,7 @@ class DatasetTagsApi(DatasetApiResource):
if not current_user.has_edit_permission:
raise Forbidden()
args = tag_delete_parser.parse_args()
TagService.delete_tag(args.get("tag_id"))
TagService.delete_tag(args["tag_id"])
return 204

View File

@@ -108,19 +108,21 @@ class DocumentAddByTextApi(DatasetApiResource):
if text is None or name is None:
raise ValueError("Both 'text' and 'name' must be non-null values.")
if args.get("embedding_model_provider"):
DatasetService.check_embedding_model_setting(
tenant_id, args.get("embedding_model_provider"), args.get("embedding_model")
)
embedding_model_provider = args.get("embedding_model_provider")
embedding_model = args.get("embedding_model")
if embedding_model_provider and embedding_model:
DatasetService.check_embedding_model_setting(tenant_id, embedding_model_provider, embedding_model)
retrieval_model = args.get("retrieval_model")
if (
args.get("retrieval_model")
and args.get("retrieval_model").get("reranking_model")
and args.get("retrieval_model").get("reranking_model").get("reranking_provider_name")
retrieval_model
and retrieval_model.get("reranking_model")
and retrieval_model.get("reranking_model").get("reranking_provider_name")
):
DatasetService.check_reranking_model_setting(
tenant_id,
args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"),
args.get("retrieval_model").get("reranking_model").get("reranking_model_name"),
retrieval_model.get("reranking_model").get("reranking_provider_name"),
retrieval_model.get("reranking_model").get("reranking_model_name"),
)
if not current_user:
@@ -187,15 +189,16 @@ class DocumentUpdateByTextApi(DatasetApiResource):
if not dataset:
raise ValueError("Dataset does not exist.")
retrieval_model = args.get("retrieval_model")
if (
args.get("retrieval_model")
and args.get("retrieval_model").get("reranking_model")
and args.get("retrieval_model").get("reranking_model").get("reranking_provider_name")
retrieval_model
and retrieval_model.get("reranking_model")
and retrieval_model.get("reranking_model").get("reranking_provider_name")
):
DatasetService.check_reranking_model_setting(
tenant_id,
args.get("retrieval_model").get("reranking_model").get("reranking_provider_name"),
args.get("retrieval_model").get("reranking_model").get("reranking_model_name"),
retrieval_model.get("reranking_model").get("reranking_provider_name"),
retrieval_model.get("reranking_model").get("reranking_model_name"),
)
# indexing_technique is already set in dataset since this is an update

View File

@@ -106,7 +106,7 @@ class DatasetMetadataServiceApi(DatasetApiResource):
raise NotFound("Dataset not found.")
DatasetService.check_dataset_permission(dataset, current_user)
metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name"))
metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args["name"])
return marshal(metadata, dataset_metadata_fields), 200
@service_api_ns.doc("delete_dataset_metadata")

View File

@@ -184,11 +184,22 @@ class VariablePool(BaseModel):
"""Extract the actual value from an ObjectSegment."""
return obj.value if isinstance(obj, ObjectSegment) else obj
def _get_nested_attribute(self, obj: Mapping[str, Any], attr: str):
"""Get a nested attribute from a dictionary-like object."""
if not isinstance(obj, dict):
def _get_nested_attribute(self, obj: Mapping[str, Any], attr: str) -> Segment | None:
"""
Get a nested attribute from a dictionary-like object.
Args:
obj: The dictionary-like object to search.
attr: The key to look up.
Returns:
Segment | None:
The corresponding Segment built from the attribute value if the key exists,
otherwise None.
"""
if not isinstance(obj, dict) or attr not in obj:
return None
return obj.get(attr)
return variable_factory.build_segment(obj.get(attr))
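For illustration, a minimal sketch of the new behavior, using only names that appear elsewhere in this change set (`VariablePool.empty()` and the segment classes from the unit tests further down): nested values now come back wrapped as Segments, falsy values included, while a missing key still yields `None`.
```python
from core.variables.segments import IntegerSegment, NoneSegment
from core.workflow.entities.variable_pool import VariablePool

pool = VariablePool.empty()
pool.add(("node1", "obj"), {"count": 0, "missing": None})

count = pool.get(("node1", "obj", "count"))
assert isinstance(count, IntegerSegment) and count.value == 0   # falsy value, still a Segment

missing = pool.get(("node1", "obj", "missing"))
assert isinstance(missing, NoneSegment)                         # key exists, value is None

assert pool.get(("node1", "obj", "unknown")) is None            # key absent -> None
```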
def remove(self, selector: Sequence[str], /):
"""

View File

@@ -10,6 +10,8 @@ from typing_extensions import TypeIs
from core.variables import IntegerVariable, NoneSegment
from core.variables.segments import ArrayAnySegment, ArraySegment
from core.variables.variables import VariableUnion
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID
from core.workflow.entities import VariablePool
from core.workflow.enums import (
ErrorStrategy,
@@ -217,6 +219,13 @@ class IterationNode(Node):
graph_engine=graph_engine,
)
# Sync conversation variables after each iteration completes
self._sync_conversation_variables_from_snapshot(
self._extract_conversation_variable_snapshot(
variable_pool=graph_engine.graph_runtime_state.variable_pool
)
)
# Update the total tokens from this iteration
self.graph_runtime_state.total_tokens += graph_engine.graph_runtime_state.total_tokens
iter_run_map[str(index)] = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds()
@@ -235,7 +244,10 @@ class IterationNode(Node):
with ThreadPoolExecutor(max_workers=max_workers) as executor:
# Submit all iteration tasks
future_to_index: dict[Future[tuple[datetime, list[GraphNodeEventBase], object | None, int]], int] = {}
future_to_index: dict[
Future[tuple[datetime, list[GraphNodeEventBase], object | None, int, dict[str, VariableUnion]]],
int,
] = {}
for index, item in enumerate(iterator_list_value):
yield IterationNextEvent(index=index)
future = executor.submit(
@@ -252,7 +264,7 @@ class IterationNode(Node):
index = future_to_index[future]
try:
result = future.result()
iter_start_at, events, output_value, tokens_used = result
iter_start_at, events, output_value, tokens_used, conversation_snapshot = result
# Update outputs at the correct index
outputs[index] = output_value
@@ -264,6 +276,9 @@ class IterationNode(Node):
self.graph_runtime_state.total_tokens += tokens_used
iter_run_map[str(index)] = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds()
# Sync conversation variables after iteration completion
self._sync_conversation_variables_from_snapshot(conversation_snapshot)
except Exception as e:
# Handle errors based on error_handle_mode
match self._node_data.error_handle_mode:
@@ -288,7 +303,7 @@ class IterationNode(Node):
item: object,
flask_app: Flask,
context_vars: contextvars.Context,
) -> tuple[datetime, list[GraphNodeEventBase], object | None, int]:
) -> tuple[datetime, list[GraphNodeEventBase], object | None, int, dict[str, VariableUnion]]:
"""Execute a single iteration in parallel mode and return results."""
with preserve_flask_contexts(flask_app=flask_app, context_vars=context_vars):
iter_start_at = datetime.now(UTC).replace(tzinfo=None)
@@ -307,8 +322,17 @@ class IterationNode(Node):
# Get the output value from the temporary outputs list
output_value = outputs_temp[0] if outputs_temp else None
conversation_snapshot = self._extract_conversation_variable_snapshot(
variable_pool=graph_engine.graph_runtime_state.variable_pool
)
return iter_start_at, events, output_value, graph_engine.graph_runtime_state.total_tokens
return (
iter_start_at,
events,
output_value,
graph_engine.graph_runtime_state.total_tokens,
conversation_snapshot,
)
def _handle_iteration_success(
self,
@@ -430,6 +454,23 @@ class IterationNode(Node):
return variable_mapping
def _extract_conversation_variable_snapshot(self, *, variable_pool: VariablePool) -> dict[str, VariableUnion]:
conversation_variables = variable_pool.variable_dictionary.get(CONVERSATION_VARIABLE_NODE_ID, {})
return {name: variable.model_copy(deep=True) for name, variable in conversation_variables.items()}
def _sync_conversation_variables_from_snapshot(self, snapshot: dict[str, VariableUnion]) -> None:
parent_pool = self.graph_runtime_state.variable_pool
parent_conversations = parent_pool.variable_dictionary.get(CONVERSATION_VARIABLE_NODE_ID, {})
current_keys = set(parent_conversations.keys())
snapshot_keys = set(snapshot.keys())
for removed_key in current_keys - snapshot_keys:
parent_pool.remove((CONVERSATION_VARIABLE_NODE_ID, removed_key))
for name, variable in snapshot.items():
parent_pool.add((CONVERSATION_VARIABLE_NODE_ID, name), variable)
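For illustration, a standalone sketch of what these two helpers do, built directly on `VariablePool` rather than through an `IterationNode` (the imports are the ones already used in this file): the child pool's conversation variables are deep-copied into a snapshot, and syncing drops parent keys missing from the snapshot and overwrites the rest.
```python
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID
from core.workflow.entities import VariablePool

# Parent pool as it looks before the iteration runs.
parent = VariablePool.empty()
parent.add((CONVERSATION_VARIABLE_NODE_ID, "answer"), "stale")
parent.add((CONVERSATION_VARIABLE_NODE_ID, "scratch"), "parent only")

# Child pool as it looks after one iteration updated `answer` and no longer has `scratch`.
child = VariablePool.empty()
child.add((CONVERSATION_VARIABLE_NODE_ID, "answer"), "updated in iteration")

# _extract_conversation_variable_snapshot: deep-copy the child's conversation variables.
snapshot = {
    name: var.model_copy(deep=True)
    for name, var in child.variable_dictionary.get(CONVERSATION_VARIABLE_NODE_ID, {}).items()
}

# _sync_conversation_variables_from_snapshot: drop parent-only keys, then overwrite from the snapshot.
parent_conv = parent.variable_dictionary.get(CONVERSATION_VARIABLE_NODE_ID, {})
for removed_key in set(parent_conv) - set(snapshot):
    parent.remove((CONVERSATION_VARIABLE_NODE_ID, removed_key))
for name, variable in snapshot.items():
    parent.add((CONVERSATION_VARIABLE_NODE_ID, name), variable)

assert parent.get((CONVERSATION_VARIABLE_NODE_ID, "answer")).value == "updated in iteration"
assert parent.get((CONVERSATION_VARIABLE_NODE_ID, "scratch")) is None
```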
def _append_iteration_info_to_event(
self,
event: GraphNodeEventBase,

View File

@@ -145,6 +145,7 @@ def init_app(app: DifyApp) -> Celery:
}
if dify_config.ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK and dify_config.MARKETPLACE_ENABLED:
imports.append("schedule.check_upgradable_plugin_task")
imports.append("tasks.process_tenant_plugin_autoupgrade_check_task")
beat_schedule["check_upgradable_plugin_task"] = {
"task": "schedule.check_upgradable_plugin_task.check_upgradable_plugin_task",
"schedule": crontab(minute="*/15"),

View File

@@ -142,6 +142,8 @@ def build_segment(value: Any, /) -> Segment:
# below
if value is None:
return NoneSegment()
if isinstance(value, Segment):
return value
if isinstance(value, str):
return StringSegment(value=value)
if isinstance(value, bool):

View File

@@ -180,7 +180,7 @@ dev = [
storage = [
"azure-storage-blob==12.13.0",
"bce-python-sdk~=0.9.23",
"cos-python-sdk-v5==1.9.30",
"cos-python-sdk-v5==1.9.38",
"esdk-obs-python==3.24.6.1",
"google-cloud-storage==2.16.0",
"opendal~=0.46.0",
@@ -207,7 +207,7 @@ vdb = [
"couchbase~=4.3.0",
"elasticsearch==8.14.0",
"opensearch-py==2.4.0",
"oracledb==3.0.0",
"oracledb==3.3.0",
"pgvecto-rs[sqlalchemy]~=0.2.1",
"pgvector==0.2.5",
"pymilvus~=2.5.0",

View File

@@ -8,7 +8,6 @@
"extensions",
"libs",
"controllers/console/datasets",
"controllers/service_api/dataset",
"core/ops",
"core/tools",
"core/model_runtime",

View File

@@ -6,7 +6,7 @@ import click
import app
from extensions.ext_database import db
from models.account import TenantPluginAutoUpgradeStrategy
from tasks.process_tenant_plugin_autoupgrade_check_task import process_tenant_plugin_autoupgrade_check_task
from tasks import process_tenant_plugin_autoupgrade_check_task as check_task
AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60 # 15 minutes
MAX_CONCURRENT_CHECK_TASKS = 20
@@ -43,7 +43,7 @@ def check_upgradable_plugin_task():
for i in range(0, total_strategies, MAX_CONCURRENT_CHECK_TASKS):
batch_strategies = strategies[i : i + MAX_CONCURRENT_CHECK_TASKS]
for strategy in batch_strategies:
process_tenant_plugin_autoupgrade_check_task.delay(
check_task.process_tenant_plugin_autoupgrade_check_task.delay(
strategy.tenant_id,
strategy.strategy_setting,
strategy.upgrade_time_of_day,

View File

@@ -1,5 +1,5 @@
import json
import operator
import traceback
import typing
import click
@@ -9,38 +9,106 @@ from core.helper import marketplace
from core.helper.marketplace import MarketplacePluginDeclaration
from core.plugin.entities.plugin import PluginInstallationSource
from core.plugin.impl.plugin import PluginInstaller
from extensions.ext_redis import redis_client
from models.account import TenantPluginAutoUpgradeStrategy
RETRY_TIMES_OF_ONE_PLUGIN_IN_ONE_TENANT = 3
CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:"
CACHE_REDIS_TTL = 60 * 15 # 15 minutes
cached_plugin_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {}
def _get_redis_cache_key(plugin_id: str) -> str:
"""Generate Redis cache key for plugin manifest."""
return f"{CACHE_REDIS_KEY_PREFIX}{plugin_id}"
def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginDeclaration, None, bool]:
"""
Get cached plugin manifest from Redis.
Returns:
- MarketplacePluginDeclaration: if found in cache
- None: if cached as not found (marketplace returned no result)
- False: if not in cache at all
"""
try:
key = _get_redis_cache_key(plugin_id)
cached_data = redis_client.get(key)
if cached_data is None:
return False
cached_json = json.loads(cached_data)
if cached_json is None:
return None
return MarketplacePluginDeclaration.model_validate(cached_json)
except Exception:
return False
def _set_cached_manifest(plugin_id: str, manifest: typing.Union[MarketplacePluginDeclaration, None]) -> None:
"""
Cache plugin manifest in Redis.
Args:
plugin_id: The plugin ID
manifest: The manifest to cache, or None if not found in marketplace
"""
try:
key = _get_redis_cache_key(plugin_id)
if manifest is None:
# Cache the fact that this plugin was not found
redis_client.setex(key, CACHE_REDIS_TTL, json.dumps(None))
else:
# Cache the manifest data
redis_client.setex(key, CACHE_REDIS_TTL, manifest.model_dump_json())
except Exception:
# If Redis fails, continue without caching
# traceback.print_exc()
pass
def marketplace_batch_fetch_plugin_manifests(
plugin_ids_plain_list: list[str],
) -> list[MarketplacePluginDeclaration]:
global cached_plugin_manifests
# return marketplace.batch_fetch_plugin_manifests(plugin_ids_plain_list)
not_included_plugin_ids = [
plugin_id for plugin_id in plugin_ids_plain_list if plugin_id not in cached_plugin_manifests
]
if not_included_plugin_ids:
manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_included_plugin_ids)
"""Fetch plugin manifests with Redis caching support."""
cached_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {}
not_cached_plugin_ids: list[str] = []
# Check Redis cache for each plugin
for plugin_id in plugin_ids_plain_list:
cached_result = _get_cached_manifest(plugin_id)
if cached_result is False:
# Not in cache, need to fetch
not_cached_plugin_ids.append(plugin_id)
else:
# Either found manifest or cached as None (not found in marketplace)
# At this point, cached_result is either MarketplacePluginDeclaration or None
if isinstance(cached_result, bool):
# This should never happen due to the if condition above, but for type safety
continue
cached_manifests[plugin_id] = cached_result
# Fetch uncached plugins from marketplace
if not_cached_plugin_ids:
manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_cached_plugin_ids)
# Cache the fetched manifests
for manifest in manifests:
cached_plugin_manifests[manifest.plugin_id] = manifest
cached_manifests[manifest.plugin_id] = manifest
_set_cached_manifest(manifest.plugin_id, manifest)
if (
len(manifests) == 0
): # this indicates that the plugin not found in marketplace, should set None in cache to prevent future check
for plugin_id in not_included_plugin_ids:
cached_plugin_manifests[plugin_id] = None
# Cache plugins that were not found in marketplace
fetched_plugin_ids = {manifest.plugin_id for manifest in manifests}
for plugin_id in not_cached_plugin_ids:
if plugin_id not in fetched_plugin_ids:
cached_manifests[plugin_id] = None
_set_cached_manifest(plugin_id, None)
# Build result list from cached manifests
result: list[MarketplacePluginDeclaration] = []
for plugin_id in plugin_ids_plain_list:
final_manifest = cached_plugin_manifests.get(plugin_id)
if final_manifest is not None:
result.append(final_manifest)
cached_manifest: typing.Union[MarketplacePluginDeclaration, None] = cached_manifests.get(plugin_id)
if cached_manifest is not None:
result.append(cached_manifest)
return result
@@ -157,10 +225,10 @@ def process_tenant_plugin_autoupgrade_check_task(
)
except Exception as e:
click.echo(click.style(f"Error when upgrading plugin: {e}", fg="red"))
traceback.print_exc()
# traceback.print_exc()
break
except Exception as e:
click.echo(click.style(f"Error when checking upgradable plugin: {e}", fg="red"))
traceback.print_exc()
# traceback.print_exc()
return

View File

@@ -0,0 +1,222 @@
app:
description: 'this is a chatflow with 2 answer nodes.
its outputs should look like:
```
--- answer 1 ---
foo
--- answer 2 ---
<llm''s outputs>
```'
icon: 🤖
icon_background: '#FFEAD5'
mode: advanced-chat
name: test-answer-order
use_icon_as_answer_icon: false
dependencies:
- current_identifier: null
type: marketplace
value:
marketplace_plugin_unique_identifier: langgenius/openai:0.2.6@e2665624a156f52160927bceac9e169bd7e5ae6b936ae82575e14c90af390e6e
version: null
kind: app
version: 0.4.0
workflow:
conversation_variables: []
environment_variables: []
features:
file_upload:
allowed_file_extensions:
- .JPG
- .JPEG
- .PNG
- .GIF
- .WEBP
- .SVG
allowed_file_types:
- image
allowed_file_upload_methods:
- local_file
- remote_url
enabled: false
fileUploadConfig:
audio_file_size_limit: 50
batch_count_limit: 5
file_size_limit: 15
image_file_size_limit: 10
video_file_size_limit: 100
workflow_file_upload_limit: 10
image:
enabled: false
number_limits: 3
transfer_methods:
- local_file
- remote_url
number_limits: 3
opening_statement: ''
retriever_resource:
enabled: true
sensitive_word_avoidance:
enabled: false
speech_to_text:
enabled: false
suggested_questions: []
suggested_questions_after_answer:
enabled: false
text_to_speech:
enabled: false
language: ''
voice: ''
graph:
edges:
- data:
isInIteration: false
isInLoop: false
sourceType: answer
targetType: answer
id: 1759052466526-source-1759052469368-target
source: '1759052466526'
sourceHandle: source
target: '1759052469368'
targetHandle: target
type: custom
zIndex: 0
- data:
isInIteration: false
isInLoop: false
sourceType: start
targetType: llm
id: 1759052439553-source-1759052580454-target
source: '1759052439553'
sourceHandle: source
target: '1759052580454'
targetHandle: target
type: custom
zIndex: 0
- data:
isInIteration: false
isInLoop: false
sourceType: llm
targetType: answer
id: 1759052580454-source-1759052466526-target
source: '1759052580454'
sourceHandle: source
target: '1759052466526'
targetHandle: target
type: custom
zIndex: 0
nodes:
- data:
selected: false
title: Start
type: start
variables: []
height: 52
id: '1759052439553'
position:
x: 30
y: 242
positionAbsolute:
x: 30
y: 242
sourcePosition: right
targetPosition: left
type: custom
width: 242
- data:
answer: '--- answer 1 ---
foo
'
selected: false
title: Answer
type: answer
variables: []
height: 100
id: '1759052466526'
position:
x: 632
y: 242
positionAbsolute:
x: 632
y: 242
selected: true
sourcePosition: right
targetPosition: left
type: custom
width: 242
- data:
answer: '--- answer 2 ---
{{#1759052580454.text#}}
'
selected: false
title: Answer 2
type: answer
variables: []
height: 103
id: '1759052469368'
position:
x: 934
y: 242
positionAbsolute:
x: 934
y: 242
selected: false
sourcePosition: right
targetPosition: left
type: custom
width: 242
- data:
context:
enabled: false
variable_selector: []
model:
completion_params:
temperature: 0.7
mode: chat
name: gpt-4o
provider: langgenius/openai/openai
prompt_template:
- id: 5c1d873b-06b2-4dce-939e-672882bbd7c0
role: system
text: ''
- role: user
text: '{{#sys.query#}}'
selected: false
title: LLM
type: llm
vision:
enabled: false
height: 88
id: '1759052580454'
position:
x: 332
y: 242
positionAbsolute:
x: 332
y: 242
selected: false
sourcePosition: right
targetPosition: left
type: custom
width: 242
viewport:
x: 126.2797574512839
y: 289.55932160537446
zoom: 1.0743222672006216
rag_pipeline_variables: []

View File

@@ -0,0 +1,316 @@
app:
description: 'This chatflow receives a sys.query, writes it into the `answer` variable,
and then outputs the `answer` variable.
`answer` is a conversation variable with a blank default value; it will be updated
in an iteration node.
If this chatflow works correctly, it will output the same value as `sys.query`.'
icon: 🤖
icon_background: '#FFEAD5'
mode: advanced-chat
name: update-conversation-variable-in-iteration
use_icon_as_answer_icon: false
dependencies: []
kind: app
version: 0.4.0
workflow:
conversation_variables:
- description: ''
id: c30af82d-b2ec-417d-a861-4dd78584faa4
name: answer
selector:
- conversation
- answer
value: ''
value_type: string
environment_variables: []
features:
file_upload:
allowed_file_extensions:
- .JPG
- .JPEG
- .PNG
- .GIF
- .WEBP
- .SVG
allowed_file_types:
- image
allowed_file_upload_methods:
- local_file
- remote_url
enabled: false
fileUploadConfig:
audio_file_size_limit: 50
batch_count_limit: 5
file_size_limit: 15
image_file_size_limit: 10
video_file_size_limit: 100
workflow_file_upload_limit: 10
image:
enabled: false
number_limits: 3
transfer_methods:
- local_file
- remote_url
number_limits: 3
opening_statement: ''
retriever_resource:
enabled: true
sensitive_word_avoidance:
enabled: false
speech_to_text:
enabled: false
suggested_questions: []
suggested_questions_after_answer:
enabled: false
text_to_speech:
enabled: false
language: ''
voice: ''
graph:
edges:
- data:
isInIteration: false
isInLoop: false
sourceType: start
targetType: code
id: 1759032354471-source-1759032363865-target
source: '1759032354471'
sourceHandle: source
target: '1759032363865'
targetHandle: target
type: custom
zIndex: 0
- data:
isInIteration: false
isInLoop: false
sourceType: code
targetType: iteration
id: 1759032363865-source-1759032379989-target
source: '1759032363865'
sourceHandle: source
target: '1759032379989'
targetHandle: target
type: custom
zIndex: 0
- data:
isInIteration: true
isInLoop: false
iteration_id: '1759032379989'
sourceType: iteration-start
targetType: assigner
id: 1759032379989start-source-1759032394460-target
source: 1759032379989start
sourceHandle: source
target: '1759032394460'
targetHandle: target
type: custom
zIndex: 1002
- data:
isInIteration: false
isInLoop: false
sourceType: iteration
targetType: answer
id: 1759032379989-source-1759032410331-target
source: '1759032379989'
sourceHandle: source
target: '1759032410331'
targetHandle: target
type: custom
zIndex: 0
- data:
isInIteration: true
isInLoop: false
iteration_id: '1759032379989'
sourceType: assigner
targetType: code
id: 1759032394460-source-1759032476318-target
source: '1759032394460'
sourceHandle: source
target: '1759032476318'
targetHandle: target
type: custom
zIndex: 1002
nodes:
- data:
selected: false
title: Start
type: start
variables: []
height: 52
id: '1759032354471'
position:
x: 30
y: 302
positionAbsolute:
x: 30
y: 302
selected: false
sourcePosition: right
targetPosition: left
type: custom
width: 242
- data:
code: "\ndef main():\n return {\n \"result\": [1],\n }\n"
code_language: python3
outputs:
result:
children: null
type: array[number]
selected: false
title: Code
type: code
variables: []
height: 52
id: '1759032363865'
position:
x: 332
y: 302
positionAbsolute:
x: 332
y: 302
sourcePosition: right
targetPosition: left
type: custom
width: 242
- data:
error_handle_mode: terminated
height: 204
is_parallel: false
iterator_input_type: array[number]
iterator_selector:
- '1759032363865'
- result
output_selector:
- '1759032476318'
- result
output_type: array[string]
parallel_nums: 10
selected: false
start_node_id: 1759032379989start
title: Iteration
type: iteration
width: 808
height: 204
id: '1759032379989'
position:
x: 634
y: 302
positionAbsolute:
x: 634
y: 302
selected: true
sourcePosition: right
targetPosition: left
type: custom
width: 808
zIndex: 1
- data:
desc: ''
isInIteration: true
selected: false
title: ''
type: iteration-start
draggable: false
height: 48
id: 1759032379989start
parentId: '1759032379989'
position:
x: 60
y: 78
positionAbsolute:
x: 694
y: 380
selectable: false
sourcePosition: right
targetPosition: left
type: custom-iteration-start
width: 44
zIndex: 1002
- data:
isInIteration: true
isInLoop: false
items:
- input_type: variable
operation: over-write
value:
- sys
- query
variable_selector:
- conversation
- answer
write_mode: over-write
iteration_id: '1759032379989'
selected: false
title: Variable Assigner
type: assigner
version: '2'
height: 84
id: '1759032394460'
parentId: '1759032379989'
position:
x: 204
y: 60
positionAbsolute:
x: 838
y: 362
sourcePosition: right
targetPosition: left
type: custom
width: 242
zIndex: 1002
- data:
answer: '{{#conversation.answer#}}'
selected: false
title: Answer
type: answer
variables: []
height: 104
id: '1759032410331'
position:
x: 1502
y: 302
positionAbsolute:
x: 1502
y: 302
selected: false
sourcePosition: right
targetPosition: left
type: custom
width: 242
- data:
code: "\ndef main():\n return {\n \"result\": '',\n }\n"
code_language: python3
isInIteration: true
isInLoop: false
iteration_id: '1759032379989'
outputs:
result:
children: null
type: string
selected: false
title: Code 2
type: code
variables: []
height: 52
id: '1759032476318'
parentId: '1759032379989'
position:
x: 506
y: 76
positionAbsolute:
x: 1140
y: 378
sourcePosition: right
targetPosition: left
type: custom
width: 242
zIndex: 1002
viewport:
x: 120.39999999999998
y: 85.20000000000005
zoom: 0.7
rag_pipeline_variables: []

View File

@@ -0,0 +1,113 @@
from core.variables.segments import (
BooleanSegment,
IntegerSegment,
NoneSegment,
StringSegment,
)
from core.workflow.entities.variable_pool import VariablePool
class TestVariablePoolGetAndNestedAttribute:
#
# _get_nested_attribute tests
#
def test__get_nested_attribute_existing_key(self):
pool = VariablePool.empty()
obj = {"a": 123}
segment = pool._get_nested_attribute(obj, "a")
assert segment is not None
assert segment.value == 123
def test__get_nested_attribute_missing_key(self):
pool = VariablePool.empty()
obj = {"a": 123}
segment = pool._get_nested_attribute(obj, "b")
assert segment is None
def test__get_nested_attribute_non_dict(self):
pool = VariablePool.empty()
obj = ["not", "a", "dict"]
segment = pool._get_nested_attribute(obj, "a")
assert segment is None
def test__get_nested_attribute_with_none_value(self):
pool = VariablePool.empty()
obj = {"a": None}
segment = pool._get_nested_attribute(obj, "a")
assert segment is not None
assert isinstance(segment, NoneSegment)
def test__get_nested_attribute_with_empty_string(self):
pool = VariablePool.empty()
obj = {"a": ""}
segment = pool._get_nested_attribute(obj, "a")
assert segment is not None
assert isinstance(segment, StringSegment)
assert segment.value == ""
#
# get tests
#
def test_get_simple_variable(self):
pool = VariablePool.empty()
pool.add(("node1", "var1"), "value1")
segment = pool.get(("node1", "var1"))
assert segment is not None
assert segment.value == "value1"
def test_get_missing_variable(self):
pool = VariablePool.empty()
result = pool.get(("node1", "unknown"))
assert result is None
def test_get_with_too_short_selector(self):
pool = VariablePool.empty()
result = pool.get(("only_node",))
assert result is None
def test_get_nested_object_attribute(self):
pool = VariablePool.empty()
obj_value = {"inner": "hello"}
pool.add(("node1", "obj"), obj_value)
# simulate selector with nested attr
segment = pool.get(("node1", "obj", "inner"))
assert segment is not None
assert segment.value == "hello"
def test_get_nested_object_missing_attribute(self):
pool = VariablePool.empty()
obj_value = {"inner": "hello"}
pool.add(("node1", "obj"), obj_value)
result = pool.get(("node1", "obj", "not_exist"))
assert result is None
def test_get_nested_object_attribute_with_falsy_values(self):
pool = VariablePool.empty()
obj_value = {
"inner_none": None,
"inner_empty": "",
"inner_zero": 0,
"inner_false": False,
}
pool.add(("node1", "obj"), obj_value)
segment_none = pool.get(("node1", "obj", "inner_none"))
assert segment_none is not None
assert isinstance(segment_none, NoneSegment)
segment_empty = pool.get(("node1", "obj", "inner_empty"))
assert segment_empty is not None
assert isinstance(segment_empty, StringSegment)
assert segment_empty.value == ""
segment_zero = pool.get(("node1", "obj", "inner_zero"))
assert segment_zero is not None
assert isinstance(segment_zero, IntegerSegment)
assert segment_zero.value == 0
segment_false = pool.get(("node1", "obj", "inner_false"))
assert segment_false is not None
assert isinstance(segment_false, BooleanSegment)
assert segment_false.value is False

View File

@@ -0,0 +1,28 @@
from .test_mock_config import MockConfigBuilder
from .test_table_runner import TableTestRunner, WorkflowTestCase
LLM_NODE_ID = "1759052580454"
def test_answer_nodes_emit_in_order() -> None:
mock_config = (
MockConfigBuilder()
.with_llm_response("unused default")
.with_node_output(LLM_NODE_ID, {"text": "mocked llm text"})
.build()
)
expected_answer = "--- answer 1 ---\n\nfoo\n--- answer 2 ---\n\nmocked llm text\n"
case = WorkflowTestCase(
fixture_path="test-answer-order",
query="",
expected_outputs={"answer": expected_answer},
use_auto_mock=True,
mock_config=mock_config,
)
runner = TableTestRunner()
result = runner.run_test_case(case)
assert result.success, result.error

View File

@@ -0,0 +1,41 @@
"""Validate conversation variable updates inside an iteration workflow.
This test uses the ``update-conversation-variable-in-iteration`` fixture, which
routes ``sys.query`` into the conversation variable ``answer`` from within an
iteration container. The workflow should surface that updated conversation
variable in the final answer output.
Code nodes in the fixture are mocked because their concrete outputs are not
relevant to verifying variable propagation semantics.
"""
from .test_mock_config import MockConfigBuilder
from .test_table_runner import TableTestRunner, WorkflowTestCase
def test_update_conversation_variable_in_iteration():
fixture_name = "update-conversation-variable-in-iteration"
user_query = "ensure conversation variable syncs"
mock_config = (
MockConfigBuilder()
.with_node_output("1759032363865", {"result": [1]})
.with_node_output("1759032476318", {"result": ""})
.build()
)
case = WorkflowTestCase(
fixture_path=fixture_name,
use_auto_mock=True,
mock_config=mock_config,
query=user_query,
expected_outputs={"answer": user_query},
description="Conversation variable updated within iteration should flow to answer output.",
)
runner = TableTestRunner()
result = runner.run_test_case(case)
assert result.success, f"Workflow execution failed: {result.error}"
assert result.actual_outputs is not None
assert result.actual_outputs.get("answer") == user_query

api/uv.lock (generated)
View File

@@ -1076,7 +1076,7 @@ wheels = [
[[package]]
name = "cos-python-sdk-v5"
version = "1.9.30"
version = "1.9.38"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "crcmod" },
@@ -1085,7 +1085,10 @@ dependencies = [
{ name = "six" },
{ name = "xmltodict" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c4/f2/be99b41433b33a76896680920fca621f191875ca410a66778015e47a501b/cos-python-sdk-v5-1.9.30.tar.gz", hash = "sha256:a23fd090211bf90883066d90cd74317860aa67c6d3aa80fe5e44b18c7e9b2a81", size = 108384, upload-time = "2024-06-14T08:02:37.063Z" }
sdist = { url = "https://files.pythonhosted.org/packages/24/3c/d208266fec7cc3221b449e236b87c3fc1999d5ac4379d4578480321cfecc/cos_python_sdk_v5-1.9.38.tar.gz", hash = "sha256:491a8689ae2f1a6f04dacba66a877b2c8d361456f9cfd788ed42170a1cbf7a9f", size = 98092, upload-time = "2025-07-22T07:56:20.34Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ab/c8/c9c156aa3bc7caba9b4f8a2b6abec3da6263215988f3fec0ea843f137a10/cos_python_sdk_v5-1.9.38-py3-none-any.whl", hash = "sha256:1d3dd3be2bd992b2e9c2dcd018e2596aa38eab022dbc86b4a5d14c8fc88370e6", size = 92601, upload-time = "2025-08-17T05:12:30.867Z" },
]
[[package]]
name = "couchbase"
@@ -1624,7 +1627,7 @@ dev = [
storage = [
{ name = "azure-storage-blob", specifier = "==12.13.0" },
{ name = "bce-python-sdk", specifier = "~=0.9.23" },
{ name = "cos-python-sdk-v5", specifier = "==1.9.30" },
{ name = "cos-python-sdk-v5", specifier = "==1.9.38" },
{ name = "esdk-obs-python", specifier = "==3.24.6.1" },
{ name = "google-cloud-storage", specifier = "==2.16.0" },
{ name = "opendal", specifier = "~=0.46.0" },
@@ -1646,7 +1649,7 @@ vdb = [
{ name = "elasticsearch", specifier = "==8.14.0" },
{ name = "mo-vector", specifier = "~=0.1.13" },
{ name = "opensearch-py", specifier = "==2.4.0" },
{ name = "oracledb", specifier = "==3.0.0" },
{ name = "oracledb", specifier = "==3.3.0" },
{ name = "pgvecto-rs", extras = ["sqlalchemy"], specifier = "~=0.2.1" },
{ name = "pgvector", specifier = "==0.2.5" },
{ name = "pymilvus", specifier = "~=2.5.0" },
@@ -4079,23 +4082,23 @@ numpy = [
[[package]]
name = "oracledb"
version = "3.0.0"
version = "3.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cryptography" },
]
sdist = { url = "https://files.pythonhosted.org/packages/bf/39/712f797b75705c21148fa1d98651f63c2e5cc6876e509a0a9e2f5b406572/oracledb-3.0.0.tar.gz", hash = "sha256:64dc86ee5c032febc556798b06e7b000ef6828bb0252084f6addacad3363db85", size = 840431, upload-time = "2025-03-03T19:36:12.223Z" }
sdist = { url = "https://files.pythonhosted.org/packages/51/c9/fae18fa5d803712d188486f8e86ad4f4e00316793ca19745d7c11092c360/oracledb-3.3.0.tar.gz", hash = "sha256:e830d3544a1578296bcaa54c6e8c8ae10a58c7db467c528c4b27adbf9c8b4cb0", size = 811776, upload-time = "2025-07-29T22:34:10.489Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fa/bf/d872c4b3fc15cd3261fe0ea72b21d181700c92dbc050160e161654987062/oracledb-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:52daa9141c63dfa75c07d445e9bb7f69f43bfb3c5a173ecc48c798fe50288d26", size = 4312963, upload-time = "2025-03-03T19:36:32.576Z" },
{ url = "https://files.pythonhosted.org/packages/b1/ea/01ee29e76a610a53bb34fdc1030f04b7669c3f80b25f661e07850fc6160e/oracledb-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af98941789df4c6aaaf4338f5b5f6b7f2c8c3fe6f8d6a9382f177f350868747a", size = 2661536, upload-time = "2025-03-03T19:36:34.904Z" },
{ url = "https://files.pythonhosted.org/packages/3d/8e/ad380e34a46819224423b4773e58c350bc6269643c8969604097ced8c3bc/oracledb-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9812bb48865aaec35d73af54cd1746679f2a8a13cbd1412ab371aba2e39b3943", size = 2867461, upload-time = "2025-03-03T19:36:36.508Z" },
{ url = "https://files.pythonhosted.org/packages/96/09/ecc4384a27fd6e1e4de824ae9c160e4ad3aaebdaade5b4bdcf56a4d1ff63/oracledb-3.0.0-cp311-cp311-win32.whl", hash = "sha256:6c27fe0de64f2652e949eb05b3baa94df9b981a4a45fa7f8a991e1afb450c8e2", size = 1752046, upload-time = "2025-03-03T19:36:38.313Z" },
{ url = "https://files.pythonhosted.org/packages/62/e8/f34bde24050c6e55eeba46b23b2291f2dd7fd272fa8b322dcbe71be55778/oracledb-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:f922709672002f0b40997456f03a95f03e5712a86c61159951c5ce09334325e0", size = 2101210, upload-time = "2025-03-03T19:36:40.669Z" },
{ url = "https://files.pythonhosted.org/packages/6f/fc/24590c3a3d41e58494bd3c3b447a62835138e5f9b243d9f8da0cfb5da8dc/oracledb-3.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:acd0e747227dea01bebe627b07e958bf36588a337539f24db629dc3431d3f7eb", size = 4351993, upload-time = "2025-03-03T19:36:42.577Z" },
{ url = "https://files.pythonhosted.org/packages/b7/b6/1f3b0b7bb94d53e8857d77b2e8dbdf6da091dd7e377523e24b79dac4fd71/oracledb-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f8b402f77c22af031cd0051aea2472ecd0635c1b452998f511aa08b7350c90a4", size = 2532640, upload-time = "2025-03-03T19:36:45.066Z" },
{ url = "https://files.pythonhosted.org/packages/72/1a/1815f6c086ab49c00921cf155ff5eede5267fb29fcec37cb246339a5ce4d/oracledb-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:378a27782e9a37918bd07a5a1427a77cb6f777d0a5a8eac9c070d786f50120ef", size = 2765949, upload-time = "2025-03-03T19:36:47.47Z" },
{ url = "https://files.pythonhosted.org/packages/33/8d/208900f8d372909792ee70b2daad3f7361181e55f2217c45ed9dff658b54/oracledb-3.0.0-cp312-cp312-win32.whl", hash = "sha256:54a28c2cb08316a527cd1467740a63771cc1c1164697c932aa834c0967dc4efc", size = 1709373, upload-time = "2025-03-03T19:36:49.67Z" },
{ url = "https://files.pythonhosted.org/packages/0c/5e/c21754f19c896102793c3afec2277e2180aa7d505e4d7fcca24b52d14e4f/oracledb-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8289bad6d103ce42b140e40576cf0c81633e344d56e2d738b539341eacf65624", size = 2056452, upload-time = "2025-03-03T19:36:51.363Z" },
{ url = "https://files.pythonhosted.org/packages/3f/35/95d9a502fdc48ce1ef3a513ebd027488353441e15aa0448619abb3d09d32/oracledb-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d9adb74f837838e21898d938e3a725cf73099c65f98b0b34d77146b453e945e0", size = 3963945, upload-time = "2025-07-29T22:34:28.633Z" },
{ url = "https://files.pythonhosted.org/packages/16/a7/8f1ef447d995bb51d9fdc36356697afeceb603932f16410c12d52b2df1a4/oracledb-3.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b063d1007882570f170ebde0f364e78d4a70c8f015735cc900663278b9ceef7", size = 2449385, upload-time = "2025-07-29T22:34:30.592Z" },
{ url = "https://files.pythonhosted.org/packages/b3/fa/6a78480450bc7d256808d0f38ade3385735fb5a90dab662167b4257dcf94/oracledb-3.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:187728f0a2d161676b8c581a9d8f15d9631a8fea1e628f6d0e9fa2f01280cd22", size = 2634943, upload-time = "2025-07-29T22:34:33.142Z" },
{ url = "https://files.pythonhosted.org/packages/5b/90/ea32b569a45fb99fac30b96f1ac0fb38b029eeebb78357bc6db4be9dde41/oracledb-3.3.0-cp311-cp311-win32.whl", hash = "sha256:920f14314f3402c5ab98f2efc5932e0547e9c0a4ca9338641357f73844e3e2b1", size = 1483549, upload-time = "2025-07-29T22:34:35.015Z" },
{ url = "https://files.pythonhosted.org/packages/81/55/ae60f72836eb8531b630299f9ed68df3fe7868c6da16f820a108155a21f9/oracledb-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:825edb97976468db1c7e52c78ba38d75ce7e2b71a2e88f8629bcf02be8e68a8a", size = 1834737, upload-time = "2025-07-29T22:34:36.824Z" },
{ url = "https://files.pythonhosted.org/packages/08/a8/f6b7809d70e98e113786d5a6f1294da81c046d2fa901ad656669fc5d7fae/oracledb-3.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9d25e37d640872731ac9b73f83cbc5fc4743cd744766bdb250488caf0d7696a8", size = 3943512, upload-time = "2025-07-29T22:34:39.237Z" },
{ url = "https://files.pythonhosted.org/packages/df/b9/8145ad8991f4864d3de4a911d439e5bc6cdbf14af448f3ab1e846a54210c/oracledb-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0bf7cdc2b668f939aa364f552861bc7a149d7cd3f3794730d43ef07613b2bf9", size = 2276258, upload-time = "2025-07-29T22:34:41.547Z" },
{ url = "https://files.pythonhosted.org/packages/56/bf/f65635ad5df17d6e4a2083182750bb136ac663ff0e9996ce59d77d200f60/oracledb-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fe20540fde64a6987046807ea47af93be918fd70b9766b3eb803c01e6d4202e", size = 2458811, upload-time = "2025-07-29T22:34:44.648Z" },
{ url = "https://files.pythonhosted.org/packages/7d/30/e0c130b6278c10b0e6cd77a3a1a29a785c083c549676cf701c5d180b8e63/oracledb-3.3.0-cp312-cp312-win32.whl", hash = "sha256:db080be9345cbf9506ffdaea3c13d5314605355e76d186ec4edfa49960ffb813", size = 1445525, upload-time = "2025-07-29T22:34:46.603Z" },
{ url = "https://files.pythonhosted.org/packages/1a/5c/7254f5e1a33a5d6b8bf6813d4f4fdcf5c4166ec8a7af932d987879d5595c/oracledb-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:be81e3afe79f6c8ece79a86d6067ad1572d2992ce1c590a086f3755a09535eb4", size = 1789976, upload-time = "2025-07-29T22:34:48.5Z" },
]
[[package]]

View File

@@ -45,7 +45,7 @@ APP_WEB_URL=
# Recommendation: use a dedicated domain (e.g., https://upload.example.com).
# Alternatively, use http://<your-ip>:5001 or http://api:5001,
# ensuring port 5001 is externally accessible (see docker-compose.yaml).
FILES_URL=http://api:5001
FILES_URL=
# INTERNAL_FILES_URL is used for plugin daemon communication within Docker network.
# Set this to the internal Docker service URL for proper plugin file access.

View File

@@ -10,7 +10,7 @@ x-shared-env: &shared-api-worker-env
SERVICE_API_URL: ${SERVICE_API_URL:-}
APP_API_URL: ${APP_API_URL:-}
APP_WEB_URL: ${APP_WEB_URL:-}
FILES_URL: ${FILES_URL:-http://api:5001}
FILES_URL: ${FILES_URL:-}
INTERNAL_FILES_URL: ${INTERNAL_FILES_URL:-}
LANG: ${LANG:-en_US.UTF-8}
LC_ALL: ${LC_ALL:-en_US.UTF-8}

View File

@@ -1,5 +1,5 @@
import json
from typing import Literal
from typing import IO, Literal
import requests

View File

@@ -65,13 +65,40 @@ const DatasetConfig: FC = () => {
const onRemove = (id: string) => {
const filteredDataSets = dataSet.filter(item => item.id !== id)
setDataSet(filteredDataSets)
const retrievalConfig = getMultipleRetrievalConfig(datasetConfigs as any, filteredDataSets, dataSet, {
const { datasets, retrieval_model, score_threshold_enabled, ...restConfigs } = datasetConfigs
const {
top_k,
score_threshold,
reranking_model,
reranking_mode,
weights,
reranking_enable,
} = restConfigs
const oldRetrievalConfig = {
top_k,
score_threshold,
reranking_model: (reranking_model.reranking_provider_name && reranking_model.reranking_model_name) ? {
provider: reranking_model.reranking_provider_name,
model: reranking_model.reranking_model_name,
} : undefined,
reranking_mode,
weights,
reranking_enable,
}
const retrievalConfig = getMultipleRetrievalConfig(oldRetrievalConfig, filteredDataSets, dataSet, {
provider: currentRerankProvider?.provider,
model: currentRerankModel?.model,
})
setDatasetConfigs({
...(datasetConfigs as any),
...datasetConfigsRef.current,
...retrievalConfig,
reranking_model: {
reranking_provider_name: retrievalConfig?.reranking_model?.provider || '',
reranking_model_name: retrievalConfig?.reranking_model?.model || '',
},
retrieval_model,
score_threshold_enabled,
datasets,
})
const {
allExternal,

View File

@@ -30,11 +30,11 @@ import { noop } from 'lodash-es'
type Props = {
datasetConfigs: DatasetConfigs
onChange: (configs: DatasetConfigs, isRetrievalModeChange?: boolean) => void
selectedDatasets?: DataSet[]
isInWorkflow?: boolean
singleRetrievalModelConfig?: ModelConfig
onSingleRetrievalModelChange?: (config: ModelConfig) => void
onSingleRetrievalModelParamsChange?: (config: ModelConfig) => void
selectedDatasets?: DataSet[]
}
const ConfigContent: FC<Props> = ({
@@ -61,22 +61,28 @@ const ConfigContent: FC<Props> = ({
const {
modelList: rerankModelList,
currentModel: validDefaultRerankModel,
currentProvider: validDefaultRerankProvider,
} = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.rerank)
/**
* If reranking model is set and is valid, use the reranking model
* Otherwise, check if the default reranking model is valid
*/
const {
currentModel: currentRerankModel,
} = useCurrentProviderAndModel(
rerankModelList,
{
provider: datasetConfigs.reranking_model?.reranking_provider_name,
model: datasetConfigs.reranking_model?.reranking_model_name,
provider: datasetConfigs.reranking_model?.reranking_provider_name || validDefaultRerankProvider?.provider || '',
model: datasetConfigs.reranking_model?.reranking_model_name || validDefaultRerankModel?.model || '',
},
)
const rerankModel = useMemo(() => {
return {
provider_name: datasetConfigs?.reranking_model?.reranking_provider_name ?? '',
model_name: datasetConfigs?.reranking_model?.reranking_model_name ?? '',
provider_name: datasetConfigs.reranking_model?.reranking_provider_name ?? '',
model_name: datasetConfigs.reranking_model?.reranking_model_name ?? '',
}
}, [datasetConfigs.reranking_model])
@@ -135,7 +141,7 @@ const ConfigContent: FC<Props> = ({
})
}
const model = singleRetrievalConfig
const model = singleRetrievalConfig // Legacy code, for compatibility, have to keep it
const rerankingModeOptions = [
{
@@ -158,7 +164,7 @@ const ConfigContent: FC<Props> = ({
const canManuallyToggleRerank = useMemo(() => {
return (selectedDatasetsMode.allInternal && selectedDatasetsMode.allEconomic)
|| selectedDatasetsMode.allExternal
|| selectedDatasetsMode.allExternal
}, [selectedDatasetsMode.allEconomic, selectedDatasetsMode.allExternal, selectedDatasetsMode.allInternal])
const showRerankModel = useMemo(() => {
@@ -168,7 +174,7 @@ const ConfigContent: FC<Props> = ({
return datasetConfigs.reranking_enable
}, [datasetConfigs.reranking_enable, canManuallyToggleRerank])
const handleDisabledSwitchClick = useCallback((enable: boolean) => {
const handleManuallyToggleRerank = useCallback((enable: boolean) => {
if (!currentRerankModel && enable)
Toast.notify({ type: 'error', message: t('workflow.errorMsg.rerankModelRequired') })
onChange({
@@ -255,12 +261,11 @@ const ConfigContent: FC<Props> = ({
<div className='mt-2'>
<div className='flex items-center'>
{
selectedDatasetsMode.allEconomic && !selectedDatasetsMode.mixtureInternalAndExternal && (
canManuallyToggleRerank && (
<Switch
size='md'
defaultValue={showRerankModel}
disabled={!canManuallyToggleRerank}
onChange={handleDisabledSwitchClick}
onChange={handleManuallyToggleRerank}
/>
)
}

View File

@@ -284,18 +284,28 @@ const Configuration: FC = () => {
setRerankSettingModalOpen(true)
const { datasets, retrieval_model, score_threshold_enabled, ...restConfigs } = datasetConfigs
const {
top_k,
score_threshold,
reranking_model,
reranking_mode,
weights,
reranking_enable,
} = restConfigs
const retrievalConfig = getMultipleRetrievalConfig({
top_k: restConfigs.top_k,
score_threshold: restConfigs.score_threshold,
reranking_model: restConfigs.reranking_model && {
provider: restConfigs.reranking_model.reranking_provider_name,
model: restConfigs.reranking_model.reranking_model_name,
},
reranking_mode: restConfigs.reranking_mode,
weights: restConfigs.weights,
reranking_enable: restConfigs.reranking_enable,
}, newDatasets, dataSets, {
const oldRetrievalConfig = {
top_k,
score_threshold,
reranking_model: (reranking_model.reranking_provider_name && reranking_model.reranking_model_name) ? {
provider: reranking_model.reranking_provider_name,
model: reranking_model.reranking_model_name,
} : undefined,
reranking_mode,
weights,
reranking_enable,
}
const retrievalConfig = getMultipleRetrievalConfig(oldRetrievalConfig, newDatasets, dataSets, {
provider: currentRerankProvider?.provider,
model: currentRerankModel?.model,
})

View File

@@ -160,8 +160,13 @@ const Chat: FC<ChatProps> = ({
})
useEffect(() => {
window.addEventListener('resize', debounce(handleWindowResize))
return () => window.removeEventListener('resize', handleWindowResize)
const debouncedHandler = debounce(handleWindowResize, 200)
window.addEventListener('resize', debouncedHandler)
return () => {
window.removeEventListener('resize', debouncedHandler)
debouncedHandler.cancel()
}
}, [handleWindowResize])
useEffect(() => {
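
The resize-listener change above follows a standard cleanup pattern: register one stable debounced reference, remove that same reference on unmount, and cancel any trailing call still scheduled. Below is a minimal standalone sketch of that pattern, assuming lodash-es's `debounce` (which the component already imports); the hook name `useDebouncedResize` is illustrative and not part of the codebase.

```ts
import { useEffect } from 'react'
import { debounce } from 'lodash-es'

// Keep a single debounced reference so that:
//  1. removeEventListener receives the exact function that was registered, and
//  2. cancel() drops any trailing invocation still pending at unmount.
export function useDebouncedResize(onResize: () => void, wait = 200) {
  useEffect(() => {
    const debounced = debounce(onResize, wait)
    window.addEventListener('resize', debounced)
    return () => {
      window.removeEventListener('resize', debounced)
      debounced.cancel()
    }
  }, [onResize, wait])
}
```

The previous code registered an inline `debounce(handleWindowResize)` but tried to remove the raw handler in cleanup, so the debounced listener was never actually detached.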

View File

@@ -1,6 +1,6 @@
.dot-flashing {
position: relative;
animation: 1s infinite linear alternate;
animation: dot-flashing 1s infinite linear alternate;
animation-delay: 0.5s;
}
@@ -10,7 +10,7 @@
display: inline-block;
position: absolute;
top: 0;
animation: 1s infinite linear alternate;
animation: dot-flashing 1s infinite linear alternate;
}
.dot-flashing::before {
@@ -51,15 +51,21 @@
border-radius: 50%;
background-color: #667085;
color: #667085;
animation-name: dot-flashing;
animation: dot-flashing 1s infinite linear alternate;
}
.text {
animation-delay: 0.5s;
}
.text::before {
left: -7px;
animation-delay: 0s;
}
.text::after {
left: 7px;
animation-delay: 1s;
}
.avatar,
@@ -70,13 +76,19 @@
border-radius: 50%;
background-color: #155EEF;
color: #155EEF;
animation-name: dot-flashing-avatar;
animation: dot-flashing-avatar 1s infinite linear alternate;
}
.avatar {
animation-delay: 0.5s;
}
.avatar::before {
left: -5px;
animation-delay: 0s;
}
.avatar::after {
left: 5px;
animation-delay: 1s;
}

View File

@@ -40,7 +40,7 @@ const RetrievalMethodConfig: FC<Props> = ({
onChange({
...value,
search_method: retrieveMethod,
...(!value.reranking_model.reranking_model_name
...((!value.reranking_model.reranking_model_name || !value.reranking_model.reranking_provider_name)
? {
reranking_model: {
reranking_provider_name: isRerankDefaultModelValid ? rerankDefaultModel?.provider?.provider ?? '' : '',
@@ -57,7 +57,7 @@ const RetrievalMethodConfig: FC<Props> = ({
onChange({
...value,
search_method: retrieveMethod,
...(!value.reranking_model.reranking_model_name
...((!value.reranking_model.reranking_model_name || !value.reranking_model.reranking_provider_name)
? {
reranking_model: {
reranking_provider_name: isRerankDefaultModelValid ? rerankDefaultModel?.provider?.provider ?? '' : '',

View File

@@ -54,7 +54,7 @@ const RetrievalParamConfig: FC<Props> = ({
},
)
const handleDisabledSwitchClick = useCallback((enable: boolean) => {
const handleToggleRerankEnable = useCallback((enable: boolean) => {
if (enable && !currentModel)
Toast.notify({ type: 'error', message: t('workflow.errorMsg.rerankModelRequired') })
onChange({
@@ -119,7 +119,7 @@ const RetrievalParamConfig: FC<Props> = ({
<Switch
size='md'
defaultValue={value.reranking_enable}
onChange={handleDisabledSwitchClick}
onChange={handleToggleRerankEnable}
/>
)}
<div className='flex items-center'>

View File

@@ -52,7 +52,7 @@ const InstallFromMarketplace = ({
<div className='flex items-center justify-between'>
<div className='system-md-semibold flex cursor-pointer items-center gap-1 text-text-primary' onClick={() => setCollapse(!collapse)}>
<RiArrowDownSLine className={cn('h-4 w-4', collapse && '-rotate-90')} />
{t('common.modelProvider.installProvider')}
{t('common.modelProvider.installDataSourceProvider')}
</div>
<div className='mb-2 flex items-center pt-2'>
<span className='system-sm-regular pr-1 text-text-tertiary'>{t('common.modelProvider.discoverMore')}</span>

View File

@@ -323,15 +323,18 @@ export const useRefreshModel = () => {
const { eventEmitter } = useEventEmitterContextContext()
const updateModelProviders = useUpdateModelProviders()
const updateModelList = useUpdateModelList()
const handleRefreshModel = useCallback((provider: ModelProvider, configurationMethod: ConfigurationMethodEnum, CustomConfigurationModelFixedFields?: CustomConfigurationModelFixedFields) => {
const handleRefreshModel = useCallback((
provider: ModelProvider,
CustomConfigurationModelFixedFields?: CustomConfigurationModelFixedFields,
refreshModelList?: boolean,
) => {
updateModelProviders()
provider.supported_model_types.forEach((type) => {
updateModelList(type)
})
if (configurationMethod === ConfigurationMethodEnum.customizableModel
&& provider.custom_configuration.status === CustomConfigurationStatusEnum.active) {
if (refreshModelList && provider.custom_configuration.status === CustomConfigurationStatusEnum.active) {
eventEmitter?.emit({
type: UPDATE_MODEL_PROVIDER_CUSTOM_MODEL_LIST,
payload: provider.provider,

View File

@@ -90,7 +90,7 @@ export const useAuth = (
type: 'success',
message: t('common.api.actionSuccess'),
})
handleRefreshModel(provider, configurationMethod, undefined)
handleRefreshModel(provider, undefined, true)
}
finally {
handleSetDoingAction(false)
@@ -125,7 +125,7 @@ export const useAuth = (
type: 'success',
message: t('common.api.actionSuccess'),
})
handleRefreshModel(provider, configurationMethod, undefined)
handleRefreshModel(provider, undefined, true)
onRemove?.(pendingOperationCredentialId.current ?? '')
closeConfirmDelete()
}
@@ -147,7 +147,7 @@ export const useAuth = (
if (res.result === 'success') {
notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
handleRefreshModel(provider, configurationMethod, undefined)
handleRefreshModel(provider, undefined, !payload.credential_id)
}
}
finally {

View File

@@ -159,7 +159,7 @@ const ModelLoadBalancingModal = ({
)
if (res.result === 'success') {
notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
handleRefreshModel(provider, configurateMethod, currentCustomConfigurationModelFixedFields)
handleRefreshModel(provider, currentCustomConfigurationModelFixedFields, false)
onSave?.(provider.provider)
onClose?.()
}

View File

@@ -17,7 +17,7 @@ import CardMoreInfo from '@/app/components/plugins/card/card-more-info'
import PluginDetailPanel from '@/app/components/plugins/plugin-detail-panel'
import MCPList from './mcp'
import { useAllToolProviders } from '@/service/use-tools'
import { useInstalledPluginList, useInvalidateInstalledPluginList } from '@/service/use-plugins'
import { useCheckInstalled, useInvalidateInstalledPluginList } from '@/service/use-plugins'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { ToolTypeEnum } from '../workflow/block-selector/types'
import { useMarketplace } from './marketplace/hooks'
@@ -77,12 +77,14 @@ const ProviderList = () => {
const currentProvider = useMemo<Collection | undefined>(() => {
return filteredCollectionList.find(collection => collection.id === currentProviderId)
}, [currentProviderId, filteredCollectionList])
const { data: pluginList } = useInstalledPluginList()
const { data: checkedInstalledData } = useCheckInstalled({
pluginIds: currentProvider?.plugin_id ? [currentProvider.plugin_id] : [],
enabled: !!currentProvider?.plugin_id,
})
const invalidateInstalledPluginList = useInvalidateInstalledPluginList()
const currentPluginDetail = useMemo(() => {
const detail = pluginList?.plugins.find(plugin => plugin.plugin_id === currentProvider?.plugin_id)
return detail
}, [currentProvider?.plugin_id, pluginList?.plugins])
return checkedInstalledData?.plugins?.[0]
}, [checkedInstalledData])
const toolListTailRef = useRef<HTMLDivElement>(null)
const showMarketplacePanel = useCallback(() => {

View File

@@ -86,7 +86,10 @@ const OptionCard = memo(({
readonly && 'cursor-not-allowed',
wrapperClassName && (typeof wrapperClassName === 'function' ? wrapperClassName(isActive) : wrapperClassName),
)}
onClick={() => !readonly && enableSelect && id && onClick?.(id)}
onClick={(e) => {
e.stopPropagation()
!readonly && enableSelect && id && onClick?.(id)
}}
>
<div className={cn(
'relative flex rounded-t-xl p-2',

View File

@@ -2,6 +2,7 @@ import type { NodeDefault } from '../../types'
import type { KnowledgeBaseNodeType } from './types'
import { genNodeMetaData } from '@/app/components/workflow/utils'
import { BlockEnum } from '@/app/components/workflow/types'
import { IndexingType } from '@/app/components/datasets/create/step-two'
const metaData = genNodeMetaData({
sort: 3.1,
@@ -27,8 +28,17 @@ const nodeDefault: NodeDefault<KnowledgeBaseNodeType> = {
chunk_structure,
indexing_technique,
retrieval_model,
embedding_model,
embedding_model_provider,
index_chunk_variable_selector,
} = payload
const {
search_method,
reranking_enable,
reranking_model,
} = retrieval_model || {}
if (!chunk_structure) {
return {
isValid: false,
@@ -36,6 +46,13 @@ const nodeDefault: NodeDefault<KnowledgeBaseNodeType> = {
}
}
if (index_chunk_variable_selector.length === 0) {
return {
isValid: false,
errorMessage: t('workflow.nodes.knowledgeBase.chunksVariableIsRequired'),
}
}
if (!indexing_technique) {
return {
isValid: false,
@@ -43,13 +60,27 @@ const nodeDefault: NodeDefault<KnowledgeBaseNodeType> = {
}
}
if (!retrieval_model || !retrieval_model.search_method) {
if (indexing_technique === IndexingType.QUALIFIED && (!embedding_model || !embedding_model_provider)) {
return {
isValid: false,
errorMessage: t('workflow.nodes.knowledgeBase.embeddingModelIsRequired'),
}
}
if (!retrieval_model || !search_method) {
return {
isValid: false,
errorMessage: t('workflow.nodes.knowledgeBase.retrievalSettingIsRequired'),
}
}
if (reranking_enable && (!reranking_model || !reranking_model.reranking_provider_name || !reranking_model.reranking_model_name)) {
return {
isValid: false,
errorMessage: t('workflow.nodes.knowledgeBase.rerankingModelIsRequired'),
}
}
return {
isValid: true,
errorMessage: '',

View File

@@ -9,13 +9,17 @@ import {
ChunkStructureEnum,
IndexMethodEnum,
RetrievalSearchMethodEnum,
WeightedScoreEnum,
} from '../types'
import type {
HybridSearchModeEnum,
KnowledgeBaseNodeType,
RerankingModel,
} from '../types'
import {
HybridSearchModeEnum,
} from '../types'
import { isHighQualitySearchMethod } from '../utils'
import { DEFAULT_WEIGHTED_SCORE, RerankingModeEnum } from '@/models/datasets'
export const useConfig = (id: string) => {
const store = useStoreApi()
@@ -35,6 +39,25 @@ export const useConfig = (id: string) => {
})
}, [id, handleNodeDataUpdateWithSyncDraft])
const getDefaultWeights = useCallback(({
embeddingModel,
embeddingModelProvider,
}: {
embeddingModel: string
embeddingModelProvider: string
}) => {
return {
vector_setting: {
vector_weight: DEFAULT_WEIGHTED_SCORE.other.semantic,
embedding_provider_name: embeddingModelProvider || '',
embedding_model_name: embeddingModel,
},
keyword_setting: {
keyword_weight: DEFAULT_WEIGHTED_SCORE.other.keyword,
},
}
}, [])
const handleChunkStructureChange = useCallback((chunkStructure: ChunkStructureEnum) => {
const nodeData = getNodeData()
const {
@@ -80,39 +103,72 @@ export const useConfig = (id: string) => {
embeddingModelProvider: string
}) => {
const nodeData = getNodeData()
handleNodeDataUpdate({
const defaultWeights = getDefaultWeights({
embeddingModel,
embeddingModelProvider,
})
const changeData = {
embedding_model: embeddingModel,
embedding_model_provider: embeddingModelProvider,
retrieval_model: {
...nodeData?.data.retrieval_model,
vector_setting: {
...nodeData?.data.retrieval_model.vector_setting,
embedding_provider_name: embeddingModelProvider,
embedding_model_name: embeddingModel,
},
},
})
}, [getNodeData, handleNodeDataUpdate])
}
if (changeData.retrieval_model.weights) {
changeData.retrieval_model = {
...changeData.retrieval_model,
weights: {
...changeData.retrieval_model.weights,
vector_setting: {
...changeData.retrieval_model.weights.vector_setting,
embedding_provider_name: embeddingModelProvider,
embedding_model_name: embeddingModel,
},
},
}
}
else {
changeData.retrieval_model = {
...changeData.retrieval_model,
weights: defaultWeights,
}
}
handleNodeDataUpdate(changeData)
}, [getNodeData, getDefaultWeights, handleNodeDataUpdate])
const handleRetrievalSearchMethodChange = useCallback((searchMethod: RetrievalSearchMethodEnum) => {
const nodeData = getNodeData()
handleNodeDataUpdate({
const changeData = {
retrieval_model: {
...nodeData?.data.retrieval_model,
search_method: searchMethod,
reranking_mode: nodeData?.data.retrieval_model.reranking_mode || RerankingModeEnum.RerankingModel,
},
})
}
if (searchMethod === RetrievalSearchMethodEnum.hybrid) {
changeData.retrieval_model = {
...changeData.retrieval_model,
reranking_enable: changeData.retrieval_model.reranking_mode === RerankingModeEnum.RerankingModel,
}
}
handleNodeDataUpdate(changeData)
}, [getNodeData, handleNodeDataUpdate])
const handleHybridSearchModeChange = useCallback((hybridSearchMode: HybridSearchModeEnum) => {
const nodeData = getNodeData()
const defaultWeights = getDefaultWeights({
embeddingModel: nodeData?.data.embedding_model || '',
embeddingModelProvider: nodeData?.data.embedding_model_provider || '',
})
handleNodeDataUpdate({
retrieval_model: {
...nodeData?.data.retrieval_model,
reranking_mode: hybridSearchMode,
reranking_enable: hybridSearchMode === HybridSearchModeEnum.RerankingModel,
weights: nodeData?.data.retrieval_model.weights || defaultWeights,
},
})
}, [getNodeData, handleNodeDataUpdate])
}, [getNodeData, getDefaultWeights, handleNodeDataUpdate])
const handleRerankingModelEnabledChange = useCallback((rerankingModelEnabled: boolean) => {
const nodeData = getNodeData()
@@ -130,11 +186,10 @@ export const useConfig = (id: string) => {
retrieval_model: {
...nodeData?.data.retrieval_model,
weights: {
weight_type: 'weighted_score',
weight_type: WeightedScoreEnum.Customized,
vector_setting: {
...nodeData?.data.retrieval_model.weights?.vector_setting,
vector_weight: weightedScore.value[0],
embedding_provider_name: '',
embedding_model_name: '',
},
keyword_setting: {
keyword_weight: weightedScore.value[1],

View File

@@ -28,9 +28,9 @@ const Node: FC<NodeProps<KnowledgeBaseNodeType>> = ({ data }) => {
</div>
<div
className='system-xs-medium grow truncate text-right text-text-secondary'
title={data.retrieval_model.search_method}
title={data.retrieval_model?.search_method}
>
{settingsDisplay[data.retrieval_model.search_method as keyof typeof settingsDisplay]}
{settingsDisplay[data.retrieval_model?.search_method as keyof typeof settingsDisplay]}
</div>
</div>
</div>

View File

@@ -1,6 +1,6 @@
'use client'
import type { FC } from 'react'
import React, { useCallback, useState } from 'react'
import React, { useCallback, useMemo } from 'react'
import { RiEqualizer2Line } from '@remixicon/react'
import { useTranslation } from 'react-i18next'
import type { MultipleRetrievalConfig, SingleRetrievalConfig } from '../types'
@@ -14,8 +14,6 @@ import {
import ConfigRetrievalContent from '@/app/components/app/configuration/dataset-config/params-config/config-content'
import { RETRIEVE_TYPE } from '@/types/app'
import { DATASET_DEFAULT } from '@/config'
import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import Button from '@/app/components/base/button'
import type { DatasetConfigs } from '@/models/debug'
import type { DataSet } from '@/models/datasets'
@@ -32,8 +30,8 @@ type Props = {
onSingleRetrievalModelChange?: (config: ModelConfig) => void
onSingleRetrievalModelParamsChange?: (config: ModelConfig) => void
readonly?: boolean
openFromProps?: boolean
onOpenFromPropsChange?: (openFromProps: boolean) => void
rerankModalOpen: boolean
onRerankModelOpenChange: (open: boolean) => void
selectedDatasets: DataSet[]
}
@@ -45,26 +43,52 @@ const RetrievalConfig: FC<Props> = ({
onSingleRetrievalModelChange,
onSingleRetrievalModelParamsChange,
readonly,
openFromProps,
onOpenFromPropsChange,
rerankModalOpen,
onRerankModelOpenChange,
selectedDatasets,
}) => {
const { t } = useTranslation()
const [open, setOpen] = useState(false)
const mergedOpen = openFromProps !== undefined ? openFromProps : open
const { retrieval_mode, multiple_retrieval_config } = payload
const handleOpen = useCallback((newOpen: boolean) => {
setOpen(newOpen)
onOpenFromPropsChange?.(newOpen)
}, [onOpenFromPropsChange])
onRerankModelOpenChange(newOpen)
}, [onRerankModelOpenChange])
const {
currentProvider: validRerankDefaultProvider,
currentModel: validRerankDefaultModel,
} = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.rerank)
const datasetConfigs = useMemo(() => {
const {
reranking_model,
top_k,
score_threshold,
reranking_mode,
weights,
reranking_enable,
} = multiple_retrieval_config || {}
return {
retrieval_model: retrieval_mode,
reranking_model: (reranking_model?.provider && reranking_model?.model)
? {
reranking_provider_name: reranking_model?.provider,
reranking_model_name: reranking_model?.model,
}
: {
reranking_provider_name: '',
reranking_model_name: '',
},
top_k: top_k || DATASET_DEFAULT.top_k,
score_threshold_enabled: !(score_threshold === undefined || score_threshold === null),
score_threshold,
datasets: {
datasets: [],
},
reranking_mode,
weights,
reranking_enable,
}
}, [retrieval_mode, multiple_retrieval_config])
const { multiple_retrieval_config } = payload
const handleChange = useCallback((configs: DatasetConfigs, isRetrievalModeChange?: boolean) => {
// Legacy code, for compatibility, have to keep it
if (isRetrievalModeChange) {
onRetrievalModeChange(configs.retrieval_model)
return
@@ -72,13 +96,11 @@ const RetrievalConfig: FC<Props> = ({
onMultipleRetrievalConfigChange({
top_k: configs.top_k,
score_threshold: configs.score_threshold_enabled ? (configs.score_threshold ?? DATASET_DEFAULT.score_threshold) : null,
reranking_model: payload.retrieval_mode === RETRIEVE_TYPE.oneWay
reranking_model: retrieval_mode === RETRIEVE_TYPE.oneWay
? undefined
// eslint-disable-next-line sonarjs/no-nested-conditional
: (!configs.reranking_model?.reranking_provider_name
? {
provider: validRerankDefaultProvider?.provider || '',
model: validRerankDefaultModel?.model || '',
}
? undefined
: {
provider: configs.reranking_model?.reranking_provider_name,
model: configs.reranking_model?.reranking_model_name,
@@ -87,11 +109,11 @@ const RetrievalConfig: FC<Props> = ({
weights: configs.weights,
reranking_enable: configs.reranking_enable,
})
}, [onMultipleRetrievalConfigChange, payload.retrieval_mode, validRerankDefaultProvider, validRerankDefaultModel, onRetrievalModeChange])
}, [onMultipleRetrievalConfigChange, retrieval_mode, onRetrievalModeChange])
return (
<PortalToFollowElem
open={mergedOpen}
open={rerankModalOpen}
onOpenChange={handleOpen}
placement='bottom-end'
offset={{
@@ -102,14 +124,14 @@ const RetrievalConfig: FC<Props> = ({
onClick={() => {
if (readonly)
return
handleOpen(!mergedOpen)
handleOpen(!rerankModalOpen)
}}
>
<Button
variant='ghost'
size='small'
disabled={readonly}
className={cn(open && 'bg-components-button-ghost-bg-hover')}
className={cn(rerankModalOpen && 'bg-components-button-ghost-bg-hover')}
>
<RiEqualizer2Line className='mr-1 h-3.5 w-3.5' />
{t('dataset.retrievalSettings')}
@@ -118,35 +140,13 @@ const RetrievalConfig: FC<Props> = ({
<PortalToFollowElemContent style={{ zIndex: 1001 }}>
<div className='w-[404px] rounded-2xl border border-components-panel-border bg-components-panel-bg px-4 pb-4 pt-3 shadow-xl'>
<ConfigRetrievalContent
datasetConfigs={
{
retrieval_model: payload.retrieval_mode,
reranking_model: multiple_retrieval_config?.reranking_model?.provider
? {
reranking_provider_name: multiple_retrieval_config.reranking_model?.provider,
reranking_model_name: multiple_retrieval_config.reranking_model?.model,
}
: {
reranking_provider_name: '',
reranking_model_name: '',
},
top_k: multiple_retrieval_config?.top_k || DATASET_DEFAULT.top_k,
score_threshold_enabled: !(multiple_retrieval_config?.score_threshold === undefined || multiple_retrieval_config.score_threshold === null),
score_threshold: multiple_retrieval_config?.score_threshold,
datasets: {
datasets: [],
},
reranking_mode: multiple_retrieval_config?.reranking_mode,
weights: multiple_retrieval_config?.weights,
reranking_enable: multiple_retrieval_config?.reranking_enable,
}
}
datasetConfigs={datasetConfigs}
onChange={handleChange}
selectedDatasets={selectedDatasets}
isInWorkflow
singleRetrievalModelConfig={singleRetrievalModelConfig}
onSingleRetrievalModelChange={onSingleRetrievalModelChange}
onSingleRetrievalModelParamsChange={onSingleRetrievalModelParamsChange}
selectedDatasets={selectedDatasets}
/>
</div>
</PortalToFollowElemContent>

View File

@@ -1,6 +1,6 @@
import type { NodeDefault } from '../../types'
import type { KnowledgeRetrievalNodeType } from './types'
import { checkoutRerankModelConfigedInRetrievalSettings } from './utils'
import { checkoutRerankModelConfiguredInRetrievalSettings } from './utils'
import { DATASET_DEFAULT } from '@/config'
import { RETRIEVE_TYPE } from '@/types/app'
import { genNodeMetaData } from '@/app/components/workflow/utils'
@@ -36,7 +36,7 @@ const nodeDefault: NodeDefault<KnowledgeRetrievalNodeType> = {
const { _datasets, multiple_retrieval_config, retrieval_mode } = payload
if (retrieval_mode === RETRIEVE_TYPE.multiWay) {
const checked = checkoutRerankModelConfigedInRetrievalSettings(_datasets || [], multiple_retrieval_config)
const checked = checkoutRerankModelConfiguredInRetrievalSettings(_datasets || [], multiple_retrieval_config)
if (!errorMessages && !checked)
errorMessages = t(`${i18nPrefix}.errorMsg.fieldRequired`, { field: t(`${i18nPrefix}.errorMsg.fields.rerankModel`) })

View File

@@ -1,7 +1,6 @@
import type { FC } from 'react'
import {
memo,
useCallback,
useMemo,
} from 'react'
import { intersectionBy } from 'lodash-es'
@@ -53,10 +52,6 @@ const Panel: FC<NodePanelProps<KnowledgeRetrievalNodeType>> = ({
availableNumberNodesWithParent,
} = useConfig(id, data)
const handleOpenFromPropsChange = useCallback((openFromProps: boolean) => {
setRerankModelOpen(openFromProps)
}, [setRerankModelOpen])
const metadataList = useMemo(() => {
return intersectionBy(...selectedDatasets.filter((dataset) => {
return !!dataset.doc_metadata
@@ -68,7 +63,6 @@ const Panel: FC<NodePanelProps<KnowledgeRetrievalNodeType>> = ({
return (
<div className='pt-2'>
<div className='space-y-4 px-4 pb-2'>
{/* {JSON.stringify(inputs, null, 2)} */}
<Field
title={t(`${i18nPrefix}.queryVariable`)}
required
@@ -100,8 +94,8 @@ const Panel: FC<NodePanelProps<KnowledgeRetrievalNodeType>> = ({
onSingleRetrievalModelChange={handleModelChanged as any}
onSingleRetrievalModelParamsChange={handleCompletionParamsChange}
readonly={readOnly || !selectedDatasets.length}
openFromProps={rerankModelOpen}
onOpenFromPropsChange={handleOpenFromPropsChange}
rerankModalOpen={rerankModelOpen}
onRerankModelOpenChange={setRerankModelOpen}
selectedDatasets={selectedDatasets}
/>
{!readOnly && (<div className='h-3 w-px bg-divider-regular'></div>)}

View File

@@ -204,10 +204,11 @@ const useConfig = (id: string, payload: KnowledgeRetrievalNodeType) => {
const handleMultipleRetrievalConfigChange = useCallback((newConfig: MultipleRetrievalConfig) => {
const newInputs = produce(inputs, (draft) => {
draft.multiple_retrieval_config = getMultipleRetrievalConfig(newConfig!, selectedDatasets, selectedDatasets, {
const newMultipleRetrievalConfig = getMultipleRetrievalConfig(newConfig!, selectedDatasets, selectedDatasets, {
provider: currentRerankProvider?.provider,
model: currentRerankModel?.model,
})
draft.multiple_retrieval_config = newMultipleRetrievalConfig
})
setInputs(newInputs)
}, [inputs, setInputs, selectedDatasets, currentRerankModel, currentRerankProvider])
@@ -254,10 +255,11 @@ const useConfig = (id: string, payload: KnowledgeRetrievalNodeType) => {
if (payload.retrieval_mode === RETRIEVE_TYPE.multiWay && newDatasets.length > 0) {
const multipleRetrievalConfig = draft.multiple_retrieval_config
draft.multiple_retrieval_config = getMultipleRetrievalConfig(multipleRetrievalConfig!, newDatasets, selectedDatasets, {
const newMultipleRetrievalConfig = getMultipleRetrievalConfig(multipleRetrievalConfig!, newDatasets, selectedDatasets, {
provider: currentRerankProvider?.provider,
model: currentRerankModel?.model,
})
draft.multiple_retrieval_config = newMultipleRetrievalConfig
}
})
updateDatasetsDetail(newDatasets)

View File

@@ -10,6 +10,7 @@ import type {
import {
DEFAULT_WEIGHTED_SCORE,
RerankingModeEnum,
WeightedScoreEnum,
} from '@/models/datasets'
import { RETRIEVE_METHOD } from '@/types/app'
import { DATASET_DEFAULT } from '@/config'
@@ -93,10 +94,12 @@ export const getMultipleRetrievalConfig = (
multipleRetrievalConfig: MultipleRetrievalConfig,
selectedDatasets: DataSet[],
originalDatasets: DataSet[],
validRerankModel?: { provider?: string; model?: string },
fallbackRerankModel?: { provider?: string; model?: string }, // fallback rerank model
) => {
const shouldSetWeightDefaultValue = xorBy(selectedDatasets, originalDatasets, 'id').length > 0
const rerankModelIsValid = validRerankModel?.provider && validRerankModel?.model
// Check if the selected datasets are different from the original datasets
const isDatasetsChanged = xorBy(selectedDatasets, originalDatasets, 'id').length > 0
// Check if the rerank model is valid
const isFallbackRerankModelValid = !!(fallbackRerankModel?.provider && fallbackRerankModel?.model)
const {
allHighQuality,
@@ -125,14 +128,16 @@ export const getMultipleRetrievalConfig = (
reranking_mode,
reranking_model,
weights,
reranking_enable: ((allInternal && allEconomic) || allExternal) ? reranking_enable : shouldSetWeightDefaultValue,
reranking_enable,
}
const setDefaultWeights = () => {
result.weights = {
weight_type: WeightedScoreEnum.Customized,
vector_setting: {
vector_weight: allHighQualityVectorSearch
? DEFAULT_WEIGHTED_SCORE.allHighQualityVectorSearch.semantic
// eslint-disable-next-line sonarjs/no-nested-conditional
: allHighQualityFullTextSearch
? DEFAULT_WEIGHTED_SCORE.allHighQualityFullTextSearch.semantic
: DEFAULT_WEIGHTED_SCORE.other.semantic,
@@ -142,6 +147,7 @@ export const getMultipleRetrievalConfig = (
keyword_setting: {
keyword_weight: allHighQualityVectorSearch
? DEFAULT_WEIGHTED_SCORE.allHighQualityVectorSearch.keyword
// eslint-disable-next-line sonarjs/no-nested-conditional
: allHighQualityFullTextSearch
? DEFAULT_WEIGHTED_SCORE.allHighQualityFullTextSearch.keyword
: DEFAULT_WEIGHTED_SCORE.other.keyword,
@@ -149,65 +155,106 @@ export const getMultipleRetrievalConfig = (
}
}
if (allEconomic || mixtureHighQualityAndEconomic || inconsistentEmbeddingModel || allExternal || mixtureInternalAndExternal) {
/**
* In this case, user can manually toggle reranking
* So should keep the reranking_enable value
* But the default reranking_model should be set
*/
if ((allEconomic && allInternal) || allExternal) {
result.reranking_mode = RerankingModeEnum.RerankingModel
if (!result.reranking_model?.provider || !result.reranking_model?.model) {
if (rerankModelIsValid) {
result.reranking_enable = reranking_enable !== false
result.reranking_model = {
provider: validRerankModel?.provider || '',
model: validRerankModel?.model || '',
}
}
else {
result.reranking_model = {
provider: '',
model: '',
}
// Need to check if the reranking model should be set to default when first time initialized
if ((!result.reranking_model?.provider || !result.reranking_model?.model) && isFallbackRerankModelValid) {
result.reranking_model = {
provider: fallbackRerankModel.provider || '',
model: fallbackRerankModel.model || '',
}
}
else {
result.reranking_enable = reranking_enable !== false
}
result.reranking_enable = reranking_enable
}
/**
* In this case, reranking_enable must be true
* And if rerank model is not set, should set the default rerank model
*/
if (mixtureHighQualityAndEconomic || inconsistentEmbeddingModel || mixtureInternalAndExternal) {
result.reranking_mode = RerankingModeEnum.RerankingModel
// Need to check if the reranking model should be set to default when first time initialized
if ((!result.reranking_model?.provider || !result.reranking_model?.model) && isFallbackRerankModelValid) {
result.reranking_model = {
provider: fallbackRerankModel.provider || '',
model: fallbackRerankModel.model || '',
}
}
result.reranking_enable = true
}
/**
* In this case, user can choose to use weighted score or rerank model
* But if the reranking_mode is not initialized, should set the default rerank model and reranking_enable to true
* and set reranking_mode to reranking_model
*/
if (allHighQuality && !inconsistentEmbeddingModel && allInternal) {
// If not initialized, check if the default rerank model is valid
if (!reranking_mode) {
if (validRerankModel?.provider && validRerankModel?.model) {
if (isFallbackRerankModelValid) {
result.reranking_mode = RerankingModeEnum.RerankingModel
result.reranking_enable = reranking_enable !== false
result.reranking_enable = true
result.reranking_model = {
provider: validRerankModel.provider,
model: validRerankModel.model,
provider: fallbackRerankModel.provider || '',
model: fallbackRerankModel.model || '',
}
}
else {
result.reranking_mode = RerankingModeEnum.WeightedScore
result.reranking_enable = false
setDefaultWeights()
}
}
if (reranking_mode === RerankingModeEnum.WeightedScore && !weights)
setDefaultWeights()
if (reranking_mode === RerankingModeEnum.WeightedScore && weights && shouldSetWeightDefaultValue) {
if (rerankModelIsValid) {
result.reranking_mode = RerankingModeEnum.RerankingModel
result.reranking_enable = reranking_enable !== false
// After initialization, if datasets has no change, make sure the config has correct value
if (reranking_mode === RerankingModeEnum.WeightedScore) {
result.reranking_enable = false
if (!weights)
setDefaultWeights()
}
if (reranking_mode === RerankingModeEnum.RerankingModel) {
if ((!result.reranking_model?.provider || !result.reranking_model?.model) && isFallbackRerankModelValid) {
result.reranking_model = {
provider: validRerankModel.provider || '',
model: validRerankModel.model || '',
provider: fallbackRerankModel.provider || '',
model: fallbackRerankModel.model || '',
}
}
result.reranking_enable = true
}
// Need to check if reranking_mode should be set to reranking_model when datasets changed
if (reranking_mode === RerankingModeEnum.WeightedScore && weights && isDatasetsChanged) {
if ((result.reranking_model?.provider && result.reranking_model?.model) || isFallbackRerankModelValid) {
result.reranking_mode = RerankingModeEnum.RerankingModel
result.reranking_enable = true
// eslint-disable-next-line sonarjs/nested-control-flow
if ((!result.reranking_model?.provider || !result.reranking_model?.model) && isFallbackRerankModelValid) {
result.reranking_model = {
provider: fallbackRerankModel.provider || '',
model: fallbackRerankModel.model || '',
}
}
}
else {
setDefaultWeights()
}
}
if (reranking_mode === RerankingModeEnum.RerankingModel && !rerankModelIsValid && shouldSetWeightDefaultValue) {
// Need to switch to weighted score when reranking model is not valid and datasets changed
if (
reranking_mode === RerankingModeEnum.RerankingModel
&& (!result.reranking_model?.provider || !result.reranking_model?.model)
&& !isFallbackRerankModelValid
&& isDatasetsChanged
) {
result.reranking_mode = RerankingModeEnum.WeightedScore
result.reranking_enable = false
setDefaultWeights()
}
}
@@ -215,7 +262,7 @@ export const getMultipleRetrievalConfig = (
return result
}
export const checkoutRerankModelConfigedInRetrievalSettings = (
export const checkoutRerankModelConfiguredInRetrievalSettings = (
datasets: DataSet[],
multipleRetrievalConfig?: MultipleRetrievalConfig,
) => {
@@ -225,6 +272,7 @@ export const checkoutRerankModelConfigedInRetrievalSettings = (
const {
allEconomic,
allExternal,
allInternal,
} = getSelectedDatasetsMode(datasets)
const {
@@ -233,12 +281,8 @@ export const checkoutRerankModelConfigedInRetrievalSettings = (
reranking_model,
} = multipleRetrievalConfig
if (reranking_mode === RerankingModeEnum.RerankingModel && (!reranking_model?.provider || !reranking_model?.model)) {
if ((allEconomic || allExternal) && !reranking_enable)
return true
return false
}
if (reranking_mode === RerankingModeEnum.RerankingModel && (!reranking_model?.provider || !reranking_model?.model))
return ((allEconomic && allInternal) || allExternal) && !reranking_enable
return true
}
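
The rewritten branches above are easier to follow as a decision table. The following is a condensed, hypothetical sketch of the initialization rules spelled out in the comments (post-initialization adjustments, dataset-change handling, and the weighted-score defaults are omitted); the flag names mirror the `getSelectedDatasetsMode` result, everything else is illustrative rather than the actual implementation.

```ts
type RerankInit = {
  reranking_mode: 'reranking_model' | 'weighted_score'
  reranking_enable: boolean
  useFallbackModel: boolean // backfill reranking_model from the workspace default
}

function initReranking(flags: {
  allEconomic: boolean
  allInternal: boolean
  allExternal: boolean
  mixedOrInconsistent: boolean // mixed tiers, mixed internal/external, or inconsistent embedding models
  currentEnable: boolean       // stored reranking_enable
  modelConfigured: boolean     // reranking_model already has provider + model
  fallbackValid: boolean       // a default rerank model exists
}): RerankInit {
  const backfill = !flags.modelConfigured && flags.fallbackValid

  // Case 1: all-economic internal datasets, or all-external datasets —
  // the user may toggle reranking manually, so the stored flag is kept.
  if ((flags.allEconomic && flags.allInternal) || flags.allExternal)
    return { reranking_mode: 'reranking_model', reranking_enable: flags.currentEnable, useFallbackModel: backfill }

  // Case 2: mixed datasets — reranking is mandatory.
  if (flags.mixedOrInconsistent)
    return { reranking_mode: 'reranking_model', reranking_enable: true, useFallbackModel: backfill }

  // Case 3: all high-quality internal datasets — prefer the rerank model when a
  // valid default exists, otherwise fall back to weighted score with reranking off.
  return flags.fallbackValid
    ? { reranking_mode: 'reranking_model', reranking_enable: true, useFallbackModel: backfill }
    : { reranking_mode: 'weighted_score', reranking_enable: false, useFallbackModel: false }
}
```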

View File

@@ -493,6 +493,7 @@ const translation = {
toBeConfigured: 'To be configured',
configureTip: 'Set up api-key or add model to use',
installProvider: 'Install model providers',
installDataSourceProvider: 'Install data source providers',
discoverMore: 'Discover more in ',
emptyProviderTitle: 'Model provider not set up',
emptyProviderTip: 'Please install a model provider first.',

View File

@@ -955,7 +955,10 @@ const translation = {
aboutRetrieval: 'about retrieval method.',
chunkIsRequired: 'Chunk structure is required',
indexMethodIsRequired: 'Index method is required',
chunksVariableIsRequired: 'Chunks variable is required',
embeddingModelIsRequired: 'Embedding model is required',
retrievalSettingIsRequired: 'Retrieval setting is required',
rerankingModelIsRequired: 'Reranking model is required',
},
},
tracing: {

View File

@@ -484,6 +484,7 @@ const translation = {
emptyProviderTitle: 'モデルプロバイダーが設定されていません',
discoverMore: 'もっと発見する',
installProvider: 'モデルプロバイダーをインストールする',
installDataSourceProvider: 'データソースプロバイダーをインストールする',
configureTip: 'API キーを設定するか、使用するモデルを追加してください',
toBeConfigured: '設定中',
emptyProviderTip: '最初にモデルプロバイダーをインストールしてください。',

View File

@@ -487,6 +487,7 @@ const translation = {
toBeConfigured: '待配置',
configureTip: '请配置 API 密钥,添加模型。',
installProvider: '安装模型供应商',
installDataSourceProvider: '安装数据源供应商',
discoverMore: '发现更多就在',
emptyProviderTitle: '尚未安装模型供应商',
emptyProviderTip: '请安装模型供应商。',

View File

@@ -955,7 +955,10 @@ const translation = {
aboutRetrieval: '关于知识检索。',
chunkIsRequired: '分段结构是必需的',
indexMethodIsRequired: '索引方法是必需的',
chunksVariableIsRequired: 'Chunks 变量是必需的',
embeddingModelIsRequired: 'Embedding 模型是必需的',
retrievalSettingIsRequired: '检索设置是必需的',
rerankingModelIsRequired: 'Reranking 模型是必需的',
},
},
tracing: {

View File

@@ -158,7 +158,7 @@
"@chromatic-com/storybook": "^3.1.0",
"@eslint-react/eslint-plugin": "^1.15.0",
"@eslint/eslintrc": "^3.1.0",
"@eslint/js": "^9.20.0",
"@eslint/js": "^9.36.0",
"@faker-js/faker": "^9.0.3",
"@happy-dom/jest-environment": "^17.4.4",
"@mdx-js/loader": "^3.1.0",
@@ -177,7 +177,7 @@
"@storybook/react": "8.5.0",
"@storybook/test": "8.5.0",
"@testing-library/dom": "^10.4.0",
"@testing-library/jest-dom": "^6.6.2",
"@testing-library/jest-dom": "^6.8.0",
"@testing-library/react": "^16.0.1",
"@types/crypto-js": "^4.2.2",
"@types/dagre": "^0.7.52",

web/pnpm-lock.yaml generated
View File

@@ -393,8 +393,8 @@ importers:
specifier: ^3.1.0
version: 3.3.1
'@eslint/js':
specifier: ^9.20.0
version: 9.31.0
specifier: ^9.36.0
version: 9.36.0
'@faker-js/faker':
specifier: ^9.0.3
version: 9.9.0
@@ -450,8 +450,8 @@ importers:
specifier: ^10.4.0
version: 10.4.0
'@testing-library/jest-dom':
specifier: ^6.6.2
version: 6.6.3
specifier: ^6.8.0
version: 6.8.0
'@testing-library/react':
specifier: ^16.0.1
version: 16.3.0(@testing-library/dom@10.4.0)(@types/react-dom@19.1.7(@types/react@19.1.11))(@types/react@19.1.11)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)
@@ -593,8 +593,8 @@ importers:
packages:
'@adobe/css-tools@4.4.3':
resolution: {integrity: sha512-VQKMkwriZbaOgVCby1UDY/LDk5fIjhQicCvVPFqfe+69fWaPWydbWJ3wRt59/YzIwda1I81loas3oCoHxnqvdA==}
'@adobe/css-tools@4.4.4':
resolution: {integrity: sha512-Elp+iwUx5rN5+Y8xLt5/GRoG20WGoDCQ/1Fb+1LiGtvwbDavuSk0jhD/eZdckHAuzcDzccnkv+rEjyWfRx18gg==}
'@alloc/quick-lru@5.2.0':
resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==}
@@ -1660,14 +1660,14 @@ packages:
resolution: {integrity: sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
'@eslint/js@9.31.0':
resolution: {integrity: sha512-LOm5OVt7D4qiKCqoiPbA7LWmI+tbw1VbTUowBcUMgQSuM6poJufkFkYDcQpo5KfgD39TnNySV26QjOh7VFpSyw==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
'@eslint/js@9.35.0':
resolution: {integrity: sha512-30iXE9whjlILfWobBkNerJo+TXYsgVM5ERQwMcMKCHckHflCmf7wXDAHlARoWnh0s1U72WqlbeyE7iAcCzuCPw==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
'@eslint/js@9.36.0':
resolution: {integrity: sha512-uhCbYtYynH30iZErszX78U+nR3pJU3RHGQ57NXy5QupD4SBVwDeU8TNBy+MjMngc1UyIW9noKqsRqfjQTBU2dw==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
'@eslint/markdown@7.1.0':
resolution: {integrity: sha512-Y+X1B1j+/zupKDVJfkKc8uYMjQkGzfnd8lt7vK3y8x9Br6H5dBuhAfFrQ6ff7HAMm/1BwgecyEiRFkYCWPRxmA==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
@@ -3208,8 +3208,8 @@ packages:
resolution: {integrity: sha512-xGGHpBXYSHUUr6XsKBfs85TWlYKpTc37cSBBVrXcib2MkHLboWlkClhWF37JKlDb9KEq3dHs+f2xR7XJEWGBxA==}
engines: {node: '>=14', npm: '>=6', yarn: '>=1'}
'@testing-library/jest-dom@6.6.3':
resolution: {integrity: sha512-IteBhl4XqYNkM54f4ejhLRJiZNqcSCoXUOG2CPK7qbD322KjQozM4kHQOfkG2oln9b9HTYqs+Sae8vBATubxxA==}
'@testing-library/jest-dom@6.8.0':
resolution: {integrity: sha512-WgXcWzVM6idy5JaftTVC8Vs83NKRmGJz4Hqs4oyOuO2J4r/y79vvKZsb+CaGyCSEbUPI6OsewfPd0G1A0/TUZQ==}
engines: {node: '>=14', npm: '>=6', yarn: '>=1'}
'@testing-library/react@16.3.0':
@@ -8216,6 +8216,10 @@ packages:
resolution: {integrity: sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA==}
engines: {node: '>=12'}
strip-indent@4.1.0:
resolution: {integrity: sha512-OA95x+JPmL7kc7zCu+e+TeYxEiaIyndRx0OrBcK2QPPH09oAndr2ALvymxWA+Lx1PYYvFUm4O63pRkdJAaW96w==}
engines: {node: '>=12'}
strip-json-comments@3.1.1:
resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==}
engines: {node: '>=8'}
@@ -9006,7 +9010,7 @@ packages:
snapshots:
'@adobe/css-tools@4.4.3': {}
'@adobe/css-tools@4.4.4': {}
'@alloc/quick-lru@5.2.0': {}
@@ -10308,10 +10312,10 @@ snapshots:
transitivePeerDependencies:
- supports-color
'@eslint/js@9.31.0': {}
'@eslint/js@9.35.0': {}
'@eslint/js@9.36.0': {}
'@eslint/markdown@7.1.0':
dependencies:
'@eslint/core': 0.15.1
@@ -12175,7 +12179,7 @@ snapshots:
'@testing-library/jest-dom@6.5.0':
dependencies:
'@adobe/css-tools': 4.4.3
'@adobe/css-tools': 4.4.4
aria-query: 5.3.2
chalk: 3.0.0
css.escape: 1.5.1
@@ -12183,14 +12187,13 @@ snapshots:
lodash: 4.17.21
redent: 3.0.0
'@testing-library/jest-dom@6.6.3':
'@testing-library/jest-dom@6.8.0':
dependencies:
'@adobe/css-tools': 4.4.3
'@adobe/css-tools': 4.4.4
aria-query: 5.3.2
chalk: 3.0.0
css.escape: 1.5.1
dom-accessibility-api: 0.6.3
lodash: 4.17.21
picocolors: 1.1.1
redent: 3.0.0
'@testing-library/react@16.3.0(@testing-library/dom@10.4.0)(@types/react-dom@19.1.7(@types/react@19.1.11))(@types/react@19.1.11)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)':
@@ -17474,7 +17477,7 @@ snapshots:
'@types/resolve': 1.20.6
doctrine: 3.0.0
resolve: 1.22.10
strip-indent: 4.0.0
strip-indent: 4.1.0
transitivePeerDependencies:
- supports-color
@@ -18313,6 +18316,8 @@ snapshots:
dependencies:
min-indent: 1.0.1
strip-indent@4.1.0: {}
strip-json-comments@3.1.1: {}
style-loader@3.3.4(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)):