fluent api (#27093)

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Authored by Asuka Minato on 2025-10-19 12:54:41 +09:00; committed by GitHub
parent 59c1fde351
commit 4488c090b2
97 changed files with 2179 additions and 1798 deletions
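
The refactor replaces repeated parser.add_argument(...) statements with a single chained expression per parser. This works because add_argument() on reqparse.RequestParser returns the parser itself (as in flask-restful/flask-restx style reqparse). A minimal before/after sketch with illustrative arguments, not taken from the diff:

# Minimal sketch of the pattern this commit adopts. Assumes the reqparse in use
# behaves like flask-restx's, where RequestParser.add_argument() returns self.
from flask_restx import reqparse

# before: one statement per argument
example_parser = reqparse.RequestParser()
example_parser.add_argument("name", type=str, required=True, location="json")
example_parser.add_argument("limit", type=int, default=20, location="args")

# after: the same parser built as one fluent expression
example_parser = (
    reqparse.RequestParser()
    .add_argument("name", type=str, required=True, location="json")
    .add_argument("limit", type=int, default=20, location="args")
)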


@@ -33,119 +33,118 @@ def _validate_name(name):
# Define parsers for dataset operations
dataset_create_parser = reqparse.RequestParser()
dataset_create_parser.add_argument(
"name",
nullable=False,
required=True,
help="type is required. Name must be between 1 to 40 characters.",
type=_validate_name,
)
dataset_create_parser.add_argument(
"description",
type=validate_description_length,
nullable=True,
required=False,
default="",
)
dataset_create_parser.add_argument(
"indexing_technique",
type=str,
location="json",
choices=Dataset.INDEXING_TECHNIQUE_LIST,
help="Invalid indexing technique.",
)
dataset_create_parser.add_argument(
"permission",
type=str,
location="json",
choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM),
help="Invalid permission.",
required=False,
nullable=False,
)
dataset_create_parser.add_argument(
"external_knowledge_api_id",
type=str,
nullable=True,
required=False,
default="_validate_name",
)
dataset_create_parser.add_argument(
"provider",
type=str,
nullable=True,
required=False,
default="vendor",
)
dataset_create_parser.add_argument(
"external_knowledge_id",
type=str,
nullable=True,
required=False,
)
dataset_create_parser.add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json")
dataset_create_parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
dataset_create_parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
dataset_update_parser = reqparse.RequestParser()
dataset_update_parser.add_argument(
"name",
nullable=False,
help="type is required. Name must be between 1 to 40 characters.",
type=_validate_name,
)
dataset_update_parser.add_argument(
"description", location="json", store_missing=False, type=validate_description_length
)
dataset_update_parser.add_argument(
"indexing_technique",
type=str,
location="json",
choices=Dataset.INDEXING_TECHNIQUE_LIST,
nullable=True,
help="Invalid indexing technique.",
)
dataset_update_parser.add_argument(
"permission",
type=str,
location="json",
choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM),
help="Invalid permission.",
)
dataset_update_parser.add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.")
dataset_update_parser.add_argument(
"embedding_model_provider", type=str, location="json", help="Invalid embedding model provider."
)
dataset_update_parser.add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.")
dataset_update_parser.add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.")
dataset_update_parser.add_argument(
"external_retrieval_model",
type=dict,
required=False,
nullable=True,
location="json",
help="Invalid external retrieval model.",
)
dataset_update_parser.add_argument(
"external_knowledge_id",
type=str,
required=False,
nullable=True,
location="json",
help="Invalid external knowledge id.",
)
dataset_update_parser.add_argument(
"external_knowledge_api_id",
type=str,
required=False,
nullable=True,
location="json",
help="Invalid external knowledge api id.",
dataset_create_parser = (
reqparse.RequestParser()
.add_argument(
"name",
nullable=False,
required=True,
help="type is required. Name must be between 1 to 40 characters.",
type=_validate_name,
)
.add_argument(
"description",
type=validate_description_length,
nullable=True,
required=False,
default="",
)
.add_argument(
"indexing_technique",
type=str,
location="json",
choices=Dataset.INDEXING_TECHNIQUE_LIST,
help="Invalid indexing technique.",
)
.add_argument(
"permission",
type=str,
location="json",
choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM),
help="Invalid permission.",
required=False,
nullable=False,
)
.add_argument(
"external_knowledge_api_id",
type=str,
nullable=True,
required=False,
default="_validate_name",
)
.add_argument(
"provider",
type=str,
nullable=True,
required=False,
default="vendor",
)
.add_argument(
"external_knowledge_id",
type=str,
nullable=True,
required=False,
)
.add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json")
.add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
)
tag_create_parser = reqparse.RequestParser()
tag_create_parser.add_argument(
dataset_update_parser = (
reqparse.RequestParser()
.add_argument(
"name",
nullable=False,
help="type is required. Name must be between 1 to 40 characters.",
type=_validate_name,
)
.add_argument("description", location="json", store_missing=False, type=validate_description_length)
.add_argument(
"indexing_technique",
type=str,
location="json",
choices=Dataset.INDEXING_TECHNIQUE_LIST,
nullable=True,
help="Invalid indexing technique.",
)
.add_argument(
"permission",
type=str,
location="json",
choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM),
help="Invalid permission.",
)
.add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.")
.add_argument("embedding_model_provider", type=str, location="json", help="Invalid embedding model provider.")
.add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.")
.add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.")
.add_argument(
"external_retrieval_model",
type=dict,
required=False,
nullable=True,
location="json",
help="Invalid external retrieval model.",
)
.add_argument(
"external_knowledge_id",
type=str,
required=False,
nullable=True,
location="json",
help="Invalid external knowledge id.",
)
.add_argument(
"external_knowledge_api_id",
type=str,
required=False,
nullable=True,
location="json",
help="Invalid external knowledge api id.",
)
)
tag_create_parser = reqparse.RequestParser().add_argument(
"name",
nullable=False,
required=True,
@@ -155,32 +154,37 @@ tag_create_parser.add_argument(
else (_ for _ in ()).throw(ValueError("Name must be between 1 to 50 characters.")),
)
tag_update_parser = reqparse.RequestParser()
tag_update_parser.add_argument(
"name",
nullable=False,
required=True,
help="Name must be between 1 to 50 characters.",
type=lambda x: x
if x and 1 <= len(x) <= 50
else (_ for _ in ()).throw(ValueError("Name must be between 1 to 50 characters.")),
)
tag_update_parser.add_argument("tag_id", nullable=False, required=True, help="Id of a tag.", type=str)
tag_delete_parser = reqparse.RequestParser()
tag_delete_parser.add_argument("tag_id", nullable=False, required=True, help="Id of a tag.", type=str)
tag_binding_parser = reqparse.RequestParser()
tag_binding_parser.add_argument(
"tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required."
)
tag_binding_parser.add_argument(
"target_id", type=str, nullable=False, required=True, location="json", help="Target Dataset ID is required."
tag_update_parser = (
reqparse.RequestParser()
.add_argument(
"name",
nullable=False,
required=True,
help="Name must be between 1 to 50 characters.",
type=lambda x: x
if x and 1 <= len(x) <= 50
else (_ for _ in ()).throw(ValueError("Name must be between 1 to 50 characters.")),
)
.add_argument("tag_id", nullable=False, required=True, help="Id of a tag.", type=str)
)
tag_unbinding_parser = reqparse.RequestParser()
tag_unbinding_parser.add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.")
tag_unbinding_parser.add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.")
tag_delete_parser = reqparse.RequestParser().add_argument(
"tag_id", nullable=False, required=True, help="Id of a tag.", type=str
)
tag_binding_parser = (
reqparse.RequestParser()
.add_argument("tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required.")
.add_argument(
"target_id", type=str, nullable=False, required=True, location="json", help="Target Dataset ID is required."
)
)
tag_unbinding_parser = (
reqparse.RequestParser()
.add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.")
.add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.")
)
@service_api_ns.route("/datasets")
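
Call sites are unaffected by the fluent construction; a hypothetical handler (class and return value are illustrative, not from the diff) still parses requests the same way:

# Hypothetical usage sketch, not part of the commit: dataset_create_parser is
# the module-level parser built above; parse_args() behaves exactly as before.
from flask_restx import Resource

class ExampleDatasetApi(Resource):  # illustrative resource, not the real one
    def post(self):
        args = dataset_create_parser.parse_args()  # dict-like ParseResult
        return {"name": args["name"]}, 201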


@@ -35,37 +35,31 @@ from services.entities.knowledge_entities.knowledge_entities import KnowledgeCon
from services.file_service import FileService
# Define parsers for document operations
document_text_create_parser = reqparse.RequestParser()
document_text_create_parser.add_argument("name", type=str, required=True, nullable=False, location="json")
document_text_create_parser.add_argument("text", type=str, required=True, nullable=False, location="json")
document_text_create_parser.add_argument("process_rule", type=dict, required=False, nullable=True, location="json")
document_text_create_parser.add_argument("original_document_id", type=str, required=False, location="json")
document_text_create_parser.add_argument(
"doc_form", type=str, default="text_model", required=False, nullable=False, location="json"
)
document_text_create_parser.add_argument(
"doc_language", type=str, default="English", required=False, nullable=False, location="json"
)
document_text_create_parser.add_argument(
"indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json"
)
document_text_create_parser.add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json")
document_text_create_parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
document_text_create_parser.add_argument(
"embedding_model_provider", type=str, required=False, nullable=True, location="json"
document_text_create_parser = (
reqparse.RequestParser()
.add_argument("name", type=str, required=True, nullable=False, location="json")
.add_argument("text", type=str, required=True, nullable=False, location="json")
.add_argument("process_rule", type=dict, required=False, nullable=True, location="json")
.add_argument("original_document_id", type=str, required=False, location="json")
.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
.add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json")
.add_argument(
"indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json"
)
.add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json")
.add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
)
document_text_update_parser = reqparse.RequestParser()
document_text_update_parser.add_argument("name", type=str, required=False, nullable=True, location="json")
document_text_update_parser.add_argument("text", type=str, required=False, nullable=True, location="json")
document_text_update_parser.add_argument("process_rule", type=dict, required=False, nullable=True, location="json")
document_text_update_parser.add_argument(
"doc_form", type=str, default="text_model", required=False, nullable=False, location="json"
document_text_update_parser = (
reqparse.RequestParser()
.add_argument("name", type=str, required=False, nullable=True, location="json")
.add_argument("text", type=str, required=False, nullable=True, location="json")
.add_argument("process_rule", type=dict, required=False, nullable=True, location="json")
.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
.add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json")
.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
)
document_text_update_parser.add_argument(
"doc_language", type=str, default="English", required=False, nullable=False, location="json"
)
document_text_update_parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
@service_api_ns.route(


@@ -15,21 +15,17 @@ from services.entities.knowledge_entities.knowledge_entities import (
from services.metadata_service import MetadataService
# Define parsers for metadata APIs
metadata_create_parser = reqparse.RequestParser()
metadata_create_parser.add_argument(
"type", type=str, required=True, nullable=False, location="json", help="Metadata type"
)
metadata_create_parser.add_argument(
"name", type=str, required=True, nullable=False, location="json", help="Metadata name"
metadata_create_parser = (
reqparse.RequestParser()
.add_argument("type", type=str, required=True, nullable=False, location="json", help="Metadata type")
.add_argument("name", type=str, required=True, nullable=False, location="json", help="Metadata name")
)
metadata_update_parser = reqparse.RequestParser()
metadata_update_parser.add_argument(
metadata_update_parser = reqparse.RequestParser().add_argument(
"name", type=str, required=True, nullable=False, location="json", help="New metadata name"
)
document_metadata_parser = reqparse.RequestParser()
document_metadata_parser.add_argument(
document_metadata_parser = reqparse.RequestParser().add_argument(
"operation_data", type=list, required=True, nullable=False, location="json", help="Metadata operation data"
)


@@ -91,11 +91,13 @@ class DatasourceNodeRunApi(DatasetApiResource):
def post(self, tenant_id: str, dataset_id: str, node_id: str):
"""Resource for getting datasource plugins."""
# Get query parameter to determine published or draft
parser: RequestParser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
parser.add_argument("datasource_type", type=str, required=True, location="json")
parser.add_argument("credential_id", type=str, required=False, location="json")
parser.add_argument("is_published", type=bool, required=True, location="json")
parser: RequestParser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("credential_id", type=str, required=False, location="json")
.add_argument("is_published", type=bool, required=True, location="json")
)
args: ParseResult = parser.parse_args()
datasource_node_run_api_entity = DatasourceNodeRunApiEntity.model_validate(args)
@@ -147,19 +149,21 @@ class PipelineRunApi(DatasetApiResource):
)
def post(self, tenant_id: str, dataset_id: str):
"""Resource for running a rag pipeline."""
parser: RequestParser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
parser.add_argument("datasource_type", type=str, required=True, location="json")
parser.add_argument("datasource_info_list", type=list, required=True, location="json")
parser.add_argument("start_node_id", type=str, required=True, location="json")
parser.add_argument("is_published", type=bool, required=True, default=True, location="json")
parser.add_argument(
"response_mode",
type=str,
required=True,
choices=["streaming", "blocking"],
default="blocking",
location="json",
parser: RequestParser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("datasource_info_list", type=list, required=True, location="json")
.add_argument("start_node_id", type=str, required=True, location="json")
.add_argument("is_published", type=bool, required=True, default=True, location="json")
.add_argument(
"response_mode",
type=str,
required=True,
choices=["streaming", "blocking"],
default="blocking",
location="json",
)
)
args: ParseResult = parser.parse_args()
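
The annotated assignment above stays valid because the chained expression evaluates to the RequestParser it started from; a standalone sketch of that property (assuming flask-restx's reqparse, where add_argument() returns self):

# Standalone sketch, not part of the commit.
from flask_restx import reqparse
from flask_restx.reqparse import RequestParser

p: RequestParser = (
    reqparse.RequestParser()
    .add_argument("inputs", type=dict, required=True, nullable=False, location="json")
    .add_argument("is_published", type=bool, required=True, location="json")
)
assert isinstance(p, RequestParser)  # each add_argument() returned the same parser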


@@ -24,26 +24,34 @@ from services.errors.chunk import ChildChunkDeleteIndexError as ChildChunkDelete
from services.errors.chunk import ChildChunkIndexingError as ChildChunkIndexingServiceError
# Define parsers for segment operations
segment_create_parser = reqparse.RequestParser()
segment_create_parser.add_argument("segments", type=list, required=False, nullable=True, location="json")
segment_create_parser = reqparse.RequestParser().add_argument(
"segments", type=list, required=False, nullable=True, location="json"
)
segment_list_parser = reqparse.RequestParser()
segment_list_parser.add_argument("status", type=str, action="append", default=[], location="args")
segment_list_parser.add_argument("keyword", type=str, default=None, location="args")
segment_list_parser = (
reqparse.RequestParser()
.add_argument("status", type=str, action="append", default=[], location="args")
.add_argument("keyword", type=str, default=None, location="args")
)
segment_update_parser = reqparse.RequestParser()
segment_update_parser.add_argument("segment", type=dict, required=False, nullable=True, location="json")
segment_update_parser = reqparse.RequestParser().add_argument(
"segment", type=dict, required=False, nullable=True, location="json"
)
child_chunk_create_parser = reqparse.RequestParser()
child_chunk_create_parser.add_argument("content", type=str, required=True, nullable=False, location="json")
child_chunk_create_parser = reqparse.RequestParser().add_argument(
"content", type=str, required=True, nullable=False, location="json"
)
child_chunk_list_parser = reqparse.RequestParser()
child_chunk_list_parser.add_argument("limit", type=int, default=20, location="args")
child_chunk_list_parser.add_argument("keyword", type=str, default=None, location="args")
child_chunk_list_parser.add_argument("page", type=int, default=1, location="args")
child_chunk_list_parser = (
reqparse.RequestParser()
.add_argument("limit", type=int, default=20, location="args")
.add_argument("keyword", type=str, default=None, location="args")
.add_argument("page", type=int, default=1, location="args")
)
child_chunk_update_parser = reqparse.RequestParser()
child_chunk_update_parser.add_argument("content", type=str, required=True, nullable=False, location="json")
child_chunk_update_parser = reqparse.RequestParser().add_argument(
"content", type=str, required=True, nullable=False, location="json"
)
@service_api_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments")