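"""Celery tasks for asynchronous document indexing.

Two public entry points, normal_document_indexing_task (queue "dataset") and
priority_document_indexing_task (queue "priority_dataset"), wrap the shared
indexing routine in a per-tenant isolated queue that caps each tenant's
in-flight indexing work at TENANT_ISOLATED_TASK_CONCURRENCY. The legacy
document_indexing_task bypasses that queue and is slated for removal.
"""
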
import logging
import time
from collections.abc import Callable, Sequence

import click
from celery import shared_task

from configs import dify_config
from core.entities.document_task import DocumentTask
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from core.rag.pipeline.queue import TenantIsolatedTaskQueue
from enums.cloud_plan import CloudPlan
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document
from services.feature_service import FeatureService

logger = logging.getLogger(__name__)


@shared_task(queue="dataset")
def document_indexing_task(dataset_id: str, document_ids: list):
    """
    Async process document
    :param dataset_id:
    :param document_ids:

    .. warning:: TO BE DEPRECATED
        This function will be deprecated and removed in a future version.
        Use normal_document_indexing_task or priority_document_indexing_task instead.

    Usage: document_indexing_task.delay(dataset_id, document_ids)
    """
    logger.warning("document indexing legacy mode received: %s - %s", dataset_id, document_ids)
    _document_indexing(dataset_id, document_ids)


def _document_indexing(dataset_id: str, document_ids: Sequence[str]):
    """
    Process document for tasks
    :param dataset_id:
    :param document_ids:

    Usage: _document_indexing(dataset_id, document_ids)
    """
    documents = []
    start_at = time.perf_counter()

    dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
    if not dataset:
        logger.info(click.style(f"Dataset not found: {dataset_id}", fg="yellow"))
        db.session.close()
        return

    # Check the batch against the tenant's plan limits; any violation marks
    # every document in the batch as errored and aborts the run.
    features = FeatureService.get_features(dataset.tenant_id)
    try:
        if features.billing.enabled:
            vector_space = features.vector_space
            count = len(document_ids)
            batch_upload_limit = int(dify_config.BATCH_UPLOAD_LIMIT)
            if features.billing.subscription.plan == CloudPlan.SANDBOX and count > 1:
                raise ValueError("Your current plan does not support batch upload, please upgrade your plan.")
            if count > batch_upload_limit:
                raise ValueError(f"You have reached the batch upload limit of {batch_upload_limit}.")
            if 0 < vector_space.limit <= vector_space.size:
                raise ValueError(
                    "Your total number of documents plus the number of uploads has exceeded the limit of "
                    "your subscription."
                )
    except Exception as e:
        for document_id in document_ids:
            document = (
                db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
            )
            if document:
                document.indexing_status = "error"
                document.error = str(e)
                document.stopped_at = naive_utc_now()
                db.session.add(document)
        db.session.commit()
        db.session.close()
        return

    for document_id in document_ids:
        logger.info(click.style(f"Start processing document: {document_id}", fg="green"))

        document = (
            db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
        )

        if document:
            document.indexing_status = "parsing"
            document.processing_started_at = naive_utc_now()
            documents.append(document)
            db.session.add(document)
    db.session.commit()

    try:
        indexing_runner = IndexingRunner()
        indexing_runner.run(documents)
        end_at = time.perf_counter()
        logger.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green"))
    except DocumentIsPausedError as ex:
        logger.info(click.style(str(ex), fg="yellow"))
    except Exception:
        logger.exception("Document indexing task failed, dataset_id: %s", dataset_id)
    finally:
        db.session.close()


def _document_indexing_with_tenant_queue(
    tenant_id: str, dataset_id: str, document_ids: Sequence[str], task_func: Callable[[str, str, Sequence[str]], None]
):
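    """
    Run one indexing batch, then drain this tenant's isolated queue.

    After _document_indexing finishes (or fails), pull up to
    TENANT_ISOLATED_TASK_CONCURRENCY waiting payloads from the tenant's
    TenantIsolatedTaskQueue and re-dispatch each through task_func.delay,
    keeping the tenant's running flag set. If nothing is waiting, the flag
    is cleared so the tenant's next batch does not sit behind a stale lock.
    """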
    try:
        _document_indexing(dataset_id, document_ids)
    except Exception:
        logger.exception("Error processing document indexing %s for tenant %s", dataset_id, tenant_id)
    finally:
        tenant_isolated_task_queue = TenantIsolatedTaskQueue(tenant_id, "document_indexing")

        # Check if there are waiting tasks in the queue.
        # Use rpop to get the next task from the queue (FIFO order).
        next_tasks = tenant_isolated_task_queue.pull_tasks(count=dify_config.TENANT_ISOLATED_TASK_CONCURRENCY)

        logger.info("document indexing tenant isolation queue next tasks: %s", next_tasks)

        if next_tasks:
            for next_task in next_tasks:
                document_task = DocumentTask(**next_task)
                # Process the next waiting task.
                # Keep the flag set to indicate a task is running.
                tenant_isolated_task_queue.set_task_waiting_time()
                task_func.delay(  # type: ignore
                    tenant_id=document_task.tenant_id,
                    dataset_id=document_task.dataset_id,
                    document_ids=document_task.document_ids,
                )
        else:
            # No more waiting tasks, clear the flag.
            tenant_isolated_task_queue.delete_task_key()
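

# Lifecycle of a tenant's batch (illustrative sketch; the producer-side enqueue
# API of TenantIsolatedTaskQueue lives outside this module and is assumed, not shown):
#   1. A caller dispatches e.g. normal_document_indexing_task.delay(tenant_id, dataset_id, ids).
#   2. The task indexes its batch, then pulls up to TENANT_ISOLATED_TASK_CONCURRENCY
#      waiting payloads from the tenant queue and re-dispatches itself for each one.
#   3. Once the queue is empty, delete_task_key() clears the tenant's running flag.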


@shared_task(queue="dataset")
def normal_document_indexing_task(tenant_id: str, dataset_id: str, document_ids: Sequence[str]):
    """
    Async process document
    :param tenant_id:
    :param dataset_id:
    :param document_ids:

    Usage: normal_document_indexing_task.delay(tenant_id, dataset_id, document_ids)
    """
    logger.info("normal document indexing task received: %s - %s - %s", tenant_id, dataset_id, document_ids)
    _document_indexing_with_tenant_queue(tenant_id, dataset_id, document_ids, normal_document_indexing_task)


@shared_task(queue="priority_dataset")
def priority_document_indexing_task(tenant_id: str, dataset_id: str, document_ids: Sequence[str]):
    """
    Priority async process document
    :param tenant_id:
    :param dataset_id:
    :param document_ids:

    Usage: priority_document_indexing_task.delay(tenant_id, dataset_id, document_ids)
    """
    logger.info("priority document indexing task received: %s - %s - %s", tenant_id, dataset_id, document_ids)
    _document_indexing_with_tenant_queue(tenant_id, dataset_id, document_ids, priority_document_indexing_task)