# Mirrored from https://github.com/langgenius/dify.git
# (synced 2026-03-27 20:00:35 -04:00 — 48 lines, 1.1 KiB, Python)
from typing import Literal
|
|
|
|
from pydantic import BaseModel
|
|
|
|
from graphon.model_runtime.entities.message_entities import PromptMessageRole
|
|
|
|
|
|
class ChatModelMessage(BaseModel):
    """Graph-owned chat prompt template message.

    One entry of a chat-style prompt template: the message text, the role it
    is attributed to, and which template dialect the text is written in.
    """

    # Raw template text for this message.
    text: str
    # Speaker role attached to the message (project enum PromptMessageRole).
    role: PromptMessageRole
    # Template dialect of `text`: "basic" or "jinja2". None presumably means
    # the consumer falls back to "basic" — TODO confirm against the renderer.
    edition_type: Literal["basic", "jinja2"] | None = None
|
|
|
|
|
|
class CompletionModelPromptTemplate(BaseModel):
    """Graph-owned completion prompt template.

    Single-string prompt template for completion-style models (no per-message
    roles, unlike ChatModelMessage).
    """

    # Raw template text of the whole prompt.
    text: str
    # Template dialect of `text`: "basic" or "jinja2". None presumably means
    # the consumer falls back to "basic" — TODO confirm against the renderer.
    edition_type: Literal["basic", "jinja2"] | None = None
|
|
|
|
|
|
class MemoryConfig(BaseModel):
    """Graph-owned memory configuration for prompt assembly.

    Bundles the settings that control how conversation history is folded
    into a prompt: optional role labels for completion-style serialization,
    a history window, and an optional query template.
    """

    class RolePrefix(BaseModel):
        """Role labels used when serializing completion-model histories."""

        # Label rendered for user turns (e.g. a "Human:"-style prefix —
        # exact usage depends on the serializer; not visible here).
        user: str
        # Label rendered for assistant turns.
        assistant: str

    class WindowConfig(BaseModel):
        """History windowing controls."""

        # Whether history windowing is applied at all.
        enabled: bool
        # Window size when enabled. None presumably means "use a default /
        # unbounded" — NOTE(review): confirm semantics at the consumer.
        size: int | None = None

    # Optional role labels; presumably only consulted for completion-style
    # models (chat models carry roles per message) — verify against caller.
    role_prefix: RolePrefix | None = None
    # History windowing settings (required).
    window: WindowConfig
    # Optional template used to build the query portion of the prompt.
    query_prompt_template: str | None = None
|
|
|
|
|
|
# Public API: only the three top-level models are exported. RolePrefix and
# WindowConfig stay reachable as attributes of MemoryConfig.
__all__ = [
    "ChatModelMessage",
    "CompletionModelPromptTemplate",
    "MemoryConfig",
]
|