chore: add ast-grep rule to convert Optional[T] to T | None (#25560)

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
This commit is contained in:
-LAN-
2025-09-15 13:06:33 +08:00
committed by GitHub
parent 2e44ebe98d
commit bab4975809
394 changed files with 2555 additions and 2792 deletions

View File

@@ -1,5 +1,5 @@
 from collections.abc import Generator
-from typing import Any, Optional
+from typing import Any
 from core.agent.entities import AgentInvokeMessage
 from core.plugin.entities.plugin import GenericProviderID
@@ -82,10 +82,10 @@ class PluginAgentClient(BasePluginClient):
         agent_provider: str,
         agent_strategy: str,
         agent_params: dict[str, Any],
-        conversation_id: Optional[str] = None,
-        app_id: Optional[str] = None,
-        message_id: Optional[str] = None,
-        context: Optional[PluginInvokeContext] = None,
+        conversation_id: str | None = None,
+        app_id: str | None = None,
+        message_id: str | None = None,
+        context: PluginInvokeContext | None = None,
     ) -> Generator[AgentInvokeMessage, None, None]:
"""
Invoke the agent with the given tenant, user, plugin, provider, name and parameters.

View File

@@ -1,6 +1,6 @@
 import binascii
 from collections.abc import Generator, Sequence
-from typing import IO, Optional
+from typing import IO
 from core.model_runtime.entities.llm_entities import LLMResultChunk
 from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool
@@ -151,9 +151,9 @@ class PluginModelClient(BasePluginClient):
         model: str,
         credentials: dict,
         prompt_messages: list[PromptMessage],
-        model_parameters: Optional[dict] = None,
-        tools: Optional[list[PromptMessageTool]] = None,
-        stop: Optional[list[str]] = None,
+        model_parameters: dict | None = None,
+        tools: list[PromptMessageTool] | None = None,
+        stop: list[str] | None = None,
         stream: bool = True,
     ) -> Generator[LLMResultChunk, None, None]:
"""
@@ -200,7 +200,7 @@ class PluginModelClient(BasePluginClient):
         model: str,
         credentials: dict,
         prompt_messages: list[PromptMessage],
-        tools: Optional[list[PromptMessageTool]] = None,
+        tools: list[PromptMessageTool] | None = None,
     ) -> int:
"""
Get number of tokens for llm
@@ -325,8 +325,8 @@ class PluginModelClient(BasePluginClient):
         credentials: dict,
         query: str,
         docs: list[str],
-        score_threshold: Optional[float] = None,
-        top_n: Optional[int] = None,
+        score_threshold: float | None = None,
+        top_n: int | None = None,
     ) -> RerankResult:
"""
Invoke rerank
@@ -414,7 +414,7 @@ class PluginModelClient(BasePluginClient):
         provider: str,
         model: str,
         credentials: dict,
-        language: Optional[str] = None,
+        language: str | None = None,
     ):
"""
Get tts model voices

View File

@@ -1,5 +1,5 @@
 from collections.abc import Generator
-from typing import Any, Optional
+from typing import Any
 from pydantic import BaseModel
@@ -81,9 +81,9 @@ class PluginToolManager(BasePluginClient):
         credentials: dict[str, Any],
         credential_type: CredentialType,
         tool_parameters: dict[str, Any],
-        conversation_id: Optional[str] = None,
-        app_id: Optional[str] = None,
-        message_id: Optional[str] = None,
+        conversation_id: str | None = None,
+        app_id: str | None = None,
+        message_id: str | None = None,
     ) -> Generator[ToolInvokeMessage, None, None]:
"""
Invoke the tool with the given tenant, user, plugin, provider, name, credentials and parameters.
@@ -153,9 +153,9 @@ class PluginToolManager(BasePluginClient):
         provider: str,
         credentials: dict[str, Any],
         tool: str,
-        conversation_id: Optional[str] = None,
-        app_id: Optional[str] = None,
-        message_id: Optional[str] = None,
+        conversation_id: str | None = None,
+        app_id: str | None = None,
+        message_id: str | None = None,
     ) -> list[ToolParameter]:
"""
get the runtime parameters of the tool