chore: add ast-grep rule to convert Optional[T] to T | None (#25560)

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
This commit is contained in:
-LAN-
2025-09-15 13:06:33 +08:00
committed by GitHub
parent 2e44ebe98d
commit bab4975809
394 changed files with 2555 additions and 2792 deletions

View File

@@ -1,12 +1,10 @@
from typing import Optional
from core.app.app_config.entities import SensitiveWordAvoidanceEntity
from core.moderation.factory import ModerationFactory
class SensitiveWordAvoidanceConfigManager:
@classmethod
def convert(cls, config: dict) -> Optional[SensitiveWordAvoidanceEntity]:
def convert(cls, config: dict) -> SensitiveWordAvoidanceEntity | None:
sensitive_word_avoidance_dict = config.get("sensitive_word_avoidance")
if not sensitive_word_avoidance_dict:
return None

View File

@@ -1,12 +1,10 @@
from typing import Optional
from core.agent.entities import AgentEntity, AgentPromptEntity, AgentToolEntity
from core.agent.prompt.template import REACT_PROMPT_TEMPLATES
class AgentConfigManager:
@classmethod
def convert(cls, config: dict) -> Optional[AgentEntity]:
def convert(cls, config: dict) -> AgentEntity | None:
"""
Convert app model config dict to an agent entity

View File

@@ -1,5 +1,4 @@
import uuid
from typing import Optional
from core.app.app_config.entities import (
DatasetEntity,
@@ -14,7 +13,7 @@ from services.dataset_service import DatasetService
class DatasetConfigManager:
@classmethod
def convert(cls, config: dict) -> Optional[DatasetEntity]:
def convert(cls, config: dict) -> DatasetEntity | None:
"""
Convert app model config dict to a dataset entity

View File

@@ -1,6 +1,6 @@
from collections.abc import Sequence
from enum import StrEnum, auto
from typing import Any, Literal, Optional
from typing import Any, Literal
from pydantic import BaseModel, Field, field_validator
@@ -17,7 +17,7 @@ class ModelConfigEntity(BaseModel):
provider: str
model: str
mode: Optional[str] = None
mode: str | None = None
parameters: dict[str, Any] = Field(default_factory=dict)
stop: list[str] = Field(default_factory=list)
@@ -53,7 +53,7 @@ class AdvancedCompletionPromptTemplateEntity(BaseModel):
assistant: str
prompt: str
role_prefix: Optional[RolePrefixEntity] = None
role_prefix: RolePrefixEntity | None = None
class PromptTemplateEntity(BaseModel):
@@ -84,9 +84,9 @@ class PromptTemplateEntity(BaseModel):
raise ValueError(f"invalid prompt type value {value}")
prompt_type: PromptType
simple_prompt_template: Optional[str] = None
advanced_chat_prompt_template: Optional[AdvancedChatPromptTemplateEntity] = None
advanced_completion_prompt_template: Optional[AdvancedCompletionPromptTemplateEntity] = None
simple_prompt_template: str | None = None
advanced_chat_prompt_template: AdvancedChatPromptTemplateEntity | None = None
advanced_completion_prompt_template: AdvancedCompletionPromptTemplateEntity | None = None
class VariableEntityType(StrEnum):
@@ -112,7 +112,7 @@ class VariableEntity(BaseModel):
type: VariableEntityType
required: bool = False
hide: bool = False
max_length: Optional[int] = None
max_length: int | None = None
options: Sequence[str] = Field(default_factory=list)
allowed_file_types: Sequence[FileType] = Field(default_factory=list)
allowed_file_extensions: Sequence[str] = Field(default_factory=list)
@@ -186,8 +186,8 @@ class MetadataFilteringCondition(BaseModel):
Metadata Filtering Condition.
"""
logical_operator: Optional[Literal["and", "or"]] = "and"
conditions: Optional[list[Condition]] = Field(default=None, deprecated=True)
logical_operator: Literal["and", "or"] | None = "and"
conditions: list[Condition] | None = Field(default=None, deprecated=True)
class DatasetRetrieveConfigEntity(BaseModel):
@@ -217,18 +217,18 @@ class DatasetRetrieveConfigEntity(BaseModel):
return mode
raise ValueError(f"invalid retrieve strategy value {value}")
query_variable: Optional[str] = None # Only when app mode is completion
query_variable: str | None = None # Only when app mode is completion
retrieve_strategy: RetrieveStrategy
top_k: Optional[int] = None
score_threshold: Optional[float] = 0.0
rerank_mode: Optional[str] = "reranking_model"
reranking_model: Optional[dict] = None
weights: Optional[dict] = None
reranking_enabled: Optional[bool] = True
metadata_filtering_mode: Optional[Literal["disabled", "automatic", "manual"]] = "disabled"
metadata_model_config: Optional[ModelConfig] = None
metadata_filtering_conditions: Optional[MetadataFilteringCondition] = None
top_k: int | None = None
score_threshold: float | None = 0.0
rerank_mode: str | None = "reranking_model"
reranking_model: dict | None = None
weights: dict | None = None
reranking_enabled: bool | None = True
metadata_filtering_mode: Literal["disabled", "automatic", "manual"] | None = "disabled"
metadata_model_config: ModelConfig | None = None
metadata_filtering_conditions: MetadataFilteringCondition | None = None
class DatasetEntity(BaseModel):
@@ -255,8 +255,8 @@ class TextToSpeechEntity(BaseModel):
"""
enabled: bool
voice: Optional[str] = None
language: Optional[str] = None
voice: str | None = None
language: str | None = None
class TracingConfigEntity(BaseModel):
@@ -269,15 +269,15 @@ class TracingConfigEntity(BaseModel):
class AppAdditionalFeatures(BaseModel):
file_upload: Optional[FileUploadConfig] = None
opening_statement: Optional[str] = None
file_upload: FileUploadConfig | None = None
opening_statement: str | None = None
suggested_questions: list[str] = []
suggested_questions_after_answer: bool = False
show_retrieve_source: bool = False
more_like_this: bool = False
speech_to_text: bool = False
text_to_speech: Optional[TextToSpeechEntity] = None
trace_config: Optional[TracingConfigEntity] = None
text_to_speech: TextToSpeechEntity | None = None
trace_config: TracingConfigEntity | None = None
class AppConfig(BaseModel):
@@ -290,7 +290,7 @@ class AppConfig(BaseModel):
app_mode: AppMode
additional_features: AppAdditionalFeatures
variables: list[VariableEntity] = []
sensitive_word_avoidance: Optional[SensitiveWordAvoidanceEntity] = None
sensitive_word_avoidance: SensitiveWordAvoidanceEntity | None = None
class EasyUIBasedAppModelConfigFrom(StrEnum):
@@ -313,7 +313,7 @@ class EasyUIBasedAppConfig(AppConfig):
app_model_config_dict: dict
model: ModelConfigEntity
prompt_template: PromptTemplateEntity
dataset: Optional[DatasetEntity] = None
dataset: DatasetEntity | None = None
external_data_variables: list[ExternalDataVariableEntity] = []

View File

@@ -3,7 +3,7 @@ import logging
import threading
import uuid
from collections.abc import Generator, Mapping
from typing import Any, Literal, Optional, Union, overload
from typing import Any, Literal, Union, overload
from flask import Flask, current_app
from pydantic import ValidationError
@@ -390,7 +390,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
application_generate_entity: AdvancedChatAppGenerateEntity,
workflow_execution_repository: WorkflowExecutionRepository,
workflow_node_execution_repository: WorkflowNodeExecutionRepository,
conversation: Optional[Conversation] = None,
conversation: Conversation | None = None,
stream: bool = True,
variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER,
) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], Any, None]:

View File

@@ -1,6 +1,6 @@
import logging
from collections.abc import Mapping
from typing import Any, Optional, cast
from typing import Any, cast
from sqlalchemy import select
from sqlalchemy.orm import Session
@@ -231,7 +231,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
def query_app_annotations_to_reply(
self, app_record: App, message: Message, query: str, user_id: str, invoke_from: InvokeFrom
) -> Optional[MessageAnnotation]:
) -> MessageAnnotation | None:
"""
Query app annotations to reply
:param app_record: app record

View File

@@ -4,7 +4,7 @@ import time
from collections.abc import Callable, Generator, Mapping
from contextlib import contextmanager
from threading import Thread
from typing import Any, Optional, Union
from typing import Any, Union
from sqlalchemy import select
from sqlalchemy.orm import Session
@@ -233,7 +233,7 @@ class AdvancedChatAppGenerateTaskPipeline:
return None
def _wrapper_process_stream_response(
self, trace_manager: Optional[TraceQueueManager] = None
self, trace_manager: TraceQueueManager | None = None
) -> Generator[StreamResponse, None, None]:
tts_publisher = None
task_id = self._application_generate_entity.task_id
@@ -294,7 +294,7 @@ class AdvancedChatAppGenerateTaskPipeline:
if not self._workflow_run_id:
raise ValueError("workflow run not initialized.")
def _ensure_graph_runtime_initialized(self, graph_runtime_state: Optional[GraphRuntimeState]) -> GraphRuntimeState:
def _ensure_graph_runtime_initialized(self, graph_runtime_state: GraphRuntimeState | None) -> GraphRuntimeState:
"""Fluent validation for graph runtime state."""
if not graph_runtime_state:
raise ValueError("graph runtime state not initialized.")
@@ -411,8 +411,8 @@ class AdvancedChatAppGenerateTaskPipeline:
self,
event: QueueTextChunkEvent,
*,
tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
queue_message: Optional[Union[WorkflowQueueMessage, MessageQueueMessage]] = None,
tts_publisher: AppGeneratorTTSPublisher | None = None,
queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle text chunk events."""
@@ -538,8 +538,8 @@ class AdvancedChatAppGenerateTaskPipeline:
self,
event: QueueWorkflowSucceededEvent,
*,
graph_runtime_state: Optional[GraphRuntimeState] = None,
trace_manager: Optional[TraceQueueManager] = None,
graph_runtime_state: GraphRuntimeState | None = None,
trace_manager: TraceQueueManager | None = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle workflow succeeded events."""
@@ -569,8 +569,8 @@ class AdvancedChatAppGenerateTaskPipeline:
self,
event: QueueWorkflowPartialSuccessEvent,
*,
graph_runtime_state: Optional[GraphRuntimeState] = None,
trace_manager: Optional[TraceQueueManager] = None,
graph_runtime_state: GraphRuntimeState | None = None,
trace_manager: TraceQueueManager | None = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle workflow partial success events."""
@@ -601,8 +601,8 @@ class AdvancedChatAppGenerateTaskPipeline:
self,
event: QueueWorkflowFailedEvent,
*,
graph_runtime_state: Optional[GraphRuntimeState] = None,
trace_manager: Optional[TraceQueueManager] = None,
graph_runtime_state: GraphRuntimeState | None = None,
trace_manager: TraceQueueManager | None = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle workflow failed events."""
@@ -636,8 +636,8 @@ class AdvancedChatAppGenerateTaskPipeline:
self,
event: QueueStopEvent,
*,
graph_runtime_state: Optional[GraphRuntimeState] = None,
trace_manager: Optional[TraceQueueManager] = None,
graph_runtime_state: GraphRuntimeState | None = None,
trace_manager: TraceQueueManager | None = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle stop events."""
@@ -677,7 +677,7 @@ class AdvancedChatAppGenerateTaskPipeline:
self,
event: QueueAdvancedChatMessageEndEvent,
*,
graph_runtime_state: Optional[GraphRuntimeState] = None,
graph_runtime_state: GraphRuntimeState | None = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle advanced chat message end events."""
@@ -775,10 +775,10 @@ class AdvancedChatAppGenerateTaskPipeline:
self,
event: Any,
*,
graph_runtime_state: Optional[GraphRuntimeState] = None,
tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
trace_manager: Optional[TraceQueueManager] = None,
queue_message: Optional[Union[WorkflowQueueMessage, MessageQueueMessage]] = None,
graph_runtime_state: GraphRuntimeState | None = None,
tts_publisher: AppGeneratorTTSPublisher | None = None,
trace_manager: TraceQueueManager | None = None,
queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None,
) -> Generator[StreamResponse, None, None]:
"""Dispatch events using elegant pattern matching."""
handlers = self._get_event_handlers()
@@ -830,15 +830,15 @@ class AdvancedChatAppGenerateTaskPipeline:
def _process_stream_response(
self,
tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
trace_manager: Optional[TraceQueueManager] = None,
tts_publisher: AppGeneratorTTSPublisher | None = None,
trace_manager: TraceQueueManager | None = None,
) -> Generator[StreamResponse, None, None]:
"""
Process stream response using elegant Fluent Python patterns.
Maintains exact same functionality as original 57-if-statement version.
"""
# Initialize graph runtime state
graph_runtime_state: Optional[GraphRuntimeState] = None
graph_runtime_state: GraphRuntimeState | None = None
for queue_message in self._base_task_pipeline.queue_manager.listen():
event = queue_message.event
@@ -888,7 +888,7 @@ class AdvancedChatAppGenerateTaskPipeline:
if self._conversation_name_generate_thread:
self._conversation_name_generate_thread.join()
def _save_message(self, *, session: Session, graph_runtime_state: Optional[GraphRuntimeState] = None):
def _save_message(self, *, session: Session, graph_runtime_state: GraphRuntimeState | None = None):
message = self._get_message(session=session)
# If there are assistant files, remove markdown image links from answer

View File

@@ -1,6 +1,6 @@
import uuid
from collections.abc import Mapping
from typing import Any, Optional, cast
from typing import Any, cast
from core.agent.entities import AgentEntity
from core.app.app_config.base_app_config_manager import BaseAppConfigManager
@@ -30,7 +30,7 @@ class AgentChatAppConfig(EasyUIBasedAppConfig):
Agent Chatbot App Config Entity.
"""
agent: Optional[AgentEntity] = None
agent: AgentEntity | None = None
class AgentChatAppConfigManager(BaseAppConfigManager):
@@ -39,8 +39,8 @@ class AgentChatAppConfigManager(BaseAppConfigManager):
cls,
app_model: App,
app_model_config: AppModelConfig,
conversation: Optional[Conversation] = None,
override_config_dict: Optional[dict] = None,
conversation: Conversation | None = None,
override_config_dict: dict | None = None,
) -> AgentChatAppConfig:
"""
Convert app model config to agent chat app config

View File

@@ -1,5 +1,5 @@
from collections.abc import Generator, Mapping, Sequence
from typing import TYPE_CHECKING, Any, Optional, Union, final
from typing import TYPE_CHECKING, Any, Union, final
from sqlalchemy.orm import Session
@@ -24,7 +24,7 @@ class BaseAppGenerator:
def _prepare_user_inputs(
self,
*,
user_inputs: Optional[Mapping[str, Any]],
user_inputs: Mapping[str, Any] | None,
variables: Sequence["VariableEntity"],
tenant_id: str,
strict_type_validation: bool = False,

View File

@@ -2,7 +2,7 @@ import queue
import time
from abc import abstractmethod
from enum import IntEnum, auto
from typing import Any, Optional
from typing import Any
from sqlalchemy.orm import DeclarativeMeta
@@ -116,7 +116,7 @@ class AppQueueManager:
Set task stop flag
:return:
"""
result: Optional[Any] = redis_client.get(cls._generate_task_belong_cache_key(task_id))
result: Any | None = redis_client.get(cls._generate_task_belong_cache_key(task_id))
if result is None:
return

View File

@@ -1,7 +1,7 @@
import logging
import time
from collections.abc import Generator, Mapping, Sequence
from typing import TYPE_CHECKING, Any, Optional, Union
from typing import TYPE_CHECKING, Any, Union
from core.app.app_config.entities import ExternalDataVariableEntity, PromptTemplateEntity
from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
@@ -82,11 +82,11 @@ class AppRunner:
prompt_template_entity: PromptTemplateEntity,
inputs: Mapping[str, str],
files: Sequence["File"],
query: Optional[str] = None,
context: Optional[str] = None,
memory: Optional[TokenBufferMemory] = None,
image_detail_config: Optional[ImagePromptMessageContent.DETAIL] = None,
) -> tuple[list[PromptMessage], Optional[list[str]]]:
query: str | None = None,
context: str | None = None,
memory: TokenBufferMemory | None = None,
image_detail_config: ImagePromptMessageContent.DETAIL | None = None,
) -> tuple[list[PromptMessage], list[str] | None]:
"""
Organize prompt messages
:param context:
@@ -161,7 +161,7 @@ class AppRunner:
prompt_messages: list,
text: str,
stream: bool,
usage: Optional[LLMUsage] = None,
usage: LLMUsage | None = None,
):
"""
Direct output
@@ -375,7 +375,7 @@ class AppRunner:
def query_app_annotations_to_reply(
self, app_record: App, message: Message, query: str, user_id: str, invoke_from: InvokeFrom
) -> Optional[MessageAnnotation]:
) -> MessageAnnotation | None:
"""
Query app annotations to reply
:param app_record: app record

View File

@@ -1,5 +1,3 @@
from typing import Optional
from core.app.app_config.base_app_config_manager import BaseAppConfigManager
from core.app.app_config.common.sensitive_word_avoidance.manager import SensitiveWordAvoidanceConfigManager
from core.app.app_config.easy_ui_based_app.dataset.manager import DatasetConfigManager
@@ -32,8 +30,8 @@ class ChatAppConfigManager(BaseAppConfigManager):
cls,
app_model: App,
app_model_config: AppModelConfig,
conversation: Optional[Conversation] = None,
override_config_dict: Optional[dict] = None,
conversation: Conversation | None = None,
override_config_dict: dict | None = None,
) -> ChatAppConfig:
"""
Convert app model config to chat app config

View File

@@ -1,7 +1,7 @@
import time
from collections.abc import Mapping, Sequence
from datetime import UTC, datetime
from typing import Any, Optional, Union, cast
from typing import Any, Union, cast
from sqlalchemy.orm import Session
@@ -140,7 +140,7 @@ class WorkflowResponseConverter:
event: QueueNodeStartedEvent,
task_id: str,
workflow_node_execution: WorkflowNodeExecution,
) -> Optional[NodeStartStreamResponse]:
) -> NodeStartStreamResponse | None:
if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
return None
if not workflow_node_execution.workflow_execution_id:
@@ -190,7 +190,7 @@ class WorkflowResponseConverter:
| QueueNodeExceptionEvent,
task_id: str,
workflow_node_execution: WorkflowNodeExecution,
) -> Optional[NodeFinishStreamResponse]:
) -> NodeFinishStreamResponse | None:
if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
return None
if not workflow_node_execution.workflow_execution_id:
@@ -235,7 +235,7 @@ class WorkflowResponseConverter:
event: QueueNodeRetryEvent,
task_id: str,
workflow_node_execution: WorkflowNodeExecution,
) -> Optional[Union[NodeRetryStreamResponse, NodeFinishStreamResponse]]:
) -> Union[NodeRetryStreamResponse, NodeFinishStreamResponse] | None:
if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
return None
if not workflow_node_execution.workflow_execution_id:

View File

@@ -1,5 +1,3 @@
from typing import Optional
from core.app.app_config.base_app_config_manager import BaseAppConfigManager
from core.app.app_config.common.sensitive_word_avoidance.manager import SensitiveWordAvoidanceConfigManager
from core.app.app_config.easy_ui_based_app.dataset.manager import DatasetConfigManager
@@ -24,7 +22,7 @@ class CompletionAppConfig(EasyUIBasedAppConfig):
class CompletionAppConfigManager(BaseAppConfigManager):
@classmethod
def get_app_config(
cls, app_model: App, app_model_config: AppModelConfig, override_config_dict: Optional[dict] = None
cls, app_model: App, app_model_config: AppModelConfig, override_config_dict: dict | None = None
) -> CompletionAppConfig:
"""
Convert app model config to completion app config

View File

@@ -1,7 +1,7 @@
import json
import logging
from collections.abc import Generator
from typing import Optional, Union, cast
from typing import Union, cast
from sqlalchemy import select
from sqlalchemy.orm import Session
@@ -84,7 +84,7 @@ class MessageBasedAppGenerator(BaseAppGenerator):
logger.exception("Failed to handle response, conversation_id: %s", conversation.id)
raise e
def _get_app_model_config(self, app_model: App, conversation: Optional[Conversation] = None) -> AppModelConfig:
def _get_app_model_config(self, app_model: App, conversation: Conversation | None = None) -> AppModelConfig:
if conversation:
stmt = select(AppModelConfig).where(
AppModelConfig.id == conversation.app_model_config_id, AppModelConfig.app_id == app_model.id
@@ -112,7 +112,7 @@ class MessageBasedAppGenerator(BaseAppGenerator):
AgentChatAppGenerateEntity,
AdvancedChatAppGenerateEntity,
],
conversation: Optional[Conversation] = None,
conversation: Conversation | None = None,
) -> tuple[Conversation, Message]:
"""
Initialize generate records

View File

@@ -3,7 +3,7 @@ import logging
import threading
import uuid
from collections.abc import Generator, Mapping, Sequence
from typing import Any, Literal, Optional, Union, overload
from typing import Any, Literal, Union, overload
from flask import Flask, current_app
from pydantic import ValidationError
@@ -53,7 +53,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
invoke_from: InvokeFrom,
streaming: Literal[True],
call_depth: int,
workflow_thread_pool_id: Optional[str],
workflow_thread_pool_id: str | None,
) -> Generator[Mapping | str, None, None]: ...
@overload
@@ -67,7 +67,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
invoke_from: InvokeFrom,
streaming: Literal[False],
call_depth: int,
workflow_thread_pool_id: Optional[str],
workflow_thread_pool_id: str | None,
) -> Mapping[str, Any]: ...
@overload
@@ -81,7 +81,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
invoke_from: InvokeFrom,
streaming: bool,
call_depth: int,
workflow_thread_pool_id: Optional[str],
workflow_thread_pool_id: str | None,
) -> Union[Mapping[str, Any], Generator[Mapping | str, None, None]]: ...
def generate(
@@ -94,7 +94,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
invoke_from: InvokeFrom,
streaming: bool = True,
call_depth: int = 0,
workflow_thread_pool_id: Optional[str] = None,
workflow_thread_pool_id: str | None = None,
) -> Union[Mapping[str, Any], Generator[Mapping | str, None, None]]:
files: Sequence[Mapping[str, Any]] = args.get("files") or []
@@ -200,7 +200,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
workflow_execution_repository: WorkflowExecutionRepository,
workflow_node_execution_repository: WorkflowNodeExecutionRepository,
streaming: bool = True,
workflow_thread_pool_id: Optional[str] = None,
workflow_thread_pool_id: str | None = None,
variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER,
) -> Union[Mapping[str, Any], Generator[str | Mapping[str, Any], None, None]]:
"""
@@ -434,7 +434,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
queue_manager: AppQueueManager,
context: contextvars.Context,
variable_loader: VariableLoader,
workflow_thread_pool_id: Optional[str] = None,
workflow_thread_pool_id: str | None = None,
):
"""
Generate worker in a new thread.

View File

@@ -1,5 +1,5 @@
import logging
from typing import Optional, cast
from typing import cast
from configs import dify_config
from core.app.apps.base_app_queue_manager import AppQueueManager
@@ -31,7 +31,7 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
application_generate_entity: WorkflowAppGenerateEntity,
queue_manager: AppQueueManager,
variable_loader: VariableLoader,
workflow_thread_pool_id: Optional[str] = None,
workflow_thread_pool_id: str | None = None,
workflow: Workflow,
system_user_id: str,
):

View File

@@ -2,7 +2,7 @@ import logging
import time
from collections.abc import Callable, Generator
from contextlib import contextmanager
from typing import Any, Optional, Union
from typing import Any, Union
from sqlalchemy.orm import Session
@@ -206,7 +206,7 @@ class WorkflowAppGenerateTaskPipeline:
return None
def _wrapper_process_stream_response(
self, trace_manager: Optional[TraceQueueManager] = None
self, trace_manager: TraceQueueManager | None = None
) -> Generator[StreamResponse, None, None]:
tts_publisher = None
task_id = self._application_generate_entity.task_id
@@ -268,7 +268,7 @@ class WorkflowAppGenerateTaskPipeline:
if not self._workflow_run_id:
raise ValueError("workflow run not initialized.")
def _ensure_graph_runtime_initialized(self, graph_runtime_state: Optional[GraphRuntimeState]) -> GraphRuntimeState:
def _ensure_graph_runtime_initialized(self, graph_runtime_state: GraphRuntimeState | None) -> GraphRuntimeState:
"""Fluent validation for graph runtime state."""
if not graph_runtime_state:
raise ValueError("graph runtime state not initialized.")
@@ -474,8 +474,8 @@ class WorkflowAppGenerateTaskPipeline:
self,
event: QueueWorkflowSucceededEvent,
*,
graph_runtime_state: Optional[GraphRuntimeState] = None,
trace_manager: Optional[TraceQueueManager] = None,
graph_runtime_state: GraphRuntimeState | None = None,
trace_manager: TraceQueueManager | None = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle workflow succeeded events."""
@@ -508,8 +508,8 @@ class WorkflowAppGenerateTaskPipeline:
self,
event: QueueWorkflowPartialSuccessEvent,
*,
graph_runtime_state: Optional[GraphRuntimeState] = None,
trace_manager: Optional[TraceQueueManager] = None,
graph_runtime_state: GraphRuntimeState | None = None,
trace_manager: TraceQueueManager | None = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle workflow partial success events."""
@@ -543,8 +543,8 @@ class WorkflowAppGenerateTaskPipeline:
self,
event: Union[QueueWorkflowFailedEvent, QueueStopEvent],
*,
graph_runtime_state: Optional[GraphRuntimeState] = None,
trace_manager: Optional[TraceQueueManager] = None,
graph_runtime_state: GraphRuntimeState | None = None,
trace_manager: TraceQueueManager | None = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle workflow failed and stop events."""
@@ -581,8 +581,8 @@ class WorkflowAppGenerateTaskPipeline:
self,
event: QueueTextChunkEvent,
*,
tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
queue_message: Optional[Union[WorkflowQueueMessage, MessageQueueMessage]] = None,
tts_publisher: AppGeneratorTTSPublisher | None = None,
queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None,
**kwargs,
) -> Generator[StreamResponse, None, None]:
"""Handle text chunk events."""
@@ -635,10 +635,10 @@ class WorkflowAppGenerateTaskPipeline:
self,
event: Any,
*,
graph_runtime_state: Optional[GraphRuntimeState] = None,
tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
trace_manager: Optional[TraceQueueManager] = None,
queue_message: Optional[Union[WorkflowQueueMessage, MessageQueueMessage]] = None,
graph_runtime_state: GraphRuntimeState | None = None,
tts_publisher: AppGeneratorTTSPublisher | None = None,
trace_manager: TraceQueueManager | None = None,
queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None,
) -> Generator[StreamResponse, None, None]:
"""Dispatch events using elegant pattern matching."""
handlers = self._get_event_handlers()
@@ -701,8 +701,8 @@ class WorkflowAppGenerateTaskPipeline:
def _process_stream_response(
self,
tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
trace_manager: Optional[TraceQueueManager] = None,
tts_publisher: AppGeneratorTTSPublisher | None = None,
trace_manager: TraceQueueManager | None = None,
) -> Generator[StreamResponse, None, None]:
"""
Process stream response using elegant Fluent Python patterns.
@@ -769,7 +769,7 @@ class WorkflowAppGenerateTaskPipeline:
session.commit()
def _text_chunk_to_stream_response(
self, text: str, from_variable_selector: Optional[list[str]] = None
self, text: str, from_variable_selector: list[str] | None = None
) -> TextChunkStreamResponse:
"""
Handle completed event.

View File

@@ -1,6 +1,6 @@
from collections.abc import Mapping, Sequence
from enum import StrEnum
from typing import Any, Optional
from typing import Any
from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator
@@ -96,7 +96,7 @@ class AppGenerateEntity(BaseModel):
# app config
app_config: Any = None
file_upload_config: Optional[FileUploadConfig] = None
file_upload_config: FileUploadConfig | None = None
inputs: Mapping[str, Any]
files: Sequence[File]
@@ -114,7 +114,7 @@ class AppGenerateEntity(BaseModel):
# tracing instance
# Using Any to avoid circular import with TraceQueueManager
trace_manager: Optional[Any] = None
trace_manager: Any | None = None
class EasyUIBasedAppGenerateEntity(AppGenerateEntity):
@@ -126,7 +126,7 @@ class EasyUIBasedAppGenerateEntity(AppGenerateEntity):
app_config: EasyUIBasedAppConfig = None # type: ignore
model_conf: ModelConfigWithCredentialsEntity
query: Optional[str] = None
query: str | None = None
# pydantic configs
model_config = ConfigDict(protected_namespaces=())
@@ -137,8 +137,8 @@ class ConversationAppGenerateEntity(AppGenerateEntity):
Base entity for conversation-based app generation.
"""
conversation_id: Optional[str] = None
parent_message_id: Optional[str] = Field(
conversation_id: str | None = None
parent_message_id: str | None = Field(
default=None,
description=(
"Starting from v0.9.0, parent_message_id is used to support message regeneration for internal chat API."
@@ -188,7 +188,7 @@ class AdvancedChatAppGenerateEntity(ConversationAppGenerateEntity):
# app config
app_config: WorkflowUIBasedAppConfig = None # type: ignore
workflow_run_id: Optional[str] = None
workflow_run_id: str | None = None
query: str
class SingleIterationRunEntity(BaseModel):
@@ -199,7 +199,7 @@ class AdvancedChatAppGenerateEntity(ConversationAppGenerateEntity):
node_id: str
inputs: Mapping
single_iteration_run: Optional[SingleIterationRunEntity] = None
single_iteration_run: SingleIterationRunEntity | None = None
class SingleLoopRunEntity(BaseModel):
"""
@@ -209,7 +209,7 @@ class AdvancedChatAppGenerateEntity(ConversationAppGenerateEntity):
node_id: str
inputs: Mapping
single_loop_run: Optional[SingleLoopRunEntity] = None
single_loop_run: SingleLoopRunEntity | None = None
class WorkflowAppGenerateEntity(AppGenerateEntity):
@@ -229,7 +229,7 @@ class WorkflowAppGenerateEntity(AppGenerateEntity):
node_id: str
inputs: dict
single_iteration_run: Optional[SingleIterationRunEntity] = None
single_iteration_run: SingleIterationRunEntity | None = None
class SingleLoopRunEntity(BaseModel):
"""
@@ -239,4 +239,4 @@ class WorkflowAppGenerateEntity(AppGenerateEntity):
node_id: str
inputs: dict
single_loop_run: Optional[SingleLoopRunEntity] = None
single_loop_run: SingleLoopRunEntity | None = None

View File

@@ -1,7 +1,7 @@
from collections.abc import Mapping, Sequence
from datetime import datetime
from enum import StrEnum, auto
from typing import Any, Optional
from typing import Any
from pydantic import BaseModel
@@ -81,20 +81,20 @@ class QueueIterationStartEvent(AppQueueEvent):
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
parallel_id: str | None = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
parallel_start_node_id: str | None = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
start_at: datetime
node_run_index: int
inputs: Optional[Mapping[str, Any]] = None
predecessor_node_id: Optional[str] = None
metadata: Optional[Mapping[str, Any]] = None
inputs: Mapping[str, Any] | None = None
predecessor_node_id: str | None = None
metadata: Mapping[str, Any] | None = None
class QueueIterationNextEvent(AppQueueEvent):
@@ -109,19 +109,19 @@ class QueueIterationNextEvent(AppQueueEvent):
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
parallel_id: str | None = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
parallel_start_node_id: str | None = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
parallel_mode_run_id: Optional[str] = None
parallel_mode_run_id: str | None = None
"""iteration run in parallel mode run id"""
node_run_index: int
output: Optional[Any] = None # output for the current iteration
duration: Optional[float] = None
output: Any | None = None # output for the current iteration
duration: float | None = None
class QueueIterationCompletedEvent(AppQueueEvent):
@@ -135,23 +135,23 @@ class QueueIterationCompletedEvent(AppQueueEvent):
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
parallel_id: str | None = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
parallel_start_node_id: str | None = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
start_at: datetime
node_run_index: int
inputs: Optional[Mapping[str, Any]] = None
outputs: Optional[Mapping[str, Any]] = None
metadata: Optional[Mapping[str, Any]] = None
inputs: Mapping[str, Any] | None = None
outputs: Mapping[str, Any] | None = None
metadata: Mapping[str, Any] | None = None
steps: int = 0
error: Optional[str] = None
error: str | None = None
class QueueLoopStartEvent(AppQueueEvent):
@@ -164,20 +164,20 @@ class QueueLoopStartEvent(AppQueueEvent):
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
parallel_id: str | None = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
parallel_start_node_id: str | None = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
start_at: datetime
node_run_index: int
inputs: Optional[Mapping[str, Any]] = None
predecessor_node_id: Optional[str] = None
metadata: Optional[Mapping[str, Any]] = None
inputs: Mapping[str, Any] | None = None
predecessor_node_id: str | None = None
metadata: Mapping[str, Any] | None = None
class QueueLoopNextEvent(AppQueueEvent):
@@ -192,19 +192,19 @@ class QueueLoopNextEvent(AppQueueEvent):
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
parallel_id: str | None = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
parallel_start_node_id: str | None = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
parallel_mode_run_id: Optional[str] = None
parallel_mode_run_id: str | None = None
"""iteration run in parallel mode run id"""
node_run_index: int
output: Optional[Any] = None # output for the current loop
duration: Optional[float] = None
output: Any | None = None # output for the current loop
duration: float | None = None
class QueueLoopCompletedEvent(AppQueueEvent):
@@ -218,23 +218,23 @@ class QueueLoopCompletedEvent(AppQueueEvent):
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
parallel_id: str | None = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
parallel_start_node_id: str | None = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
start_at: datetime
node_run_index: int
inputs: Optional[Mapping[str, Any]] = None
outputs: Optional[Mapping[str, Any]] = None
metadata: Optional[Mapping[str, Any]] = None
inputs: Mapping[str, Any] | None = None
outputs: Mapping[str, Any] | None = None
metadata: Mapping[str, Any] | None = None
steps: int = 0
error: Optional[str] = None
error: str | None = None
class QueueTextChunkEvent(AppQueueEvent):
@@ -244,11 +244,11 @@ class QueueTextChunkEvent(AppQueueEvent):
event: QueueEvent = QueueEvent.TEXT_CHUNK
text: str
from_variable_selector: Optional[list[str]] = None
from_variable_selector: list[str] | None = None
"""from variable selector"""
in_iteration_id: Optional[str] = None
in_iteration_id: str | None = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
in_loop_id: str | None = None
"""loop id if node is in loop"""
@@ -285,9 +285,9 @@ class QueueRetrieverResourcesEvent(AppQueueEvent):
event: QueueEvent = QueueEvent.RETRIEVER_RESOURCES
retriever_resources: Sequence[RetrievalSourceMetadata]
in_iteration_id: Optional[str] = None
in_iteration_id: str | None = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
in_loop_id: str | None = None
"""loop id if node is in loop"""
@@ -306,7 +306,7 @@ class QueueMessageEndEvent(AppQueueEvent):
"""
event: QueueEvent = QueueEvent.MESSAGE_END
llm_result: Optional[LLMResult] = None
llm_result: LLMResult | None = None
class QueueAdvancedChatMessageEndEvent(AppQueueEvent):
@@ -332,7 +332,7 @@ class QueueWorkflowSucceededEvent(AppQueueEvent):
"""
event: QueueEvent = QueueEvent.WORKFLOW_SUCCEEDED
outputs: Optional[dict[str, Any]] = None
outputs: dict[str, Any] | None = None
class QueueWorkflowFailedEvent(AppQueueEvent):
@@ -352,7 +352,7 @@ class QueueWorkflowPartialSuccessEvent(AppQueueEvent):
event: QueueEvent = QueueEvent.WORKFLOW_PARTIAL_SUCCEEDED
exceptions_count: int
outputs: Optional[dict[str, Any]] = None
outputs: dict[str, Any] | None = None
class QueueNodeStartedEvent(AppQueueEvent):
@@ -367,23 +367,23 @@ class QueueNodeStartedEvent(AppQueueEvent):
node_type: NodeType
node_data: BaseNodeData
node_run_index: int = 1
predecessor_node_id: Optional[str] = None
parallel_id: Optional[str] = None
predecessor_node_id: str | None = None
parallel_id: str | None = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
parallel_start_node_id: str | None = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
in_iteration_id: str | None = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
in_loop_id: str | None = None
"""loop id if node is in loop"""
start_at: datetime
parallel_mode_run_id: Optional[str] = None
parallel_mode_run_id: str | None = None
"""iteration run in parallel mode run id"""
agent_strategy: Optional[AgentNodeStrategyInit] = None
agent_strategy: AgentNodeStrategyInit | None = None
class QueueNodeSucceededEvent(AppQueueEvent):
@@ -397,30 +397,30 @@ class QueueNodeSucceededEvent(AppQueueEvent):
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
parallel_id: str | None = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
parallel_start_node_id: str | None = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
in_iteration_id: str | None = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
in_loop_id: str | None = None
"""loop id if node is in loop"""
start_at: datetime
inputs: Optional[Mapping[str, Any]] = None
process_data: Optional[Mapping[str, Any]] = None
outputs: Optional[Mapping[str, Any]] = None
execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
inputs: Mapping[str, Any] | None = None
process_data: Mapping[str, Any] | None = None
outputs: Mapping[str, Any] | None = None
execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
error: Optional[str] = None
error: str | None = None
"""single iteration duration map"""
iteration_duration_map: Optional[dict[str, float]] = None
iteration_duration_map: dict[str, float] | None = None
"""single loop duration map"""
loop_duration_map: Optional[dict[str, float]] = None
loop_duration_map: dict[str, float] | None = None
class QueueAgentLogEvent(AppQueueEvent):
@@ -436,7 +436,7 @@ class QueueAgentLogEvent(AppQueueEvent):
error: str | None = None
status: str
data: Mapping[str, Any]
metadata: Optional[Mapping[str, Any]] = None
metadata: Mapping[str, Any] | None = None
node_id: str
@@ -445,10 +445,10 @@ class QueueNodeRetryEvent(QueueNodeStartedEvent):
event: QueueEvent = QueueEvent.RETRY
inputs: Optional[Mapping[str, Any]] = None
process_data: Optional[Mapping[str, Any]] = None
outputs: Optional[Mapping[str, Any]] = None
execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
inputs: Mapping[str, Any] | None = None
process_data: Mapping[str, Any] | None = None
outputs: Mapping[str, Any] | None = None
execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
error: str
retry_index: int # retry index
@@ -465,24 +465,24 @@ class QueueNodeInIterationFailedEvent(AppQueueEvent):
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
parallel_id: str | None = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
parallel_start_node_id: str | None = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
in_iteration_id: str | None = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
in_loop_id: str | None = None
"""loop id if node is in loop"""
start_at: datetime
inputs: Optional[Mapping[str, Any]] = None
process_data: Optional[Mapping[str, Any]] = None
outputs: Optional[Mapping[str, Any]] = None
execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
inputs: Mapping[str, Any] | None = None
process_data: Mapping[str, Any] | None = None
outputs: Mapping[str, Any] | None = None
execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
error: str
@@ -498,24 +498,24 @@ class QueueNodeInLoopFailedEvent(AppQueueEvent):
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
parallel_id: str | None = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
parallel_start_node_id: str | None = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
in_iteration_id: str | None = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
in_loop_id: str | None = None
"""loop id if node is in loop"""
start_at: datetime
inputs: Optional[Mapping[str, Any]] = None
process_data: Optional[Mapping[str, Any]] = None
outputs: Optional[Mapping[str, Any]] = None
execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
inputs: Mapping[str, Any] | None = None
process_data: Mapping[str, Any] | None = None
outputs: Mapping[str, Any] | None = None
execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
error: str
@@ -531,24 +531,24 @@ class QueueNodeExceptionEvent(AppQueueEvent):
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
parallel_id: str | None = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
parallel_start_node_id: str | None = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
in_iteration_id: str | None = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
in_loop_id: str | None = None
"""loop id if node is in loop"""
start_at: datetime
inputs: Optional[Mapping[str, Any]] = None
process_data: Optional[Mapping[str, Any]] = None
outputs: Optional[Mapping[str, Any]] = None
execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
inputs: Mapping[str, Any] | None = None
process_data: Mapping[str, Any] | None = None
outputs: Mapping[str, Any] | None = None
execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
error: str
@@ -564,24 +564,24 @@ class QueueNodeFailedEvent(AppQueueEvent):
node_id: str
node_type: NodeType
node_data: BaseNodeData
parallel_id: Optional[str] = None
parallel_id: str | None = None
"""parallel id if node is in parallel"""
parallel_start_node_id: Optional[str] = None
parallel_start_node_id: str | None = None
"""parallel start node id if node is in parallel"""
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
in_iteration_id: str | None = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
in_loop_id: str | None = None
"""loop id if node is in loop"""
start_at: datetime
inputs: Optional[Mapping[str, Any]] = None
process_data: Optional[Mapping[str, Any]] = None
outputs: Optional[Mapping[str, Any]] = None
execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
inputs: Mapping[str, Any] | None = None
process_data: Mapping[str, Any] | None = None
outputs: Mapping[str, Any] | None = None
execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
error: str
@@ -610,7 +610,7 @@ class QueueErrorEvent(AppQueueEvent):
"""
event: QueueEvent = QueueEvent.ERROR
error: Optional[Any] = None
error: Any | None = None
class QueuePingEvent(AppQueueEvent):
@@ -689,13 +689,13 @@ class QueueParallelBranchRunStartedEvent(AppQueueEvent):
parallel_id: str
parallel_start_node_id: str
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
in_iteration_id: str | None = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
in_loop_id: str | None = None
"""loop id if node is in loop"""
@@ -708,13 +708,13 @@ class QueueParallelBranchRunSucceededEvent(AppQueueEvent):
parallel_id: str
parallel_start_node_id: str
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
in_iteration_id: str | None = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
in_loop_id: str | None = None
"""loop id if node is in loop"""
@@ -727,12 +727,12 @@ class QueueParallelBranchRunFailedEvent(AppQueueEvent):
parallel_id: str
parallel_start_node_id: str
parent_parallel_id: Optional[str] = None
parent_parallel_id: str | None = None
"""parent parallel id if node is in parallel"""
parent_parallel_start_node_id: Optional[str] = None
parent_parallel_start_node_id: str | None = None
"""parent parallel start node id if node is in parallel"""
in_iteration_id: Optional[str] = None
in_iteration_id: str | None = None
"""iteration id if node is in iteration"""
in_loop_id: Optional[str] = None
in_loop_id: str | None = None
"""loop id if node is in loop"""
error: str

View File

@@ -1,6 +1,6 @@
from collections.abc import Mapping, Sequence
from enum import StrEnum, auto
from typing import Any, Optional
from typing import Any
from pydantic import BaseModel, ConfigDict, Field
@@ -110,7 +110,7 @@ class MessageStreamResponse(StreamResponse):
event: StreamEvent = StreamEvent.MESSAGE
id: str
answer: str
from_variable_selector: Optional[list[str]] = None
from_variable_selector: list[str] | None = None
class MessageAudioStreamResponse(StreamResponse):
@@ -139,7 +139,7 @@ class MessageEndStreamResponse(StreamResponse):
event: StreamEvent = StreamEvent.MESSAGE_END
id: str
metadata: dict = Field(default_factory=dict)
files: Optional[Sequence[Mapping[str, Any]]] = None
files: Sequence[Mapping[str, Any]] | None = None
class MessageFileStreamResponse(StreamResponse):
@@ -172,12 +172,12 @@ class AgentThoughtStreamResponse(StreamResponse):
event: StreamEvent = StreamEvent.AGENT_THOUGHT
id: str
position: int
thought: Optional[str] = None
observation: Optional[str] = None
tool: Optional[str] = None
tool_labels: Optional[dict] = None
tool_input: Optional[str] = None
message_files: Optional[list[str]] = None
thought: str | None = None
observation: str | None = None
tool: str | None = None
tool_labels: dict | None = None
tool_input: str | None = None
message_files: list[str] | None = None
class AgentMessageStreamResponse(StreamResponse):
@@ -223,16 +223,16 @@ class WorkflowFinishStreamResponse(StreamResponse):
id: str
workflow_id: str
status: str
outputs: Optional[Mapping[str, Any]] = None
error: Optional[str] = None
outputs: Mapping[str, Any] | None = None
error: str | None = None
elapsed_time: float
total_tokens: int
total_steps: int
created_by: Optional[dict] = None
created_by: dict | None = None
created_at: int
finished_at: int
exceptions_count: Optional[int] = 0
files: Optional[Sequence[Mapping[str, Any]]] = []
exceptions_count: int | None = 0
files: Sequence[Mapping[str, Any]] | None = []
event: StreamEvent = StreamEvent.WORKFLOW_FINISHED
workflow_run_id: str
@@ -254,18 +254,18 @@ class NodeStartStreamResponse(StreamResponse):
node_type: str
title: str
index: int
predecessor_node_id: Optional[str] = None
inputs: Optional[Mapping[str, Any]] = None
predecessor_node_id: str | None = None
inputs: Mapping[str, Any] | None = None
created_at: int
extras: dict = Field(default_factory=dict)
parallel_id: Optional[str] = None
parallel_start_node_id: Optional[str] = None
parent_parallel_id: Optional[str] = None
parent_parallel_start_node_id: Optional[str] = None
iteration_id: Optional[str] = None
loop_id: Optional[str] = None
parallel_run_id: Optional[str] = None
agent_strategy: Optional[AgentNodeStrategyInit] = None
parallel_id: str | None = None
parallel_start_node_id: str | None = None
parent_parallel_id: str | None = None
parent_parallel_start_node_id: str | None = None
iteration_id: str | None = None
loop_id: str | None = None
parallel_run_id: str | None = None
agent_strategy: AgentNodeStrategyInit | None = None
event: StreamEvent = StreamEvent.NODE_STARTED
workflow_run_id: str
@@ -311,23 +311,23 @@ class NodeFinishStreamResponse(StreamResponse):
node_type: str
title: str
index: int
predecessor_node_id: Optional[str] = None
inputs: Optional[Mapping[str, Any]] = None
process_data: Optional[Mapping[str, Any]] = None
outputs: Optional[Mapping[str, Any]] = None
predecessor_node_id: str | None = None
inputs: Mapping[str, Any] | None = None
process_data: Mapping[str, Any] | None = None
outputs: Mapping[str, Any] | None = None
status: str
error: Optional[str] = None
error: str | None = None
elapsed_time: float
execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
created_at: int
finished_at: int
files: Optional[Sequence[Mapping[str, Any]]] = []
parallel_id: Optional[str] = None
parallel_start_node_id: Optional[str] = None
parent_parallel_id: Optional[str] = None
parent_parallel_start_node_id: Optional[str] = None
iteration_id: Optional[str] = None
loop_id: Optional[str] = None
files: Sequence[Mapping[str, Any]] | None = []
parallel_id: str | None = None
parallel_start_node_id: str | None = None
parent_parallel_id: str | None = None
parent_parallel_start_node_id: str | None = None
iteration_id: str | None = None
loop_id: str | None = None
event: StreamEvent = StreamEvent.NODE_FINISHED
workflow_run_id: str
@@ -380,23 +380,23 @@ class NodeRetryStreamResponse(StreamResponse):
node_type: str
title: str
index: int
predecessor_node_id: Optional[str] = None
inputs: Optional[Mapping[str, Any]] = None
process_data: Optional[Mapping[str, Any]] = None
outputs: Optional[Mapping[str, Any]] = None
predecessor_node_id: str | None = None
inputs: Mapping[str, Any] | None = None
process_data: Mapping[str, Any] | None = None
outputs: Mapping[str, Any] | None = None
status: str
error: Optional[str] = None
error: str | None = None
elapsed_time: float
execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
created_at: int
finished_at: int
files: Optional[Sequence[Mapping[str, Any]]] = []
parallel_id: Optional[str] = None
parallel_start_node_id: Optional[str] = None
parent_parallel_id: Optional[str] = None
parent_parallel_start_node_id: Optional[str] = None
iteration_id: Optional[str] = None
loop_id: Optional[str] = None
files: Sequence[Mapping[str, Any]] | None = []
parallel_id: str | None = None
parallel_start_node_id: str | None = None
parent_parallel_id: str | None = None
parent_parallel_start_node_id: str | None = None
iteration_id: str | None = None
loop_id: str | None = None
retry_index: int = 0
event: StreamEvent = StreamEvent.NODE_RETRY
@@ -448,10 +448,10 @@ class ParallelBranchStartStreamResponse(StreamResponse):
parallel_id: str
parallel_branch_id: str
parent_parallel_id: Optional[str] = None
parent_parallel_start_node_id: Optional[str] = None
iteration_id: Optional[str] = None
loop_id: Optional[str] = None
parent_parallel_id: str | None = None
parent_parallel_start_node_id: str | None = None
iteration_id: str | None = None
loop_id: str | None = None
created_at: int
event: StreamEvent = StreamEvent.PARALLEL_BRANCH_STARTED
@@ -471,12 +471,12 @@ class ParallelBranchFinishedStreamResponse(StreamResponse):
parallel_id: str
parallel_branch_id: str
parent_parallel_id: Optional[str] = None
parent_parallel_start_node_id: Optional[str] = None
iteration_id: Optional[str] = None
loop_id: Optional[str] = None
parent_parallel_id: str | None = None
parent_parallel_start_node_id: str | None = None
iteration_id: str | None = None
loop_id: str | None = None
status: str
error: Optional[str] = None
error: str | None = None
created_at: int
event: StreamEvent = StreamEvent.PARALLEL_BRANCH_FINISHED
@@ -502,8 +502,8 @@ class IterationNodeStartStreamResponse(StreamResponse):
extras: dict = Field(default_factory=dict)
metadata: Mapping = {}
inputs: Mapping = {}
parallel_id: Optional[str] = None
parallel_start_node_id: Optional[str] = None
parallel_id: str | None = None
parallel_start_node_id: str | None = None
event: StreamEvent = StreamEvent.ITERATION_STARTED
workflow_run_id: str
@@ -526,12 +526,12 @@ class IterationNodeNextStreamResponse(StreamResponse):
title: str
index: int
created_at: int
pre_iteration_output: Optional[Any] = None
pre_iteration_output: Any | None = None
extras: dict = Field(default_factory=dict)
parallel_id: Optional[str] = None
parallel_start_node_id: Optional[str] = None
parallel_mode_run_id: Optional[str] = None
duration: Optional[float] = None
parallel_id: str | None = None
parallel_start_node_id: str | None = None
parallel_mode_run_id: str | None = None
duration: float | None = None
event: StreamEvent = StreamEvent.ITERATION_NEXT
workflow_run_id: str
@@ -552,19 +552,19 @@ class IterationNodeCompletedStreamResponse(StreamResponse):
node_id: str
node_type: str
title: str
outputs: Optional[Mapping] = None
outputs: Mapping | None = None
created_at: int
extras: Optional[dict] = None
inputs: Optional[Mapping] = None
extras: dict | None = None
inputs: Mapping | None = None
status: WorkflowNodeExecutionStatus
error: Optional[str] = None
error: str | None = None
elapsed_time: float
total_tokens: int
execution_metadata: Optional[Mapping] = None
execution_metadata: Mapping | None = None
finished_at: int
steps: int
parallel_id: Optional[str] = None
parallel_start_node_id: Optional[str] = None
parallel_id: str | None = None
parallel_start_node_id: str | None = None
event: StreamEvent = StreamEvent.ITERATION_COMPLETED
workflow_run_id: str
@@ -589,8 +589,8 @@ class LoopNodeStartStreamResponse(StreamResponse):
extras: dict = Field(default_factory=dict)
metadata: Mapping = {}
inputs: Mapping = {}
parallel_id: Optional[str] = None
parallel_start_node_id: Optional[str] = None
parallel_id: str | None = None
parallel_start_node_id: str | None = None
event: StreamEvent = StreamEvent.LOOP_STARTED
workflow_run_id: str
@@ -613,12 +613,12 @@ class LoopNodeNextStreamResponse(StreamResponse):
title: str
index: int
created_at: int
pre_loop_output: Optional[Any] = None
pre_loop_output: Any | None = None
extras: dict = Field(default_factory=dict)
parallel_id: Optional[str] = None
parallel_start_node_id: Optional[str] = None
parallel_mode_run_id: Optional[str] = None
duration: Optional[float] = None
parallel_id: str | None = None
parallel_start_node_id: str | None = None
parallel_mode_run_id: str | None = None
duration: float | None = None
event: StreamEvent = StreamEvent.LOOP_NEXT
workflow_run_id: str
@@ -639,19 +639,19 @@ class LoopNodeCompletedStreamResponse(StreamResponse):
node_id: str
node_type: str
title: str
outputs: Optional[Mapping] = None
outputs: Mapping | None = None
created_at: int
extras: Optional[dict] = None
inputs: Optional[Mapping] = None
extras: dict | None = None
inputs: Mapping | None = None
status: WorkflowNodeExecutionStatus
error: Optional[str] = None
error: str | None = None
elapsed_time: float
total_tokens: int
execution_metadata: Optional[Mapping] = None
execution_metadata: Mapping | None = None
finished_at: int
steps: int
parallel_id: Optional[str] = None
parallel_start_node_id: Optional[str] = None
parallel_id: str | None = None
parallel_start_node_id: str | None = None
event: StreamEvent = StreamEvent.LOOP_COMPLETED
workflow_run_id: str
@@ -669,7 +669,7 @@ class TextChunkStreamResponse(StreamResponse):
"""
text: str
from_variable_selector: Optional[list[str]] = None
from_variable_selector: list[str] | None = None
event: StreamEvent = StreamEvent.TEXT_CHUNK
data: Data
@@ -731,7 +731,7 @@ class WorkflowAppStreamResponse(AppStreamResponse):
WorkflowAppStreamResponse entity
"""
workflow_run_id: Optional[str] = None
workflow_run_id: str | None = None
class AppBlockingResponse(BaseModel):
@@ -796,8 +796,8 @@ class WorkflowAppBlockingResponse(AppBlockingResponse):
id: str
workflow_id: str
status: str
outputs: Optional[Mapping[str, Any]] = None
error: Optional[str] = None
outputs: Mapping[str, Any] | None = None
error: str | None = None
elapsed_time: float
total_tokens: int
total_steps: int
@@ -825,7 +825,7 @@ class AgentLogStreamResponse(StreamResponse):
error: str | None = None
status: str
data: Mapping[str, Any]
metadata: Optional[Mapping[str, Any]] = None
metadata: Mapping[str, Any] | None = None
node_id: str
event: StreamEvent = StreamEvent.AGENT_LOG

View File

@@ -1,5 +1,4 @@
import logging
from typing import Optional
from sqlalchemy import select
@@ -17,7 +16,7 @@ logger = logging.getLogger(__name__)
class AnnotationReplyFeature:
def query(
self, app_record: App, message: Message, query: str, user_id: str, invoke_from: InvokeFrom
) -> Optional[MessageAnnotation]:
) -> MessageAnnotation | None:
"""
Query app annotations to reply
:param app_record: app record

View File

@@ -3,7 +3,7 @@ import time
import uuid
from collections.abc import Generator, Mapping
from datetime import timedelta
from typing import Any, Optional, Union
from typing import Any, Union
from core.errors.error import AppInvokeQuotaExceededError
from extensions.ext_redis import redis_client
@@ -63,7 +63,7 @@ class RateLimit:
if timeout_requests:
redis_client.hdel(self.active_requests_key, *timeout_requests)
def enter(self, request_id: Optional[str] = None) -> str:
def enter(self, request_id: str | None = None) -> str:
if self.disabled():
return RateLimit._UNLIMITED_REQUEST_ID
if time.time() - self.last_recalculate_time > RateLimit._ACTIVE_REQUESTS_COUNT_FLUSH_INTERVAL:

View File

@@ -1,6 +1,5 @@
import logging
import time
from typing import Optional
from sqlalchemy import select
from sqlalchemy.orm import Session
@@ -101,7 +100,7 @@ class BasedGenerateTaskPipeline:
"""
return PingStreamResponse(task_id=self._application_generate_entity.task_id)
def _init_output_moderation(self) -> Optional[OutputModeration]:
def _init_output_moderation(self) -> OutputModeration | None:
"""
Init output moderation.
:return:
@@ -118,7 +117,7 @@ class BasedGenerateTaskPipeline:
)
return None
def handle_output_moderation_when_task_finished(self, completion: str) -> Optional[str]:
def handle_output_moderation_when_task_finished(self, completion: str) -> str | None:
"""
Handle output moderation when task finished.
:param completion: completion

View File

@@ -2,7 +2,7 @@ import logging
import time
from collections.abc import Generator
from threading import Thread
from typing import Optional, Union, cast
from typing import Union, cast
from sqlalchemy import select
from sqlalchemy.orm import Session
@@ -109,7 +109,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
task_state=self._task_state,
)
self._conversation_name_generate_thread: Optional[Thread] = None
self._conversation_name_generate_thread: Thread | None = None
def process(
self,
@@ -209,7 +209,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
return None
def _wrapper_process_stream_response(
self, trace_manager: Optional[TraceQueueManager] = None
self, trace_manager: TraceQueueManager | None = None
) -> Generator[StreamResponse, None, None]:
tenant_id = self._application_generate_entity.app_config.tenant_id
task_id = self._application_generate_entity.task_id
@@ -252,7 +252,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
yield MessageAudioEndStreamResponse(audio="", task_id=task_id)
def _process_stream_response(
self, publisher: Optional[AppGeneratorTTSPublisher], trace_manager: Optional[TraceQueueManager] = None
self, publisher: AppGeneratorTTSPublisher | None, trace_manager: TraceQueueManager | None = None
) -> Generator[StreamResponse, None, None]:
"""
Process stream response.
@@ -362,7 +362,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
if self._conversation_name_generate_thread:
self._conversation_name_generate_thread.join()
def _save_message(self, *, session: Session, trace_manager: Optional[TraceQueueManager] = None):
def _save_message(self, *, session: Session, trace_manager: TraceQueueManager | None = None):
"""
Save message.
:return:
@@ -466,14 +466,14 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
task_id=self._application_generate_entity.task_id, id=message_id, answer=answer
)
def _agent_thought_to_stream_response(self, event: QueueAgentThoughtEvent) -> Optional[AgentThoughtStreamResponse]:
def _agent_thought_to_stream_response(self, event: QueueAgentThoughtEvent) -> AgentThoughtStreamResponse | None:
"""
Agent thought to stream response.
:param event: agent thought event
:return:
"""
with Session(db.engine, expire_on_commit=False) as session:
agent_thought: Optional[MessageAgentThought] = (
agent_thought: MessageAgentThought | None = (
session.query(MessageAgentThought).where(MessageAgentThought.id == event.agent_thought_id).first()
)

View File

@@ -1,6 +1,6 @@
import logging
from threading import Thread
from typing import Optional, Union
from typing import Union
from flask import Flask, current_app
from sqlalchemy import select
@@ -52,7 +52,7 @@ class MessageCycleManager:
self._application_generate_entity = application_generate_entity
self._task_state = task_state
def generate_conversation_name(self, *, conversation_id: str, query: str) -> Optional[Thread]:
def generate_conversation_name(self, *, conversation_id: str, query: str) -> Thread | None:
"""
Generate conversation name.
:param conversation_id: conversation id
@@ -111,7 +111,7 @@ class MessageCycleManager:
db.session.commit()
db.session.close()
def handle_annotation_reply(self, event: QueueAnnotationReplyEvent) -> Optional[MessageAnnotation]:
def handle_annotation_reply(self, event: QueueAnnotationReplyEvent) -> MessageAnnotation | None:
"""
Handle annotation reply.
:param event: event
@@ -141,7 +141,7 @@ class MessageCycleManager:
if self._application_generate_entity.app_config.additional_features.show_retrieve_source:
self._task_state.metadata.retriever_resources = event.retriever_resources
def message_file_to_stream_response(self, event: QueueMessageFileEvent) -> Optional[MessageFileStreamResponse]:
def message_file_to_stream_response(self, event: QueueMessageFileEvent) -> MessageFileStreamResponse | None:
"""
Message file to stream response.
:param event: event
@@ -180,7 +180,7 @@ class MessageCycleManager:
return None
def message_to_stream_response(
self, answer: str, message_id: str, from_variable_selector: Optional[list[str]] = None
self, answer: str, message_id: str, from_variable_selector: list[str] | None = None
) -> MessageStreamResponse:
"""
Message to stream response.