support returning structured output when using non-streaming LLM API invocation (#26451)

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
goofy
2025-09-30 10:58:35 +08:00
committed by GitHub
parent 8d803a26eb
commit 86c3c58e64
2 changed files with 12 additions and 1 deletion

@@ -20,6 +20,7 @@ class ModelInvokeCompletedEvent(NodeEventBase):
     usage: LLMUsage
     finish_reason: str | None = None
     reasoning_content: str | None = None
+    structured_output: dict | None = None
 
 
 class RunRetryEvent(NodeEventBase):
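
For context, a minimal sketch of the event after this change and of how the new field might be consumed. `LLMUsage` and `NodeEventBase` are stubbed here rather than imported from the repository, and `extract_structured_output` is an illustrative helper, not part of the codebase:

from pydantic import BaseModel


class LLMUsage(BaseModel):
    """Stub for the real usage entity in the model runtime."""
    prompt_tokens: int = 0
    completion_tokens: int = 0


class NodeEventBase(BaseModel):
    """Simplified stand-in for the real node event base class."""


class ModelInvokeCompletedEvent(NodeEventBase):
    usage: LLMUsage
    finish_reason: str | None = None
    reasoning_content: str | None = None
    # New in this commit: parsed structured output from a non-streaming invocation.
    structured_output: dict | None = None


def extract_structured_output(event: ModelInvokeCompletedEvent) -> dict | None:
    # Hypothetical consumer: returns the parsed dict when the model produced
    # structured output, otherwise None (e.g. a plain-text completion).
    return event.structured_output

Because the field defaults to None, existing producers and consumers of ModelInvokeCompletedEvent keep working unchanged; only non-streaming invocations that actually parse structured output need to set it.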