feat: Phase 4 - LLM/Agent fallback chain, cross-agent knowledge sharing, async agent execution

- 4.1 Fallback chain: LLM fallback_llm config in AgentLLMConfig, retry with alternate model on API failure; Agent fallback_agent in DAG nodes
- 4.2 Knowledge sharing: GlobalKnowledge model with embedding-based semantic search, auto-extraction of tool names as tags after execution
- 4.3 Async execution: execute_agent_task fully implemented with AgentRuntime, scheduler dual-path for workflow/non-workflow agents

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
renjianbo
2026-05-05 00:27:54 +08:00
parent 7e00b027d4
commit 592bca4f39
7 changed files with 461 additions and 70 deletions

View File

@@ -95,6 +95,11 @@ class AgentMemory:
if vector_text:
parts.append(vector_text)
# 3. 全局知识检索:从 GlobalKnowledge 表加载相关条目
global_text = await self._global_knowledge_search(query)
if global_text:
parts.append(global_text)
return "\n\n".join(parts) if parts else ""
async def _vector_search(self, query: str = "") -> str:
@@ -171,6 +176,119 @@ class AgentMemory:
if db:
db.close()
async def _global_knowledge_search(self, query: str = "") -> str:
    """Retrieve relevant entries from the GlobalKnowledge table.

    With a non-blank ``query`` the 50 most recent rows are ranked by
    embedding similarity and the top matches are returned; without a
    query the five most recent entries are returned instead. The result
    is a formatted markdown section, or "" when nothing is found.
    Lookups are strictly best-effort: every failure is logged and
    swallowed so memory retrieval never breaks the caller.
    """
    from app.models.agent import GlobalKnowledge

    db: Optional[Session] = None
    try:
        db = SessionLocal()
        candidates = (
            db.query(GlobalKnowledge)
            .order_by(GlobalKnowledge.created_at.desc())
            .limit(50)
            .all()
        )
        if not candidates:
            return ""

        if not (query and query.strip()):
            # No query: fall back to the five most recent entries.
            section = ["## 全局知识库(最近)"]
            for idx, row in enumerate(candidates[:5], 1):
                suffix = f" [{', '.join(row.tags[:3])}]" if row.tags else ""
                section.append(f"{idx}.{suffix} {row.content[:500]}")
            return "\n".join(section)

        # Query path: keep only rows whose stored embedding deserializes
        # to a non-empty vector; the rest cannot be ranked.
        pool: List[VectorEntry] = []
        for row in candidates:
            if not row.embedding:
                continue
            try:
                vec = embedding_service.deserialize_embedding(row.embedding)
            except Exception:
                vec = []
            if vec:
                pool.append({
                    "id": row.id,
                    "scope_kind": "global",
                    "scope_id": "global",
                    "content_text": row.content,
                    "embedding": vec,
                    "metadata": {
                        "source_agent_id": row.source_agent_id,
                        "tags": row.tags or [],
                    },
                })
        if not pool:
            return ""

        query_vec = await embedding_service.generate_embedding(query)
        if not query_vec:
            return ""

        hits = await embedding_service.similarity_search(
            query_vec, pool, top_k=min(5, len(pool)),
        )
        if not hits:
            return ""

        section = ["## 全局知识库"]
        for idx, hit in enumerate(hits, 1):
            hit_tags = hit.get("metadata", {}).get("tags", [])
            suffix = f" [{', '.join(hit_tags[:3])}]" if hit_tags else ""
            section.append(f"{idx}.{suffix} {hit.get('content_text', '')[:500]}")
        return "\n".join(section)
    except Exception as e:
        logger.warning("全局知识检索失败: %s", e)
        return ""
    finally:
        if db:
            db.close()
async def save_global_knowledge(
    self, content: str, source_agent_id: str = "",
    source_user_id: str = "", tags: Optional[List[str]] = None,
) -> None:
    """Persist one knowledge entry into the global knowledge pool.

    Entries shorter than 20 characters (or empty) are silently
    discarded. The stored content is truncated to 2000 characters.
    Embedding generation is best-effort — a failure there never blocks
    the write. Any database error is logged, the transaction is rolled
    back, and the exception is swallowed so callers are never disrupted.
    """
    from app.models.agent import GlobalKnowledge

    if not content or len(content) < 20:
        return
    db: Optional[Session] = None
    try:
        db = SessionLocal()
        # Best-effort embedding; an empty string means "no embedding".
        serialized = ""
        try:
            vec = await embedding_service.generate_embedding(content)
            if vec:
                serialized = embedding_service.serialize_embedding(vec) or ""
        except Exception:
            pass
        entry = GlobalKnowledge(
            content=content[:2000],
            embedding=serialized or None,
            source_agent_id=source_agent_id or "",
            source_user_id=source_user_id or "",
            tags=tags or [],
            scope_kind=self.scope_kind,
            scope_id=self.scope_id or "global",
        )
        db.add(entry)
        db.commit()
        logger.info("已写入全局知识: agent=%s tags=%s", source_agent_id, tags)
    except Exception as e:
        logger.warning("保存全局知识失败: %s", e)
        if db:
            db.rollback()
    finally:
        if db:
            db.close()
async def save_context(
self, user_message: str, assistant_reply: str,
messages: Optional[List[Dict[str, Any]]] = None,