|
|
|
|
"""橙子飞书长连接 — 固定路由到橙子助手 Agent"""
|
|
|
|
|
|
from __future__ import annotations
|
|
|
|
|
|
|
|
|
|
|
|
import asyncio
|
|
|
|
|
|
import json
|
|
|
|
|
|
import logging
|
|
|
|
|
|
import threading
|
|
|
|
|
|
from collections import deque
|
|
|
|
|
|
from typing import Optional
|
|
|
|
|
|
|
|
|
|
|
|
from app.core.config import settings
|
|
|
|
|
|
|
|
|
|
|
|
# Module-level logger (lazy %-style args used throughout).
logger = logging.getLogger(__name__)


# Dedup of already-processed message IDs (guards against duplicate delivery
# after a WS reconnect).
# NOTE(review): the window is only 20 IDs — a burst larger than that could let
# a redelivered duplicate through; confirm this bound is acceptable.
_processed_msg_ids: deque[str] = deque(maxlen=20)


# Handle for a background WS thread; not started anywhere in this chunk —
# presumably managed by the module's caller. TODO confirm.
_ws_thread: threading.Thread | None = None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _get_message_id(data) -> Optional[str]:
|
|
|
|
|
|
"""从消息事件中提取 message_id。"""
|
|
|
|
|
|
try:
|
|
|
|
|
|
ev = data.event
|
|
|
|
|
|
msg = getattr(ev, "message", None)
|
|
|
|
|
|
if msg:
|
|
|
|
|
|
return getattr(msg, "message_id", None)
|
|
|
|
|
|
except Exception:
|
|
|
|
|
|
return None
|
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _get_message_text(data) -> Optional[str]:
|
|
|
|
|
|
"""从消息事件中提取纯文本内容。"""
|
|
|
|
|
|
try:
|
|
|
|
|
|
ev = data.event
|
|
|
|
|
|
msg = getattr(ev, "message", None)
|
|
|
|
|
|
if not msg:
|
|
|
|
|
|
return None
|
|
|
|
|
|
content_str = getattr(msg, "content", None)
|
|
|
|
|
|
msg_type = getattr(msg, "message_type", "")
|
|
|
|
|
|
if not content_str:
|
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
|
if msg_type == "text":
|
|
|
|
|
|
parsed = json.loads(content_str)
|
|
|
|
|
|
return parsed.get("text", "")
|
|
|
|
|
|
return None
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
|
logger.warning("解析橙子消息内容失败: %s", e)
|
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _get_sender_open_id(data) -> Optional[str]:
|
|
|
|
|
|
"""从消息事件中提取发送者 open_id。"""
|
|
|
|
|
|
try:
|
|
|
|
|
|
ev = data.event
|
|
|
|
|
|
sender = getattr(ev, "sender", None)
|
|
|
|
|
|
if not sender:
|
|
|
|
|
|
return None
|
|
|
|
|
|
sender_id = getattr(sender, "sender_id", None)
|
|
|
|
|
|
if not sender_id:
|
|
|
|
|
|
return None
|
|
|
|
|
|
return getattr(sender_id, "open_id", None)
|
|
|
|
|
|
except Exception:
|
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _get_chat_type(data) -> str:
|
|
|
|
|
|
"""获取聊天类型。"""
|
|
|
|
|
|
try:
|
|
|
|
|
|
ev = data.event
|
|
|
|
|
|
msg = getattr(ev, "message", None)
|
|
|
|
|
|
return getattr(msg, "chat_type", "") if msg else ""
|
|
|
|
|
|
except Exception:
|
|
|
|
|
|
return ""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _reply_to_feishu(open_id: str, text: str):
    """Send a plain-text reply to the user via the Orange app (best-effort).

    Failures are logged and swallowed so message handling never crashes on
    a reply error.
    """
    try:
        from app.services.orange_app_service import send_plain_text as _send

        _send(open_id, text)
    except Exception as exc:
        logger.warning("橙子回复消息失败: %s", exc)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _reply_card(open_id: str, title: str, content: str, status: str = "info"):
    """Send an interactive card reply via the Orange app (best-effort).

    Failures are logged and swallowed, mirroring _reply_to_feishu.
    """
    try:
        from app.services.orange_app_service import send_message_to_user as _send

        _send(open_id, title, content, status=status)
    except Exception as exc:
        logger.warning("橙子回复卡片失败: %s", exc)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _make_llm_logger(db, agent_id: Optional[str] = None, user_id: Optional[str] = None):
|
|
|
|
|
|
"""创建 LLM 调用日志回调。"""
|
|
|
|
|
|
def _log(metrics: dict):
|
|
|
|
|
|
try:
|
|
|
|
|
|
from app.models.agent_llm_log import AgentLLMLog
|
|
|
|
|
|
log = AgentLLMLog(
|
|
|
|
|
|
agent_id=agent_id,
|
|
|
|
|
|
session_id=metrics.get("session_id"),
|
|
|
|
|
|
user_id=user_id,
|
|
|
|
|
|
model=metrics.get("model", ""),
|
|
|
|
|
|
provider=metrics.get("provider"),
|
|
|
|
|
|
prompt_tokens=metrics.get("prompt_tokens", 0),
|
|
|
|
|
|
completion_tokens=metrics.get("completion_tokens", 0),
|
|
|
|
|
|
total_tokens=metrics.get("total_tokens", 0),
|
|
|
|
|
|
latency_ms=metrics.get("latency_ms", 0),
|
|
|
|
|
|
iteration_number=metrics.get("iteration_number", 0),
|
|
|
|
|
|
step_type=metrics.get("step_type"),
|
|
|
|
|
|
tool_name=metrics.get("tool_name"),
|
|
|
|
|
|
status=metrics.get("status", "success"),
|
|
|
|
|
|
error_message=metrics.get("error_message"),
|
|
|
|
|
|
)
|
|
|
|
|
|
db.add(log)
|
|
|
|
|
|
db.commit()
|
|
|
|
|
|
except Exception as e:
|
|
|
|
|
|
logger.warning("写入 AgentLLMLog 失败: %s", e)
|
|
|
|
|
|
return _log
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def _handle_message_async(data):
    """Asynchronously handle an Orange message — fixed routing to the Orange assistant Agent.

    Flow: validate the event (p2p text only), load the configured Agent from
    the DB, derive LLM settings from the agent's workflow config, run an
    AgentRuntime on the message text, and reply via card / plain text.

    All errors are reported back to the user best-effort; the DB session is
    always closed.
    """
    open_id = _get_sender_open_id(data)
    chat_type = _get_chat_type(data)
    text = _get_message_text(data)

    # Only direct (p2p) chats with a resolvable sender are handled.
    if not open_id or chat_type != "p2p":
        return

    logger.info("橙子收到消息: open_id=%s text=%s", open_id[:20], text[:50] if text else "(空)")

    if not text:
        return

    from sqlalchemy.orm import Session
    from app.core.database import SessionLocal
    from app.models.agent import Agent

    db: Optional[Session] = None
    try:
        db = SessionLocal()

        # Fixed routing: always the configured Orange assistant agent.
        agent_id = settings.ORANGE_AGENT_ID
        if not agent_id:
            _reply_to_feishu(open_id, "橙子助手尚未配置,请联系管理员。")
            return

        agent = db.query(Agent).filter(Agent.id == agent_id).first()
        if not agent:
            _reply_to_feishu(open_id, "橙子助手 Agent 已不存在,请联系管理员。")
            return

        # Immediate feedback while the agent thinks.
        _reply_to_feishu(open_id, "🤔 正在思考,请稍候...")

        from app.agent_runtime import AgentRuntime, AgentConfig, AgentLLMConfig, AgentToolConfig, AgentMemoryConfig

        # Derive LLM settings from the first agent/llm/template node of the
        # workflow config; fall back to these defaults otherwise.
        wc = agent.workflow_config or {}
        nodes = wc.get("nodes", [])
        system_prompt = agent.description or ""
        model = "deepseek-v4-flash"
        provider = "deepseek"
        temperature = 0.7
        max_iterations = 10
        # BUG FIX: cfg was previously unbound when no matching node existed,
        # causing a NameError at the AgentMemoryConfig(...) reads below.
        cfg: dict = {}

        for n in nodes:
            if n.get("type") not in ("agent", "llm", "template"):
                continue
            cfg = n.get("data", {}) if isinstance(n, dict) else getattr(n, "data", {})
            system_prompt = cfg.get("system_prompt", "") or system_prompt
            model = cfg.get("model", model)
            provider = cfg.get("provider", provider)
            temperature = float(cfg.get("temperature", temperature))
            max_iterations = int(cfg.get("max_iterations", max_iterations))
            break

        config = AgentConfig(
            name=agent.name or "橙子助手",
            system_prompt=system_prompt,
            llm=AgentLLMConfig(
                model=model,
                provider=provider,
                temperature=temperature,
                max_iterations=max_iterations,
            ),
            tools=AgentToolConfig(),
            memory=AgentMemoryConfig(
                max_history_messages=int(cfg.get("memory_max_history", 20)),
                vector_memory_top_k=int(cfg.get("memory_vector_top_k", 5)),
                persist_to_db=bool(cfg.get("memory_persist", True)),
                vector_memory_enabled=bool(cfg.get("memory_vector_enabled", True)),
                learning_enabled=bool(cfg.get("memory_learning", True)),
            ),
            user_id=None,
            # Memory is scoped per-agent, not per-user. TODO confirm intended.
            memory_scope_id=str(agent.id),
        )

        on_llm_call = _make_llm_logger(db, agent_id=str(agent.id))
        runtime = AgentRuntime(config=config, on_llm_call=on_llm_call)
        result = await runtime.run(text)

        if result.content:
            _reply_card(open_id, f"🍊 {agent.name}", result.content.strip(), status="success")
        else:
            _reply_to_feishu(open_id, "Agent 未返回有效回复,请重试。")

        logger.info(
            "橙子 Agent 回复完成: open_id=%s agent=%s iterations=%d tools=%d",
            open_id[:20], agent.name, result.iterations_used, result.tool_calls_made,
        )

    except Exception as e:
        # exception() rather than error() so the traceback is preserved.
        logger.exception("橙子消息处理失败: %s", e)
        try:
            _reply_to_feishu(open_id, f"处理失败: {e!s}")
        except Exception:
            pass
    finally:
        if db:
            db.close()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _handle_message_internal(data):
    """Synchronous entry point — dedup the event and dispatch async handling.

    Schedules _handle_message_async on the running event loop when one
    exists, otherwise runs it to completion on a fresh loop.
    """
    # Dedup: the WS client may redeliver events after a reconnect.
    msg_id = _get_message_id(data)
    if msg_id:
        if msg_id in _processed_msg_ids:
            logger.debug("橙子跳过已处理消息: %s", msg_id)
            return
        _processed_msg_ids.append(msg_id)

    open_id = _get_sender_open_id(data)
    chat_type = _get_chat_type(data)
    text = _get_message_text(data)

    # Same guard as the async handler: p2p text messages only.
    if not open_id or chat_type != "p2p" or not text:
        return

    logger.info("橙子收到消息: open_id=%s text=%s", open_id[:20], text[:50] if text else "(空)")

    try:
        # BUG FIX: asyncio.get_event_loop() is deprecated and raises in
        # non-main threads without a running loop on modern Python; probe
        # for a running loop explicitly instead.
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            loop = None

        if loop is not None:
            # Already inside an event loop (WS callback context) — fire and forget.
            asyncio.ensure_future(_handle_message_async(data))
        else:
            # No loop in this thread — run the handler to completion.
            asyncio.run(_handle_message_async(data))
    except Exception as e:
        logger.error("橙子创建消息处理任务失败: %s", e)
        try:
            _reply_to_feishu(open_id, f"处理失败: {e!s}")
        except Exception:
            pass
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _build_event_handler():
    """Construct the Lark event dispatcher for the Orange connection.

    Registers the p2 im.message.receive_v1 event to the internal
    synchronous message handler.
    """
    from lark_oapi.event.dispatcher_handler import EventDispatcherHandler

    dispatcher = EventDispatcherHandler.builder(
        encrypt_key="",
        verification_token="",
    )
    dispatcher.register_p2_im_message_receive_v1(
        lambda data: _handle_message_internal(data)
    )
    return dispatcher.build()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def start_ws_client():
    """Start the Orange Feishu long-lived WS connection on the current event loop.

    Skips startup when the Orange app credentials are not configured.
    Reconnects with a 3-second delay after any failure; exits cleanly on
    task cancellation.

    NOTE(review): this drives the SDK through private APIs
    (``client._connect`` / ``client._ping_loop``) instead of a public
    start method — presumably to stay on the caller's event loop; confirm
    these internals still exist when upgrading ``lark_oapi``.
    """
    if not settings.ORANGE_APP_ID or not settings.ORANGE_APP_SECRET:
        logger.warning("橙子应用未配置,跳过橙子长连接启动")
        return

    from lark_oapi.ws import Client as WSClient

    handler = _build_event_handler()
    client = WSClient(
        app_id=settings.ORANGE_APP_ID,
        app_secret=settings.ORANGE_APP_SECRET,
        event_handler=handler,
        auto_reconnect=True,
    )

    logger.info("橙子长连接客户端启动中...")

    while True:
        try:
            await client._connect()
            logger.info("橙子长连接已建立")
            # _ping_loop internally spawns _receive_message_loop and drives heartbeats.
            ping_task = asyncio.ensure_future(client._ping_loop())
            # Sleep forever to keep this coroutine alive while the SDK tasks run.
            while True:
                await asyncio.sleep(3600)
        except asyncio.CancelledError:
            break
        except Exception as e:
            logger.warning("橙子长连接断开,3秒后重连: %s", e)
            await asyncio.sleep(3)
|