"""Unit tests covering WorkflowEngine's lightweight, dependency-free node types."""
import asyncio
|
|||
|
|
import pytest
|
|||
|
|
|
|||
|
|
from app.services.workflow_engine import WorkflowEngine
|
|||
|
|
|
|||
|
|
|
|||
|
|
def _engine_with(nodes, edges=None):
    """Build a WorkflowEngine around the given node list and optional edges."""
    return WorkflowEngine(
        workflow_id="wf_all",
        workflow_data={"nodes": nodes, "edges": edges or []},
    )
|
|||
|
|
|
|||
|
|
|
|||
|
|
@pytest.mark.asyncio
async def test_switch_branch():
    """A switch node routes to the case handle matching the input field."""
    switch_node = {
        "id": "sw1",
        "type": "switch",
        "data": {"field": "status", "cases": {"ok": "ok_handle"}, "default": "def"},
    }
    engine = _engine_with([switch_node])
    result = await engine.execute_node(switch_node, {"status": "ok"})
    assert result["status"] == "success"
    assert result["branch"] == "ok_handle"
|
|||
|
|
|
|||
|
|
|
|||
|
|
@pytest.mark.asyncio
async def test_merge_array_strategy():
    """Merge with the "array" strategy collects all inputs into a list."""
    merge_node = {"id": "m1", "type": "merge", "data": {"strategy": "array"}}
    engine = _engine_with([merge_node])
    result = await engine.execute_node(merge_node, {"a": 1, "b": 2})
    assert result["status"] == "success"
    merged = result["output"]
    assert isinstance(merged, list)
    assert len(merged) == 2
|
|||
|
|
|
|||
|
|
|
|||
|
|
@pytest.mark.asyncio
async def test_wait_time_mode():
    """A time-based wait node pauses briefly and passes its input through."""
    wait_node = {
        "id": "w1",
        "type": "wait",
        "data": {"wait_type": "time", "wait_seconds": 0.01},
    }
    engine = _engine_with([wait_node])
    result = await engine.execute_node(wait_node, {"ping": True})
    assert result["status"] == "success"
    assert result["output"]["ping"] is True
|
|||
|
|
|
|||
|
|
|
|||
|
|
@pytest.mark.asyncio
async def test_json_parse_and_extract():
    """A json node's "extract" operation pulls a value out via a JSONPath."""
    json_node = {
        "id": "j1",
        "type": "json",
        "data": {"operation": "extract", "path": "$.data.value"},
    }
    engine = _engine_with([json_node])
    result = await engine.execute_node(json_node, {"data": {"value": 42}})
    assert result["status"] == "success"
    assert result["output"] == 42
|
|||
|
|
|
|||
|
|
|
|||
|
|
@pytest.mark.asyncio
async def test_text_split():
    """A text node's "split" operation breaks input on the given delimiter."""
    text_node = {
        "id": "t1",
        "type": "text",
        "data": {"operation": "split", "delimiter": ","},
    }
    engine = _engine_with([text_node])
    result = await engine.execute_node(text_node, "a,b,c")
    assert result["status"] == "success"
    assert result["output"] == ["a", "b", "c"]
|
|||
|
|
|
|||
|
|
|
|||
|
|
@pytest.mark.asyncio
async def test_cache_set_then_get():
    """A value stored by a cache "set" node is returned by a later "get"."""
    set_node = {
        "id": "cset",
        "type": "cache",
        "data": {"operation": "set", "key": "k1", "ttl": 1},
    }
    get_node = {
        "id": "cget",
        "type": "cache",
        "data": {"operation": "get", "key": "k1", "ttl": 1},
    }
    engine = _engine_with([set_node, get_node])
    await engine.execute_node(set_node, {"value": "v"})
    fetched = await engine.execute_node(get_node, {})
    assert fetched["status"] == "success"
    assert fetched["output"] == "v"
    assert fetched["cache_hit"] is True
|
|||
|
|
|
|||
|
|
|
|||
|
|
@pytest.mark.asyncio
async def test_vector_db_upsert_search_delete():
    """Exercise the vector store end to end: upsert, then search, then delete."""
    upsert_node = {
        "id": "vec",
        "type": "vector_db",
        "data": {"operation": "upsert", "collection": "col"},
    }
    engine = _engine_with([upsert_node])
    upserted = await engine.execute_node(
        upsert_node, {"embedding": [1.0, 0.0], "text": "hi"}
    )
    assert upserted["status"] == "success"

    search_node = {
        "id": "vecs",
        "type": "vector_db",
        "data": {
            "operation": "search",
            "collection": "col",
            "query_vector": [1.0, 0.0],
            "top_k": 1,
        },
    }
    found = await engine.execute_node(search_node, {})
    assert found["status"] == "success"
    assert len(found["output"]) == 1

    delete_node = {
        "id": "vecd",
        "type": "vector_db",
        "data": {"operation": "delete", "collection": "col"},
    }
    deleted = await engine.execute_node(delete_node, {})
    assert deleted["status"] == "success"
|
|||
|
|
|
|||
|
|
|
|||
|
|
@pytest.mark.asyncio
async def test_log_basic():
    """A log node succeeds and records a message derived from its template."""
    log_node = {
        "id": "log1",
        "type": "log",
        "data": {"level": "info", "message": "hello {x}", "include_data": False},
    }
    engine = _engine_with([log_node])
    result = await engine.execute_node(log_node, {"x": 1})
    assert result["status"] == "success"
    # The engine may either format the template or fall back to its default
    # "节点执行" prefix; accept both.
    assert result["log"]["message"].startswith(("节点执行", "hello"))
|
|||
|
|
|
|||
|
|
|
|||
|
|
@pytest.mark.asyncio
async def test_error_handler_notify():
    """An error_handler in "notify" mode marks the error as handled and keeps it."""
    handler_node = {
        "id": "err1",
        "type": "error_handler",
        "data": {"on_error": "notify"},
    }
    engine = _engine_with([handler_node])
    result = await engine.execute_node(
        handler_node, {"status": "failed", "error": "boom"}
    )
    assert result["status"] == "error_handled"
    assert result["error"] == "boom"
|
|||
|
|
|
|||
|
|
|
|||
|
|
@pytest.mark.asyncio
async def test_csv_parse_and_generate():
    """CSV nodes round-trip: parse text into rows, then generate text from rows."""
    parse_node = {
        "id": "csvp",
        "type": "csv",
        "data": {"operation": "parse", "delimiter": ",", "headers": True},
    }
    engine = _engine_with([parse_node])
    parsed = await engine.execute_node(parse_node, "a,b\n1,2\n")
    assert parsed["status"] == "success"
    assert parsed["output"][0]["a"] == "1"

    generate_node = {
        "id": "csvg",
        "type": "csv",
        "data": {"operation": "generate", "delimiter": ",", "headers": True},
    }
    generated = await engine.execute_node(generate_node, [{"a": 1, "b": 2}])
    assert generated["status"] == "success"
    assert "a,b" in generated["output"]
|
|||
|
|
|
|||
|
|
|
|||
|
|
@pytest.mark.asyncio
async def test_object_storage_upload_download():
    """Object-storage nodes report "uploaded" and "downloaded" statuses."""
    upload_node = {
        "id": "osup",
        "type": "object_storage",
        "data": {
            "operation": "upload",
            "provider": "s3",
            "bucket": "bk",
            "key": "file.txt",
        },
    }
    engine = _engine_with([upload_node])
    uploaded = await engine.execute_node(upload_node, {"file": "data"})
    assert uploaded["status"] == "success"
    assert uploaded["output"]["status"] == "uploaded"

    download_node = {
        "id": "osdown",
        "type": "object_storage",
        "data": {
            "operation": "download",
            "provider": "s3",
            "bucket": "bk",
            "key": "file.txt",
        },
    }
    downloaded = await engine.execute_node(download_node, {})
    assert downloaded["status"] == "success"
    assert downloaded["output"]["status"] == "downloaded"
|
|||
|
|
|
|||
|
|
|
|||
|
|
# Node types with heavy integration/external dependencies are only listed here
# and skipped, to avoid network access, compilation, and binary dependencies.
heavy_nodes = [
    "llm", "agent", "http", "webhook", "email",
    "message_queue", "database", "file", "pdf", "image",
    "excel", "slack", "dingtalk", "wechat_work", "sms",
]
|
|||
|
|
|
|||
|
|
|
|||
|
|
@pytest.mark.skip(reason="重依赖/外部IO,保留集成测试")
@pytest.mark.asyncio
async def test_heavy_nodes_placeholder():
    """Placeholder keeping the heavy-dependency node list visible in reports."""
    assert True
|