This commit is contained in:
rjb
2026-01-22 09:59:02 +08:00
parent 47dac9f33b
commit f7702f4e72
18 changed files with 8012 additions and 104 deletions

View File

@@ -0,0 +1,232 @@
import asyncio
import pytest
from app.services.workflow_engine import WorkflowEngine
def _engine_with(nodes, edges=None):
    """Build a WorkflowEngine whose workflow holds the given nodes and edges."""
    return WorkflowEngine(
        workflow_id="wf_all",
        workflow_data={"nodes": nodes, "edges": edges or []},
    )
@pytest.mark.asyncio
async def test_switch_branch():
    """A switch node should route to the handle of the matching case."""
    switch_node = {
        "id": "sw1",
        "type": "switch",
        "data": {"field": "status", "cases": {"ok": "ok_handle"}, "default": "def"},
    }
    result = await _engine_with([switch_node]).execute_node(switch_node, {"status": "ok"})
    assert result["status"] == "success"
    assert result["branch"] == "ok_handle"
@pytest.mark.asyncio
async def test_merge_array_strategy():
    """A merge node with the 'array' strategy should collect inputs into a list."""
    merge_node = {"id": "m1", "type": "merge", "data": {"strategy": "array"}}
    result = await _engine_with([merge_node]).execute_node(merge_node, {"a": 1, "b": 2})
    assert result["status"] == "success"
    assert isinstance(result["output"], list)
    assert len(result["output"]) == 2
@pytest.mark.asyncio
async def test_wait_time_mode():
    """A time-based wait node should pass its input through after the delay."""
    wait_node = {
        "id": "w1",
        "type": "wait",
        "data": {"wait_type": "time", "wait_seconds": 0.01},
    }
    result = await _engine_with([wait_node]).execute_node(wait_node, {"ping": True})
    assert result["status"] == "success"
    assert result["output"]["ping"] is True
@pytest.mark.asyncio
async def test_json_parse_and_extract():
    """A json 'extract' node should resolve its path expression to the nested value."""
    json_node = {
        "id": "j1",
        "type": "json",
        "data": {"operation": "extract", "path": "$.data.value"},
    }
    result = await _engine_with([json_node]).execute_node(json_node, {"data": {"value": 42}})
    assert result["status"] == "success"
    assert result["output"] == 42
@pytest.mark.asyncio
async def test_text_split():
    """A text 'split' node should break its input on the configured delimiter."""
    text_node = {
        "id": "t1",
        "type": "text",
        "data": {"operation": "split", "delimiter": ","},
    }
    result = await _engine_with([text_node]).execute_node(text_node, "a,b,c")
    assert result["status"] == "success"
    assert result["output"] == ["a", "b", "c"]
@pytest.mark.asyncio
async def test_cache_set_then_get():
    """A cache 'get' should return the value stored by a prior 'set' and flag the hit."""
    setter = {
        "id": "cset",
        "type": "cache",
        "data": {"operation": "set", "key": "k1", "ttl": 1},
    }
    getter = {
        "id": "cget",
        "type": "cache",
        "data": {"operation": "get", "key": "k1", "ttl": 1},
    }
    engine = _engine_with([setter, getter])
    await engine.execute_node(setter, {"value": "v"})
    fetched = await engine.execute_node(getter, {})
    assert fetched["status"] == "success"
    assert fetched["output"] == "v"
    assert fetched["cache_hit"] is True
@pytest.mark.asyncio
async def test_vector_db_upsert_search_delete():
    """Exercise the vector_db lifecycle on one engine: upsert, search, delete."""
    upsert_node = {
        "id": "vec",
        "type": "vector_db",
        "data": {"operation": "upsert", "collection": "col"},
    }
    engine = _engine_with([upsert_node])
    upserted = await engine.execute_node(upsert_node, {"embedding": [1.0, 0.0], "text": "hi"})
    assert upserted["status"] == "success"

    search_node = {
        "id": "vecs",
        "type": "vector_db",
        "data": {
            "operation": "search",
            "collection": "col",
            "query_vector": [1.0, 0.0],
            "top_k": 1,
        },
    }
    found = await engine.execute_node(search_node, {})
    assert found["status"] == "success"
    assert len(found["output"]) == 1

    delete_node = {
        "id": "vecd",
        "type": "vector_db",
        "data": {"operation": "delete", "collection": "col"},
    }
    deleted = await engine.execute_node(delete_node, {})
    assert deleted["status"] == "success"
@pytest.mark.asyncio
async def test_log_basic():
    """A log node should succeed and emit either the template or a default message."""
    log_node = {
        "id": "log1",
        "type": "log",
        "data": {"level": "info", "message": "hello {x}", "include_data": False},
    }
    result = await _engine_with([log_node]).execute_node(log_node, {"x": 1})
    assert result["status"] == "success"
    message = result["log"]["message"]
    assert message.startswith("节点执行") or message.startswith("hello")
@pytest.mark.asyncio
async def test_error_handler_notify():
    """An error_handler in 'notify' mode should absorb the failure and report it."""
    handler_node = {
        "id": "err1",
        "type": "error_handler",
        "data": {"on_error": "notify"},
    }
    result = await _engine_with([handler_node]).execute_node(
        handler_node, {"status": "failed", "error": "boom"}
    )
    assert result["status"] == "error_handled"
    assert result["error"] == "boom"
@pytest.mark.asyncio
async def test_csv_parse_and_generate():
    """CSV 'parse' should yield dict rows; 'generate' should emit a header line."""
    parse_node = {
        "id": "csvp",
        "type": "csv",
        "data": {"operation": "parse", "delimiter": ",", "headers": True},
    }
    engine = _engine_with([parse_node])
    parsed = await engine.execute_node(parse_node, "a,b\n1,2\n")
    assert parsed["status"] == "success"
    assert parsed["output"][0]["a"] == "1"

    generate_node = {
        "id": "csvg",
        "type": "csv",
        "data": {"operation": "generate", "delimiter": ",", "headers": True},
    }
    generated = await engine.execute_node(generate_node, [{"a": 1, "b": 2}])
    assert generated["status"] == "success"
    assert "a,b" in generated["output"]
@pytest.mark.asyncio
async def test_object_storage_upload_download():
    """Upload then download against the same bucket/key should both report success."""
    location = {"provider": "s3", "bucket": "bk", "key": "file.txt"}

    upload_node = {
        "id": "osup",
        "type": "object_storage",
        "data": {"operation": "upload", **location},
    }
    engine = _engine_with([upload_node])
    uploaded = await engine.execute_node(upload_node, {"file": "data"})
    assert uploaded["status"] == "success"
    assert uploaded["output"]["status"] == "uploaded"

    download_node = {
        "id": "osdown",
        "type": "object_storage",
        "data": {"operation": "download", **location},
    }
    downloaded = await engine.execute_node(download_node, {})
    assert downloaded["status"] == "success"
    assert downloaded["output"]["status"] == "downloaded"
# Node types with heavy integration/external dependencies are marked as skipped
# here to avoid network access, compilation, and binary dependencies.
heavy_nodes = [
    # model / agent execution
    "llm",
    "agent",
    # network & messaging
    "http",
    "webhook",
    "email",
    "message_queue",
    # storage & document handling
    "database",
    "file",
    "pdf",
    "image",
    "excel",
    # third-party notification channels
    "slack",
    "dingtalk",
    "wechat_work",
    "sms",
]
@pytest.mark.skip(reason="重依赖/外部IO保留集成测试")
@pytest.mark.asyncio
async def test_heavy_nodes_placeholder():
    """Placeholder keeping the heavy-dependency node suite visible in collection."""
    assert True

View File

@@ -0,0 +1,136 @@
import pytest
from app.services.workflow_engine import WorkflowEngine
def _make_engine_with_node(node):
    """Build a workflow engine whose workflow contains exactly one node."""
    return WorkflowEngine(
        workflow_id="wf_test",
        workflow_data={"nodes": [node], "edges": []},
    )
@pytest.mark.asyncio
async def test_subworkflow_mapping():
    """A subworkflow node should forward only the mapped inputs to the child workflow."""
    sub_node = {
        "id": "sub-1",
        "type": "subworkflow",
        "data": {
            "workflow_id": "child_wf",
            "input_mapping": {"mapped": "source"},
        },
    }
    outcome = await _make_engine_with_node(sub_node).execute_node(
        sub_node, {"source": 123, "other": 1}
    )
    assert outcome["status"] == "success"
    assert outcome["output"]["workflow_id"] == "child_wf"
    assert outcome["output"]["input"]["mapped"] == 123
@pytest.mark.asyncio
async def test_code_python_success():
    """A python code node should evaluate its snippet against the input data."""
    code_node = {
        "id": "code-1",
        "type": "code",
        "data": {
            "language": "python",
            "code": "result = input_data['x'] * 2",
        },
    }
    outcome = await _make_engine_with_node(code_node).execute_node(code_node, {"x": 3})
    assert outcome["status"] == "success"
    assert outcome["output"] == 6
@pytest.mark.asyncio
async def test_code_unsupported_language():
    """An unsupported language should surface an error payload rather than raising."""
    code_node = {
        "id": "code-2",
        "type": "code",
        "data": {"language": "go", "code": "result = 1"},
    }
    outcome = await _make_engine_with_node(code_node).execute_node(code_node, {})
    assert outcome["status"] == "success"
    assert "不支持的语言" in outcome["output"]["error"]
@pytest.mark.asyncio
async def test_oauth_mock_token():
    """An oauth node should return a mock bearer token for the configured provider."""
    oauth_node = {
        "id": "oauth-1",
        "type": "oauth",
        "data": {"provider": "google", "client_id": "id", "client_secret": "sec"},
    }
    outcome = await _make_engine_with_node(oauth_node).execute_node(oauth_node, {})
    assert outcome["status"] == "success"
    token = outcome["output"]
    assert token["access_token"].startswith("mock_access_token_google")
    assert token["token_type"] == "Bearer"
@pytest.mark.asyncio
async def test_validator_reject_and_continue():
    """Validator should fail on 'reject' but pass with a warning on 'continue'."""
    # on_error="reject": a schema mismatch must fail the node.
    reject_node = {
        "id": "val-1",
        "type": "validator",
        "data": {"schema": {"type": "object"}, "on_error": "reject"},
    }
    rejected = await _make_engine_with_node(reject_node).execute_node(reject_node, "bad_type")
    assert rejected["status"] == "failed"

    # on_error="continue": the node succeeds but carries a warning entry.
    continue_node = {
        "id": "val-2",
        "type": "validator",
        "data": {"schema": {"type": "object"}, "on_error": "continue"},
    }
    passed = await _make_engine_with_node(continue_node).execute_node(continue_node, "bad_type")
    assert passed["status"] == "success"
    assert "warning" in passed
@pytest.mark.asyncio
async def test_batch_split_group_aggregate():
    """Batch node: 'split'/'group' chunk the input; 'aggregate' summarizes it."""
    items = list(range(5))

    # split: chunks of batch_size; the final chunk may be short.
    split_node = {
        "id": "batch-1",
        "type": "batch",
        "data": {"batch_size": 2, "mode": "split"},
    }
    split_result = await _make_engine_with_node(split_node).execute_node(split_node, items)
    assert split_result["status"] == "success"
    assert split_result["output"][0] == [0, 1]
    assert split_result["output"][1] == [2, 3]
    assert split_result["output"][2] == [4]

    # group: same chunking logic as split.
    group_node = {
        "id": "batch-2",
        "type": "batch",
        "data": {"batch_size": 3, "mode": "group"},
    }
    group_result = await _make_engine_with_node(group_node).execute_node(group_node, items)
    assert group_result["status"] == "success"
    assert group_result["output"][0] == [0, 1, 2]
    assert group_result["output"][1] == [3, 4]

    # aggregate: returns an item count plus leading samples.
    agg_node = {
        "id": "batch-3",
        "type": "batch",
        "data": {"mode": "aggregate"},
    }
    agg_result = await _make_engine_with_node(agg_node).execute_node(agg_node, items)
    assert agg_result["status"] == "success"
    assert agg_result["output"]["count"] == 5
    assert agg_result["output"]["samples"][:2] == [0, 1]