test: Add integration tests for execution flows in native Python runner (#19198)
186  packages/@n8n/task-runner-python/tests/fixtures/local_task_broker.py  (vendored, new file)
@@ -0,0 +1,186 @@
import asyncio
import json
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any

from aiohttp import web, web_ws
from src.nanoid import nanoid

from tests.fixtures.test_constants import (
    TASK_RESPONSE_WAIT,
    LOCAL_TASK_BROKER_PORT,
    LOCAL_TASK_BROKER_WS_PATH,
)

TaskId = str
TaskSettings = dict[str, Any]
WebsocketMessage = dict[str, Any]


@dataclass
class ActiveTask:
    settings: TaskSettings


class LocalTaskBroker:
    """In-process stand-in for the n8n task broker, speaking the runner WebSocket protocol."""

    def __init__(self):
        self.port = LOCAL_TASK_BROKER_PORT
        self.app = web.Application()
        self.runner: web.AppRunner | None = None
        self.site: web.TCPSite | None = None
        self.connections: dict[str, web_ws.WebSocketResponse] = {}
        self.pending_messages: dict[str, asyncio.Queue[WebsocketMessage]] = {}
        self.received_messages: list[WebsocketMessage] = []
        self.active_tasks: dict[TaskId, ActiveTask] = {}
        self.task_settings: dict[TaskId, TaskSettings] = {}
        self.rpc_messages: dict[TaskId, list[dict]] = {}
        self.app.router.add_get(LOCAL_TASK_BROKER_WS_PATH, self.websocket_handler)

    async def start(self) -> None:
        self.runner = web.AppRunner(self.app)
        await self.runner.setup()
        self.site = web.TCPSite(self.runner, "localhost", self.port)
        await self.site.start()
        print(f"Local task broker started on port {self.port}")

    async def stop(self) -> None:
        for ws in self.connections.values():
            await ws.close()
        self.connections.clear()

        if self.site:
            await self.site.stop()

        if self.runner:
            await self.runner.cleanup()

    async def websocket_handler(self, request: web.Request) -> web_ws.WebSocketResponse:
        print(f"WebSocket connection request from {request.remote}")
        ws = web_ws.WebSocketResponse()
        await ws.prepare(request)
        connection_id = nanoid()
        self.connections[connection_id] = ws
        self.pending_messages[connection_id] = asyncio.Queue()

        sender_coroutine = asyncio.create_task(self._message_sender(connection_id, ws))

        try:
            await self.send_to_connection(connection_id, {"type": "broker:inforequest"})

            async for message in ws:
                if message.type == web_ws.WSMsgType.TEXT:
                    json_message = json.loads(message.data)
                    self.received_messages.append(json_message)
                    await self._handle_message(connection_id, json_message)
        finally:
            sender_coroutine.cancel()
            try:
                await sender_coroutine
            except asyncio.CancelledError:
                pass

            del self.connections[connection_id]
            del self.pending_messages[connection_id]

        return ws

    async def _message_sender(self, connection_id: str, ws: web_ws.WebSocketResponse):
        while True:
            message = await self.pending_messages[connection_id].get()
            await ws.send_str(json.dumps(message))

    async def _handle_message(self, connection_id: str, message: WebsocketMessage):
        match message.get("type"):
            case "runner:info":
                await self.send_to_connection(
                    connection_id, {"type": "broker:runnerregistered"}
                )

            case "runner:taskoffer":
                pass  # Handled by send_task(), which waits for offers

            case "runner:taskaccepted":
                task_id = message.get("taskId")
                if task_id in self.task_settings:
                    await self.send_to_connection(
                        connection_id,
                        {
                            "type": "broker:tasksettings",
                            "taskId": task_id,
                            "settings": self.task_settings[task_id],
                        },
                    )

            case "runner:taskdone" | "runner:taskerror":
                task_id = message.get("taskId")
                if task_id in self.active_tasks:
                    del self.active_tasks[task_id]

            case "runner:rpc":
                task_id = message.get("taskId")
                if task_id:
                    if task_id not in self.rpc_messages:
                        self.rpc_messages[task_id] = []
                    self.rpc_messages[task_id].append(
                        {"method": message.get("name"), "params": message.get("params")}
                    )

    async def send_to_connection(self, connection_id: str, message: WebsocketMessage):
        if connection_id in self.pending_messages:
            await self.pending_messages[connection_id].put(message)

    async def send_task(
        self,
        task_id: TaskId,
        task_settings: TaskSettings,
    ):
        self.active_tasks[task_id] = ActiveTask(task_settings)
        self.task_settings[task_id] = task_settings

        offer = await self.wait_for_msg("runner:taskoffer", timeout=2.0)

        if offer:
            accept = {
                "type": "broker:taskofferaccept",
                "taskId": task_id,
                "offerId": offer.get("offerId"),
            }

            if self.connections:
                connection = next(iter(self.connections.keys()))
                await self.send_to_connection(connection, accept)

    async def cancel_task(self, task_id: TaskId, reason: str):
        cancel_message = {
            "type": "broker:taskcancel",
            "taskId": task_id,
            "reason": reason,
        }

        for connection_id in self.connections:
            await self.send_to_connection(connection_id, cancel_message)

    async def wait_for_msg(
        self,
        msg_type: str,
        timeout: float = TASK_RESPONSE_WAIT,
        predicate: Callable[[WebsocketMessage], bool] | None = None,
    ) -> WebsocketMessage | None:
        start_time = asyncio.get_running_loop().time()

        while asyncio.get_running_loop().time() - start_time < timeout:
            for msg in self.received_messages:
                if msg.get("type") == msg_type:
                    if predicate is None or predicate(msg):
                        return msg

            await asyncio.sleep(0.1)

        return None

    def get_messages_of_type(self, msg_type: str) -> list[WebsocketMessage]:
        return [msg for msg in self.received_messages if msg.get("type") == msg_type]

    def get_task_rpc_messages(self, task_id: TaskId) -> list[dict]:
        return self.rpc_messages.get(task_id, [])
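For orientation, a minimal sketch of driving this fixture by hand, outside pytest. The task id and settings dict are illustrative placeholders, and a real runner process (see the manager fixture below) must already be connecting to ws://localhost:8080/runners/_ws:

import asyncio

from tests.fixtures.local_task_broker import LocalTaskBroker

async def main():
    broker = LocalTaskBroker()
    await broker.start()
    # After a runner connects: broker:inforequest -> runner:info ->
    # broker:runnerregistered; send_task() then waits for a runner:taskoffer,
    # accepts it, and delivers the settings once runner:taskaccepted arrives.
    await broker.send_task(task_id="demo-task", task_settings={"code": "return []"})
    print(await broker.wait_for_msg("runner:taskdone"))
    await broker.stop()

asyncio.run(main())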
99  packages/@n8n/task-runner-python/tests/fixtures/task_runner_manager.py  (vendored, new file)
@@ -0,0 +1,99 @@
import asyncio
import os
import sys
from pathlib import Path

from src.constants import (
    ENV_GRACEFUL_SHUTDOWN_TIMEOUT,
    ENV_GRANT_TOKEN,
    ENV_HEALTH_CHECK_SERVER_ENABLED,
    ENV_HEALTH_CHECK_SERVER_PORT,
    ENV_TASK_BROKER_URI,
    ENV_TASK_TIMEOUT,
)

from tests.fixtures.test_constants import (
    GRACEFUL_SHUTDOWN_TIMEOUT,
    HEALTH_CHECK_PORT,
    LOCAL_TASK_BROKER_URL,
    TASK_TIMEOUT,
)


class TaskRunnerManager:
    """Responsible for managing the lifecycle of a task runner subprocess."""

    def __init__(
        self,
        task_broker_url: str = LOCAL_TASK_BROKER_URL,
        graceful_shutdown_timeout: float | None = None,
    ):
        self.task_broker_url = task_broker_url
        self.graceful_shutdown_timeout = graceful_shutdown_timeout
        self.subprocess: asyncio.subprocess.Process | None = None
        self.stdout_buffer: list[str] = []
        self.stderr_buffer: list[str] = []

    async def start(self):
        project_root = Path(__file__).parent.parent.parent
        runner_path = project_root / "src" / "main.py"

        env_vars = os.environ.copy()
        env_vars[ENV_GRANT_TOKEN] = "test_token"
        env_vars[ENV_TASK_BROKER_URI] = self.task_broker_url
        env_vars[ENV_TASK_TIMEOUT] = str(TASK_TIMEOUT)
        env_vars[ENV_HEALTH_CHECK_SERVER_ENABLED] = "true"
        env_vars[ENV_HEALTH_CHECK_SERVER_PORT] = str(HEALTH_CHECK_PORT)
        if self.graceful_shutdown_timeout is not None:
            env_vars[ENV_GRACEFUL_SHUTDOWN_TIMEOUT] = str(
                self.graceful_shutdown_timeout
            )
        env_vars["PYTHONPATH"] = str(project_root)

        self.subprocess = await asyncio.create_subprocess_exec(
            sys.executable,
            str(runner_path),
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            env=env_vars,
            cwd=str(project_root),
        )

        asyncio.create_task(self._read_stdout())
        asyncio.create_task(self._read_stderr())

    def is_running(self) -> bool:
        return self.subprocess is not None and self.subprocess.returncode is None

    async def stop(self) -> None:
        if not self.subprocess or self.subprocess.returncode is not None:
            return

        self.subprocess.terminate()
        try:
            await asyncio.wait_for(
                self.subprocess.wait(), timeout=GRACEFUL_SHUTDOWN_TIMEOUT
            )
        except asyncio.TimeoutError:
            self.subprocess.kill()
            await self.subprocess.wait()

    async def _read_stdout(self):
        if not self.subprocess or not self.subprocess.stdout:
            return

        while True:
            line = await self.subprocess.stdout.readline()
            if not line:
                break
            self.stdout_buffer.append(line.decode().strip())

    async def _read_stderr(self):
        if not self.subprocess or not self.subprocess.stderr:
            return

        while True:
            line = await self.subprocess.stderr.readline()
            if not line:
                break
            self.stderr_buffer.append(line.decode().strip())
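A quick lifecycle sketch for this manager, assuming it runs from the package root so src/main.py resolves and a broker from the fixture above is already listening:

import asyncio

from tests.fixtures.task_runner_manager import TaskRunnerManager

async def main():
    manager = TaskRunnerManager()
    await manager.start()  # spawns `python src/main.py` with the test env vars set
    await asyncio.sleep(1)
    assert manager.is_running()  # True while the subprocess has no return code
    await manager.stop()  # SIGTERM, then SIGKILL after GRACEFUL_SHUTDOWN_TIMEOUT
    print(manager.stderr_buffer)  # runner log lines collected by _read_stderr()

asyncio.run(main())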
13  packages/@n8n/task-runner-python/tests/fixtures/test_constants.py  (vendored, new file)
@@ -0,0 +1,13 @@
# Local task broker
LOCAL_TASK_BROKER_PORT = 8080
LOCAL_TASK_BROKER_URL = f"http://localhost:{LOCAL_TASK_BROKER_PORT}"
LOCAL_TASK_BROKER_WS_PATH = "/runners/_ws"

# Timing
TASK_RESPONSE_WAIT = 3
TASK_TIMEOUT = 2
GRACEFUL_SHUTDOWN_TIMEOUT = 1

# Health check
HEALTH_CHECK_PORT = 5679
HEALTH_CHECK_URL = f"http://localhost:{HEALTH_CHECK_PORT}"
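One ordering these values appear to rely on, stated here as an observation rather than anything the code enforces: the helpers poll for up to TASK_RESPONSE_WAIT, so a task that hits TASK_TIMEOUT can still report its runner:taskerror within the wait window, and graceful shutdown is shorter still.

# 3 > 2 > 1; the timeout test below waits TASK_TIMEOUT + 0.5, under TASK_RESPONSE_WAIT.
assert TASK_RESPONSE_WAIT > TASK_TIMEOUT > GRACEFUL_SHUTDOWN_TIMEOUT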
69  packages/@n8n/task-runner-python/tests/integration/conftest.py  (new file; path inferred from the imports in the test files below)
@@ -0,0 +1,69 @@
import pytest_asyncio
from src.message_types.broker import Items
from src.message_serde import NODE_MODE_MAP

from tests.fixtures.local_task_broker import LocalTaskBroker
from tests.fixtures.task_runner_manager import TaskRunnerManager
from tests.fixtures.test_constants import (
    TASK_RESPONSE_WAIT,
)

NODE_MODE_TO_BROKER_STYLE = {v: k for k, v in NODE_MODE_MAP.items()}


@pytest_asyncio.fixture
async def manager():
    manager = TaskRunnerManager()
    await manager.start()
    yield manager
    await manager.stop()


@pytest_asyncio.fixture
async def broker():
    broker = LocalTaskBroker()
    await broker.start()
    yield broker
    await broker.stop()


def create_task_settings(
    code: str,
    node_mode: str,
    items: Items | None = None,
    continue_on_fail: bool = False,
    can_log: bool = False,
):
    return {
        "code": code,
        "nodeMode": NODE_MODE_TO_BROKER_STYLE[node_mode],
        "items": items if items is not None else [],
        "continueOnFail": continue_on_fail,
        "canLog": can_log,
    }


async def wait_for_task_done(broker, task_id: str, timeout: float = TASK_RESPONSE_WAIT):
    return await broker.wait_for_msg(
        "runner:taskdone",
        timeout=timeout,
        predicate=lambda msg: msg.get("taskId") == task_id,
    )


async def wait_for_task_error(
    broker, task_id: str, timeout: float = TASK_RESPONSE_WAIT
):
    return await broker.wait_for_msg(
        "runner:taskerror",
        timeout=timeout,
        predicate=lambda msg: msg.get("taskId") == task_id,
    )


def get_browser_console_msgs(broker: LocalTaskBroker, task_id: str) -> list[list[str]]:
    console_msgs = []
    for msg in broker.get_task_rpc_messages(task_id):
        if msg.get("method") == "logNodeOutput":
            console_msgs.append(msg.get("params", []))
    return console_msgs
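The NODE_MODE_TO_BROKER_STYLE dict above inverts NODE_MODE_MAP so the tests can use runner-side mode names ("all_items", "per_item") while the broker receives its wire-format names. A hypothetical illustration of the mechanics; the real NODE_MODE_MAP lives in src.message_serde and its exact entries are not shown in this diff:

# Keys invented for illustration; only the inversion pattern is from the conftest.
NODE_MODE_MAP = {"runOnceForAllItems": "all_items", "runOnceForEachItem": "per_item"}
NODE_MODE_TO_BROKER_STYLE = {v: k for k, v in NODE_MODE_MAP.items()}
assert NODE_MODE_TO_BROKER_STYLE["per_item"] == "runOnceForEachItem"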
@@ -0,0 +1,193 @@
import asyncio
import textwrap

import pytest
from src.nanoid import nanoid

from tests.integration.conftest import (
    create_task_settings,
    wait_for_task_done,
    wait_for_task_error,
)
from tests.fixtures.test_constants import TASK_TIMEOUT


# ========== all_items mode ==========


@pytest.mark.asyncio
async def test_all_items_with_success(broker, manager):
    task_id = nanoid()
    items = [
        {"json": {"name": "Alice", "age": 30}},
        {"json": {"name": "Bob", "age": 16}},
        {"json": {"name": "Charlie", "age": 35}},
    ]
    code = textwrap.dedent("""
        result = []
        for item in _items:
            person = item['json']
            result.append({
                'name': person['name'],
                'age': person['age'],
                'adult': person['age'] >= 18
            })
        return result
    """)
    task_settings = create_task_settings(code=code, node_mode="all_items", items=items)
    await broker.send_task(task_id=task_id, task_settings=task_settings)

    result = await wait_for_task_done(broker, task_id)

    assert result["data"]["result"] == [
        {"name": "Alice", "age": 30, "adult": True},
        {"name": "Bob", "age": 16, "adult": False},
        {"name": "Charlie", "age": 35, "adult": True},
    ]


@pytest.mark.asyncio
async def test_all_items_with_error(broker, manager):
    task_id = nanoid()
    code = "raise ValueError('Intentional error')"
    task_settings = create_task_settings(code=code, node_mode="all_items")
    await broker.send_task(task_id=task_id, task_settings=task_settings)

    error_msg = await wait_for_task_error(broker, task_id)

    assert error_msg["taskId"] == task_id
    assert "Intentional error" in str(error_msg["error"]["message"])


@pytest.mark.asyncio
async def test_all_items_with_continue_on_fail(broker, manager):
    task_id = nanoid()
    code = "raise ValueError('Intentional error')"
    task_settings = create_task_settings(
        code=code, node_mode="all_items", continue_on_fail=True
    )
    await broker.send_task(task_id=task_id, task_settings=task_settings)

    done_msg = await wait_for_task_done(broker, task_id)

    assert len(done_msg["data"]["result"]) == 1
    assert "error" in done_msg["data"]["result"][0]["json"]
    assert "Intentional error" in str(done_msg["data"]["result"][0]["json"]["error"])


# ========== per_item mode ==========


@pytest.mark.asyncio
async def test_per_item_with_success(broker, manager):
    task_id = nanoid()
    items = [
        {"json": {"value": 10}},
        {"json": {"value": 20}},
        {"json": {"value": 30}},
    ]
    code = "return {'doubled': _item['json']['value'] * 2}"
    task_settings = create_task_settings(code=code, node_mode="per_item", items=items)
    await broker.send_task(task_id=task_id, task_settings=task_settings)

    done_msg = await wait_for_task_done(broker, task_id)

    assert done_msg["taskId"] == task_id
    assert done_msg["data"]["result"] == [
        {"doubled": 20, "pairedItem": {"item": 0}},
        {"doubled": 40, "pairedItem": {"item": 1}},
        {"doubled": 60, "pairedItem": {"item": 2}},
    ]


@pytest.mark.asyncio
async def test_per_item_with_filtering(broker, manager):
    task_id = nanoid()
    items = [
        {"json": {"value": 5}},
        {"json": {"value": 15}},
        {"json": {"value": 25}},
        {"json": {"value": 8}},
    ]
    code = textwrap.dedent("""
        value = _item['json']['value']
        if value > 10:
            return {'value': value, 'passed': True}
        else:
            return None  # Filter out this item
    """)
    task_settings = create_task_settings(code=code, node_mode="per_item", items=items)
    await broker.send_task(task_id=task_id, task_settings=task_settings)

    result = await wait_for_task_done(broker, task_id)

    assert result["data"]["result"] == [
        {"value": 15, "passed": True, "pairedItem": {"item": 1}},
        {"value": 25, "passed": True, "pairedItem": {"item": 2}},
    ]


@pytest.mark.asyncio
async def test_per_item_with_continue_on_fail(broker, manager):
    task_id = nanoid()
    items = [
        {"json": {"value": 10}},
        {"json": {"value": 0}},  # Will cause division by zero
        {"json": {"value": 20}},
    ]
    code = "return {'result': 100 / _item['json']['value']}"
    task_settings = create_task_settings(
        code=code,
        node_mode="per_item",
        items=items,
        continue_on_fail=True,
    )
    await broker.send_task(task_id=task_id, task_settings=task_settings)

    done_msg = await wait_for_task_done(broker, task_id)

    assert len(done_msg["data"]["result"]) == 1
    assert "error" in done_msg["data"]["result"][0]["json"]
    assert "division by zero" in done_msg["data"]["result"][0]["json"]["error"]


# ========== edge cases ==========


@pytest.mark.asyncio
async def test_cancel_during_execution(broker, manager):
    task_id = nanoid()
    code = textwrap.dedent("""
        import time
        for i in range(20):
            time.sleep(0.05)
            if i == 10:
                # Should be cancelled around here
                pass
        return [{"completed": "should not reach here"}]
    """)
    task_settings = create_task_settings(code=code, node_mode="all_items")
    await broker.send_task(task_id=task_id, task_settings=task_settings)
    await asyncio.sleep(0.3)
    await broker.cancel_task(task_id, reason="Cancelled during execution")

    error_msg = await wait_for_task_error(broker, task_id)

    assert error_msg["taskId"] == task_id
    assert "error" in error_msg


@pytest.mark.asyncio
async def test_timeout_during_execution(broker, manager):
    task_id = nanoid()
    code = textwrap.dedent("""
        while True:
            pass
    """)
    task_settings = create_task_settings(code=code, node_mode="all_items")
    await broker.send_task(task_id=task_id, task_settings=task_settings)

    error_msg = await wait_for_task_error(broker, task_id, timeout=TASK_TIMEOUT + 0.5)

    assert error_msg["taskId"] == task_id
    assert "timed out" in error_msg["error"]["message"].lower()
@@ -0,0 +1,40 @@
import asyncio
import textwrap

import aiohttp
import pytest
from src.nanoid import nanoid

from tests.integration.conftest import create_task_settings
from tests.fixtures.test_constants import HEALTH_CHECK_URL


@pytest.mark.asyncio
async def test_health_check_server_responds(broker, manager):
    async with aiohttp.ClientSession() as session:
        for _ in range(10):
            try:
                response = await session.get(HEALTH_CHECK_URL)
                if response.status == 200:
                    assert await response.text() == "OK"
                    return
            except aiohttp.ClientConnectionError:
                await asyncio.sleep(0.1)
        pytest.fail("Health check server never returned 200 OK")  # fail loudly instead of passing vacuously


@pytest.mark.asyncio
async def test_health_check_server_responds_mid_execution(broker, manager):
    task_id = nanoid()
    code = textwrap.dedent("""
        for _ in range(10_000_000):
            pass
        return [{"result": "completed"}]
    """)
    task_settings = create_task_settings(code=code, node_mode="all_items")
    await broker.send_task(task_id=task_id, task_settings=task_settings)
    await asyncio.sleep(0.3)

    async with aiohttp.ClientSession() as session:
        response = await session.get(HEALTH_CHECK_URL)
        assert response.status == 200
        assert await response.text() == "OK"
116  packages/@n8n/task-runner-python/tests/integration/test_rpc.py  (new file)
@@ -0,0 +1,116 @@
import textwrap

import pytest
from src.nanoid import nanoid

from tests.integration.conftest import (
    create_task_settings,
    get_browser_console_msgs,
    wait_for_task_done,
)


@pytest.mark.asyncio
async def test_print_basic_types(broker, manager):
    task_id = nanoid()
    code = textwrap.dedent("""
        print("Hello, World!")
        print(42)
        print(3.14)
        print(True)
        print(None)
        print("Multiple", "args", 123, False)
        return [{"printed": "ok"}]
    """)
    task_settings = create_task_settings(code=code, node_mode="all_items", can_log=True)
    await broker.send_task(task_id=task_id, task_settings=task_settings)

    done_msg = await wait_for_task_done(broker, task_id, timeout=5.0)

    assert done_msg["taskId"] == task_id
    assert done_msg["data"]["result"] == [{"printed": "ok"}]

    msgs = get_browser_console_msgs(broker, task_id)

    assert len(msgs) > 0, "Should have captured console messages"

    all_args = []
    for msg in msgs:
        all_args.extend(msg)

    expected = [
        "'Hello, World!'",
        "42",
        "3.14",
        "True",
        "None",
        "'Multiple'",
        "'args'",
        "123",
        "False",
    ]
    for item in expected:
        assert item in all_args, f"Expected '{item}' not found in console output"


@pytest.mark.asyncio
async def test_print_complex_types(broker, manager):
    task_id = nanoid()
    code = textwrap.dedent("""
        print({"name": "John", "age": 30, "active": True})
        print([1, 2, "three", {"four": 4}])
        print({"users": [{"id": 1, "name": "Alice"}, {"id": 2, "name": "Bob"}]})
        return [{"result": "success"}]
    """)
    task_settings = create_task_settings(code=code, node_mode="all_items", can_log=True)
    await broker.send_task(task_id=task_id, task_settings=task_settings)

    result_msg = await wait_for_task_done(broker, task_id, timeout=5.0)

    assert result_msg["data"]["result"] == [{"result": "success"}]

    msgs = get_browser_console_msgs(broker, task_id)
    assert len(msgs) > 0, "Should have captured console messages"

    all_output = " ".join(["".join(msg) for msg in msgs]).replace(" ", "")
    expected = [
        '{"name":"John","age":30,"active":true}',
        '[1,2,"three",{"four":4}]',
    ]
    for item in expected:
        assert item in all_output, f"Expected '{item}' not found in console output"


@pytest.mark.asyncio
async def test_print_edge_cases(broker, manager):
    task_id = nanoid()
    code = textwrap.dedent("""
        print("Hello 世界 🌍")
        print({"emoji": "🚀", "chinese": "你好", "arabic": "مرحبا"})
        print("Line\\nbreak")
        print("Tab\\tseparated")
        print('Quote "test" here')
        print()
        print("")
        print(" ")
        print([])
        print({})
        print(())
        print("x" * 1_000)
        return [{"test": "complete"}]
    """)
    task_settings = create_task_settings(code=code, node_mode="all_items", can_log=True)

    await broker.send_task(task_id=task_id, task_settings=task_settings)

    done_msg = await wait_for_task_done(broker, task_id, timeout=5.0)

    assert done_msg["data"]["result"] == [{"test": "complete"}]

    msgs = get_browser_console_msgs(broker, task_id)
    assert len(msgs) > 0, "Should have captured console messages"

    all_output = " ".join(["".join(msg) for msg in msgs])
    expected = ["世界", "🌍", "🚀", "你好", "[]", "{}"]
    for item in expected:
        assert item in all_output, f"Expected '{item}' not found in console output"
221  packages/@n8n/task-runner-python/tests/unit/test_sentry.py  (new file)
@@ -0,0 +1,221 @@
import logging
from unittest.mock import Mock, patch

import pytest

from src.config.sentry_config import SentryConfig
from src.errors.task_runtime_error import TaskRuntimeError
from src.sentry import TaskRunnerSentry, setup_sentry
from src.constants import (
    EXECUTOR_ALL_ITEMS_FILENAME,
    EXECUTOR_PER_ITEM_FILENAME,
    LOG_SENTRY_MISSING,
    SENTRY_TAG_SERVER_TYPE_KEY,
    SENTRY_TAG_SERVER_TYPE_VALUE,
)


@pytest.fixture
def sentry_config():
    return SentryConfig(
        dsn="https://test@sentry.io/123456",
        n8n_version="1.0.0",
        environment="test",
        deployment_name="test-deployment",
    )


@pytest.fixture
def disabled_sentry_config():
    return SentryConfig(
        dsn="",
        n8n_version="1.0.0",
        environment="test",
        deployment_name="test-deployment",
    )


class TestTaskRunnerSentry:
    def test_init_configures_sentry_correctly(self, sentry_config):
        with (
            patch("sentry_sdk.init") as mock_init,
            patch("sentry_sdk.set_tag") as mock_set_tag,
            patch("sentry_sdk.integrations.logging.LoggingIntegration") as mock_logging,
        ):
            mock_logging_instance = Mock()
            mock_logging.return_value = mock_logging_instance
            sentry = TaskRunnerSentry(sentry_config)

            sentry.init()

            mock_init.assert_called_once_with(
                dsn="https://test@sentry.io/123456",
                release="n8n@1.0.0",
                environment="test",
                server_name="test-deployment",
                before_send=sentry._filter_out_ignored_errors,
                attach_stacktrace=True,
                send_default_pii=False,
                auto_enabling_integrations=False,
                default_integrations=True,
                integrations=[mock_logging_instance],
            )
            mock_set_tag.assert_called_once_with(
                SENTRY_TAG_SERVER_TYPE_KEY, SENTRY_TAG_SERVER_TYPE_VALUE
            )

    def test_shutdown_flushes_sentry(self, sentry_config):
        with patch("sentry_sdk.flush") as mock_flush:
            sentry = TaskRunnerSentry(sentry_config)

            sentry.shutdown()

            mock_flush.assert_called_once_with(timeout=2.0)

    def test_filter_out_task_runtime_errors(self, sentry_config):
        sentry = TaskRunnerSentry(sentry_config)
        event = {"exception": {"values": []}}
        hint = {"exc_info": (TaskRuntimeError, None, None)}

        result = sentry._filter_out_ignored_errors(event, hint)

        assert result is None

    def test_filter_out_user_code_errors_from_executors(self, sentry_config):
        sentry = TaskRunnerSentry(sentry_config)

        for executor_filename in [
            EXECUTOR_ALL_ITEMS_FILENAME,
            EXECUTOR_PER_ITEM_FILENAME,
        ]:
            event = {
                "exception": {
                    "values": [
                        {
                            "stacktrace": {
                                "frames": [
                                    {"filename": "some_file.py"},
                                    {"filename": executor_filename},
                                ]
                            }
                        }
                    ]
                }
            }
            hint = {}

            result = sentry._filter_out_ignored_errors(event, hint)

            assert result is None

    def test_allows_non_user_code_errors(self, sentry_config):
        sentry = TaskRunnerSentry(sentry_config)
        event = {
            "exception": {
                "values": [
                    {
                        "stacktrace": {
                            "frames": [
                                {"filename": "some_system_file.py"},
                                {"filename": "another_system_file.py"},
                            ]
                        }
                    }
                ]
            }
        }
        hint = {}

        result = sentry._filter_out_ignored_errors(event, hint)

        assert result == event

    def test_handles_malformed_exception_data(self, sentry_config):
        sentry = TaskRunnerSentry(sentry_config)

        test_cases = [
            {},
            {"exception": {"values": []}},
            {"exception": {"values": [{"type": "ValueError"}]}},
            {"exception": {"values": [{"stacktrace": {}}]}},
            {"exception": {"values": [{"stacktrace": {"frames": []}}]}},
        ]

        for event in test_cases:
            result = sentry._filter_out_ignored_errors(event, {})
            assert result == event


class TestSetupSentry:
    def test_returns_none_when_disabled(self, disabled_sentry_config):
        result = setup_sentry(disabled_sentry_config)
        assert result is None

    @patch("src.sentry.TaskRunnerSentry")
    def test_initializes_sentry_when_enabled(self, mock_sentry_class, sentry_config):
        mock_sentry = Mock()
        mock_sentry_class.return_value = mock_sentry

        result = setup_sentry(sentry_config)

        mock_sentry_class.assert_called_once_with(sentry_config)
        mock_sentry.init.assert_called_once()
        assert result == mock_sentry

    @patch("src.sentry.TaskRunnerSentry")
    def test_handles_import_error(self, mock_sentry_class, sentry_config, caplog):
        mock_sentry = Mock()
        mock_sentry.init.side_effect = ImportError("sentry_sdk not found")
        mock_sentry_class.return_value = mock_sentry

        with caplog.at_level(logging.WARNING):
            result = setup_sentry(sentry_config)

        assert result is None
        assert LOG_SENTRY_MISSING in caplog.text

    @patch("src.sentry.TaskRunnerSentry")
    def test_handles_general_exception(self, mock_sentry_class, sentry_config, caplog):
        mock_sentry = Mock()
        mock_sentry.init.side_effect = Exception("Something went wrong")
        mock_sentry_class.return_value = mock_sentry

        with caplog.at_level(logging.WARNING):
            result = setup_sentry(sentry_config)

        assert result is None
        assert "Failed to initialize Sentry: Something went wrong" in caplog.text


class TestSentryConfig:
    def test_enabled_returns_true_with_dsn(self, sentry_config):
        assert sentry_config.enabled is True

    def test_enabled_returns_false_without_dsn(self, disabled_sentry_config):
        assert disabled_sentry_config.enabled is False

    @patch.dict(
        "os.environ",
        {
            "N8N_SENTRY_DSN": "https://test@sentry.io/789",
            "N8N_VERSION": "2.0.0",
            "ENVIRONMENT": "production",
            "DEPLOYMENT_NAME": "prod-deployment",
        },
    )
    def test_from_env_creates_config_from_environment(self):
        config = SentryConfig.from_env()

        assert config.dsn == "https://test@sentry.io/789"
        assert config.n8n_version == "2.0.0"
        assert config.environment == "production"
        assert config.deployment_name == "prod-deployment"

    @patch.dict("os.environ", {}, clear=True)
    def test_from_env_uses_defaults_when_missing(self):
        config = SentryConfig.from_env()

        assert config.dsn == ""
        assert config.n8n_version == ""
        assert config.environment == ""
        assert config.deployment_name == ""
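Taken together, the filter tests above pin down a before_send policy that can be summarized in a short hedged sketch; the actual implementation lives in src/sentry.py and may differ in detail:

# Reconstruction of the behavior the tests assert; not the real source.
def _filter_out_ignored_errors(event, hint):
    exc_info = hint.get("exc_info")
    if exc_info and issubclass(exc_info[0], TaskRuntimeError):
        return None  # drop errors from user task code wrapped as TaskRuntimeError
    for value in event.get("exception", {}).get("values", []):
        frames = (value.get("stacktrace") or {}).get("frames") or []
        if any(
            frame.get("filename") in (EXECUTOR_ALL_ITEMS_FILENAME, EXECUTOR_PER_ITEM_FILENAME)
            for frame in frames
        ):
            return None  # drop errors originating in the user-code executors
    return event  # report everything else to Sentry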