#!/usr/bin/env python3
|
|
"""
|
|
n8n Workflow Orchestrator — Delegate API calls to n8n via webhooks.
|
|
|
|
The agent never touches external API credentials directly. Instead, it triggers
|
|
n8n workflows that hold the credentials securely.
|
|
|
|
Usage:
|
|
python3 n8n-orchestrator.py list # List configured workflows
|
|
python3 n8n-orchestrator.py trigger <workflow> [data] # Trigger a workflow
|
|
python3 n8n-orchestrator.py add <name> <webhook_url> # Register a workflow
|
|
python3 n8n-orchestrator.py status # Check n8n health
|
|
python3 n8n-orchestrator.py log # Show recent executions
|
|
|
|
Setup:
|
|
1. Install n8n: npm install -g n8n (or Docker)
|
|
2. Create webhook-triggered workflows in n8n
|
|
3. Register them here with 'add' command
|
|
4. Agents trigger workflows without knowing API keys
|
|
"""
|
|
|
|
import json
|
|
import sys
|
|
import time
|
|
from datetime import datetime
|
|
from pathlib import Path
|
|
from urllib.request import urlopen, Request
|
|
from urllib.error import URLError, HTTPError
|
|
|
|
# Storage root: <repo>/data/n8n-orchestrator, resolved relative to this script
# so the tool works regardless of the current working directory.
DATA_DIR = Path(__file__).parent.parent / "data" / "n8n-orchestrator"
DATA_DIR.mkdir(parents=True, exist_ok=True)  # created eagerly at import time

WORKFLOWS_FILE = DATA_DIR / "workflows.json"  # registry: name -> {webhook_url, description, added}
LOG_FILE = DATA_DIR / "executions.jsonl"      # append-only execution log, one JSON object per line
CONFIG_FILE = DATA_DIR / "config.json"        # orchestrator settings (seeded from DEFAULT_CONFIG)

# Written to CONFIG_FILE on first run; users may edit that file afterwards.
DEFAULT_CONFIG = {
    "n8n_base_url": "http://localhost:5678",
    "timeout_seconds": 30,
}
|
|
|
|
# Pre-built workflow templates agents can use.
# Purely informational: the `list` command prints these so agents know what
# payload shape a typical webhook expects. A real workflow must still be
# registered via the `add` command before it can be triggered.
WORKFLOW_TEMPLATES = {
    "send-email": {
        "description": "Send an email via configured SMTP",
        "expected_payload": {"to": "email", "subject": "string", "body": "string"},
    },
    "fetch-crypto-price": {
        "description": "Get current price for a crypto asset",
        "expected_payload": {"symbol": "BTC"},
    },
    "post-discord": {
        "description": "Post a message to a Discord channel",
        "expected_payload": {"channel": "channel-name", "message": "string"},
    },
    "google-sheets-append": {
        "description": "Append a row to a Google Sheet",
        "expected_payload": {"sheet_id": "string", "values": ["col1", "col2"]},
    },
    "telegram-notify": {
        "description": "Send a Telegram notification",
        "expected_payload": {"chat_id": "string", "message": "string"},
    },
}
|
|
|
|
|
|
def load_config():
    """Load orchestrator config, seeding CONFIG_FILE with defaults on first run.

    Returns a dict that always contains every DEFAULT_CONFIG key: values from
    the on-disk file override the defaults, so a config written by an older
    version (missing newer keys) still yields a complete configuration.
    """
    if CONFIG_FILE.exists():
        # Merge so user-set values win but missing keys fall back to defaults.
        return {**DEFAULT_CONFIG, **json.loads(CONFIG_FILE.read_text())}
    CONFIG_FILE.write_text(json.dumps(DEFAULT_CONFIG, indent=2))
    return DEFAULT_CONFIG
|
|
|
|
|
|
def load_workflows():
    """Return the registered workflow mapping; empty dict if none saved yet."""
    if not WORKFLOWS_FILE.exists():
        return {}
    return json.loads(WORKFLOWS_FILE.read_text())
|
|
|
|
|
|
def save_workflows(workflows):
    """Persist the workflow registry to disk as pretty-printed JSON."""
    serialized = json.dumps(workflows, indent=2)
    WORKFLOWS_FILE.write_text(serialized)
|
|
|
|
|
|
def log_execution(workflow_name, payload, response, success, duration_ms):
    """Append one execution record to the JSONL log.

    Only payload *keys* are logged (never values) to avoid persisting
    potentially sensitive data; the response is truncated to 200 chars.
    """
    entry = {
        "ts": datetime.now().isoformat(),
        "workflow": workflow_name,
        "payload_keys": list(payload.keys()) if isinstance(payload, dict) else str(type(payload)),
        "success": success,
        "duration_ms": duration_ms,
        # BUG FIX: compare against None, not truthiness — an empty-string or
        # empty-dict response used to be logged as null.
        "response_preview": str(response)[:200] if response is not None else None,
    }
    # Explicit UTF-8 so log bytes don't depend on the platform's locale.
    with open(LOG_FILE, "a", encoding="utf-8") as f:
        f.write(json.dumps(entry) + "\n")
|
|
|
|
|
|
def trigger_workflow(name, payload=None):
    """Trigger an n8n workflow by name.

    Looks up the registered webhook URL, POSTs `payload` as JSON, logs the
    execution, and returns the decoded response (dict, list, str, number, ...)
    on success or None on failure / unknown workflow.
    """
    workflows = load_workflows()
    if name not in workflows:
        print(f"❌ Workflow '{name}' not found. Use 'list' to see available workflows.")
        return None

    wf = workflows[name]
    url = wf["webhook_url"]
    config = load_config()

    print(f"🔄 Triggering workflow: {name}")
    print(f" URL: {url}")

    payload = payload or {}
    data = json.dumps(payload).encode('utf-8')
    req = Request(url, data=data, headers={
        'Content-Type': 'application/json',
        'User-Agent': 'n8n-orchestrator/1.0',
    })

    start = time.time()
    try:
        with urlopen(req, timeout=config.get("timeout_seconds", 30)) as resp:
            duration = int((time.time() - start) * 1000)
            body = resp.read().decode('utf-8')
            try:
                result = json.loads(body)
            except json.JSONDecodeError:
                # Not JSON — keep the raw text body.
                result = body

            log_execution(name, payload, result, True, duration)
            print(f"✅ Success ({duration}ms)")
            # BUG FIX: the old preview sliced `result[:200]` for any non-dict,
            # which raised TypeError when the webhook returned a JSON
            # number/bool/null. Stringify first, then truncate.
            if isinstance(result, (dict, list)):
                preview = json.dumps(result, indent=2)
            else:
                preview = str(result)[:200]
            print(f" Response: {preview}")
            return result

    except (HTTPError, URLError) as e:
        duration = int((time.time() - start) * 1000)
        error_msg = str(e)
        log_execution(name, payload, error_msg, False, duration)
        print(f"❌ Failed ({duration}ms): {error_msg}")
        return None
|
|
|
|
|
|
def add_workflow(name, webhook_url, description=""):
    """Register (or overwrite) a webhook-triggered workflow under `name`."""
    registry = load_workflows()
    entry = {
        "webhook_url": webhook_url,
        "description": description,
        "added": datetime.now().isoformat(),
    }
    registry[name] = entry
    save_workflows(registry)
    print(f"✅ Added workflow: {name} -> {webhook_url}")
|
|
|
|
|
|
def list_workflows():
    """Print every registered workflow, then the built-in templates."""
    registry = load_workflows()
    print(f"\n📋 Registered Workflows ({len(registry)}):")
    print("=" * 50)
    if not registry:
        print(" No workflows registered yet.")
        print(" Use: python3 n8n-orchestrator.py add <name> <webhook_url>")
    for wf_name, wf in registry.items():
        print(f" 🔗 {wf_name}")
        print(f" URL: {wf['webhook_url']}")
        desc = wf.get("description")
        if desc:
            print(f" Desc: {desc}")

    print(f"\n📦 Available Templates:")
    for tmpl_name, tmpl in WORKFLOW_TEMPLATES.items():
        print(f" 📝 {tmpl_name}: {tmpl['description']}")
        print(f" Payload: {json.dumps(tmpl['expected_payload'])}")
|
|
|
|
|
|
def check_n8n_status():
    """Ping n8n's /healthz endpoint and report whether the server is up."""
    base = load_config().get("n8n_base_url", "http://localhost:5678")
    print(f"🏥 Checking n8n at {base}...")
    try:
        health_req = Request(f"{base}/healthz", headers={'User-Agent': 'n8n-orchestrator/1.0'})
        with urlopen(health_req, timeout=5) as resp:
            print(f"✅ n8n is running (HTTP {resp.status})")
    except Exception as e:
        # Deliberately broad: any failure here just means "unreachable".
        print(f"❌ n8n unreachable: {e}")
        print(f" Start it with: n8n start (or docker run -d -p 5678:5678 n8nio/n8n)")
|
|
|
|
|
|
def show_log(limit=20):
    """Print the most recent `limit` executions from the JSONL log.

    Each log line is one JSON object written by log_execution().
    """
    if not LOG_FILE.exists():
        print("No executions logged yet.")
        return
    # BUG FIX: use splitlines() and skip blanks — the old
    # read_text().strip().split('\n') produced [''] for an empty (or
    # whitespace-only) log file and crashed in json.loads('').
    lines = [ln for ln in LOG_FILE.read_text().splitlines() if ln.strip()]
    print(f"\n📜 Recent Executions (last {limit}):")
    for line in lines[-limit:]:
        entry = json.loads(line)
        icon = "✅" if entry["success"] else "❌"
        print(f" {icon} [{entry['ts'][:19]}] {entry['workflow']} ({entry['duration_ms']}ms)")
|
|
|
|
|
|
if __name__ == "__main__":
    # CLI dispatch: no args defaults to the `list` command; anything
    # unrecognized (or under-supplied) falls through to the usage docstring.
    args = sys.argv[1:]
    command = args[0] if args else "list"

    if command == "list":
        list_workflows()
    elif command == "trigger" and len(args) >= 2:
        trigger_payload = json.loads(args[2]) if len(args) > 2 else {}
        trigger_workflow(args[1], trigger_payload)
    elif command == "add" and len(args) >= 3:
        add_workflow(args[1], args[2], args[3] if len(args) > 3 else "")
    elif command == "status":
        check_n8n_status()
    elif command == "log":
        show_log()
    else:
        print(__doc__)
|