chore(10-06): remove Python helper scripts
- Delete refactor_workflow.py
- Delete task1_batch_update.py
- Delete task2_batch_actions.py
- Delete task3_logs_subworkflow.py
- Delete task3_update_main.py
- Delete task4_cleanup.py

These were development-time helper scripts for one-off JSON manipulation, not runtime code.
This commit is contained in:
@@ -1,164 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
Refactor n8n workflow to use sub-workflows for batch operations and logs.
|
|
||||||
Task 1: Wire batch update to Container Update sub-workflow
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import copy
|
|
||||||
import sys
|
|
||||||
|
|
||||||
def load_workflow(filename):
    """Read *filename* and return its parsed JSON content."""
    with open(filename, 'r') as handle:
        raw = handle.read()
    return json.loads(raw)
|
|
||||||
|
|
||||||
def save_workflow(workflow, filename):
    """Serialize *workflow* to *filename* as pretty-printed (indent=2) JSON."""
    with open(filename, 'w') as handle:
        json.dump(workflow, handle, indent=2)
|
|
||||||
|
|
||||||
def find_node_by_name(workflow, name):
    """Return the first node dict whose 'name' equals *name*, or None."""
    matches = (node for node in workflow['nodes'] if node['name'] == name)
    return next(matches, None)
|
|
||||||
|
|
||||||
def remove_node(workflow, node_name):
    """Remove a node and all its connections.

    Fix: n8n stores ``connections[source][output_key]`` as a list of
    per-output-index lists of connection dicts (see the connections built
    elsewhere in these helpers, e.g. ``'main': [[{...}]]``).  The previous
    version iterated that structure as a flat list of dicts, so
    ``conn['node']`` raised TypeError on the inner lists.  This version
    filters inside each per-output sub-list, then keeps the original
    behavior of pruning output keys (and source entries) that end up empty.
    """
    # Remove from nodes list
    workflow['nodes'] = [n for n in workflow['nodes'] if n['name'] != node_name]

    # Remove from connections (as source)
    if node_name in workflow['connections']:
        del workflow['connections'][node_name]

    # Remove from connections (as target)
    for source, outputs in list(workflow['connections'].items()):
        for output_key, conn_lists in list(outputs.items()):
            outputs[output_key] = [
                [conn for conn in conn_list if conn.get('node') != node_name]
                for conn_list in conn_lists
            ]
            # Clean up output keys whose sub-lists are all empty
            if not any(outputs[output_key]):
                del outputs[output_key]
        # Clean up empty source nodes
        if not workflow['connections'][source]:
            del workflow['connections'][source]
|
|
||||||
|
|
||||||
def create_execute_workflow_node(name, workflow_id, position, parameters=None):
    """Build an Execute Workflow node dict in the n8n 1.2 format.

    *parameters*, when given, is merged over the default parameter set
    (matching keys are overwritten).  The node id is derived
    deterministically from *name*.
    """
    params = {
        "workflowId": {
            "__rl": True,
            "mode": "list",
            "value": workflow_id,
        },
        "options": {},
    }
    if parameters:
        params.update(parameters)

    slug = name.replace(' ', '-').lower()
    return {
        "parameters": params,
        "id": f"auto-generated-{slug}",
        "name": name,
        "type": "n8n-nodes-base.executeWorkflow",
        "typeVersion": 1.2,
        "position": position,
    }
|
|
||||||
|
|
||||||
def trace_flow_from_node(workflow, start_node_name, max_depth=10):
    """Follow the first 'main' connection from node to node.

    Returns the list of visited node names (at most *max_depth* entries),
    stopping when a node has no outgoing 'main' connection.
    """
    conns = workflow['connections']
    flow = []
    node = start_node_name

    while node and len(flow) < max_depth:
        flow.append(node)
        outputs = conns.get(node)
        node = None
        if outputs:
            main = outputs.get('main')
            if main and len(main) > 0 and len(main[0]) > 0:
                node = main[0][0]['node']

    return flow
|
|
||||||
|
|
||||||
def main():
    """Analyze n8n-workflow.json for the batch-update refactor.

    Read-only analysis pass: loads the workflow, locates the batch-update
    entry point, traces its flow, and lists existing Execute Workflow
    nodes.  Exits via sys.exit(1) if the entry node is missing.  Does NOT
    write the file back.

    Returns:
        (workflow, exec_nodes, flow) — the loaded workflow dict, the list
        of {'name', 'workflow_id'} summaries for Execute Workflow nodes,
        and the traced flow (list of node names).
    """
    print("Loading workflow...")
    workflow = load_workflow('n8n-workflow.json')

    print(f"Current node count: {len(workflow['nodes'])}")

    # Task 1: Refactor batch update to use Container Update sub-workflow
    print("\n=== TASK 1: Wire batch update to Container Update sub-workflow ===")

    # Find the batch update flow
    batch_prep = find_node_by_name(workflow, 'Prepare Update All Batch')
    if not batch_prep:
        print("ERROR: Could not find 'Prepare Update All Batch' node")
        sys.exit(1)

    print(f"Found batch update entry point at position {batch_prep['position']}")

    # Trace what happens after batch prep
    flow = trace_flow_from_node(workflow, 'Prepare Update All Batch')
    print(f"Current batch update flow: {' -> '.join(flow)}")

    # The current flow appears to be:
    # Prepare Update All Batch -> Send Batch Start Message -> ... (Docker operations)
    # We need to replace the Docker operations with a loop that calls the sub-workflow

    # However, looking at the node list, I notice "Prepare Update All Batch" is the only
    # batch update node. This suggests the batch update might be handled differently.
    # Let me check if there's already loop/split logic

    # Find all nodes that might be part of batch update
    batch_nodes = []
    for node in workflow['nodes']:
        if 'batch' in node['name'].lower() and 'update' in node['name'].lower():
            batch_nodes.append(node['name'])

    print(f"\nBatch update related nodes: {batch_nodes}")

    # Check if there's already a loop/split node in the flow
    loop_found = False
    for node_name in flow:
        node = find_node_by_name(workflow, node_name)
        if node and 'split' in node['type'].lower():
            loop_found = True
            print(f"Found loop/split node: {node_name}")

    if not loop_found:
        print("No loop node found - batch update may be using inline iteration")

    # Actually, based on the analysis, "Prepare Update All Batch" might already prepare
    # data for individual updates. Let me check what Execute Sub-workflow nodes exist
    print("\n=== Checking for existing Execute Workflow nodes ===")
    exec_nodes = []
    for node in workflow['nodes']:
        if node['type'] == 'n8n-nodes-base.executeWorkflow':
            # workflowId follows the n8n "resource locator" shape; fall back
            # to 'N/A' when the parameter is absent or malformed.
            exec_nodes.append({
                'name': node['name'],
                'workflow_id': node['parameters'].get('workflowId', {}).get('value', 'N/A')
            })

    print(f"Found {len(exec_nodes)} Execute Workflow nodes:")
    for en in exec_nodes:
        print(f" - {en['name']}: {en['workflow_id']}")

    # Save analysis for now
    print("\nAnalysis complete. Next: implement refactoring...")

    # For now, let's save the workflow unchanged and return analysis
    print(f"\nFinal node count: {len(workflow['nodes'])}")

    return workflow, exec_nodes, flow
|
|
||||||
|
|
||||||
if __name__ == '__main__':
    # Entry point: run the analysis and keep the results bound at module
    # scope (useful for interactive follow-up with `python -i`).
    workflow, exec_nodes, flow = main()
|
|
||||||
@@ -1,217 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
Task 1: Wire batch update to Container Update sub-workflow
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
# Workflow IDs from STATE.md
# NOTE(review): hard-coded n8n workflow IDs; presumably the "Container Update"
# and "Container Actions" sub-workflows — confirm against STATE.md.
CONTAINER_UPDATE_WF_ID = "7AvTzLtKXM2hZTio92_mC"
CONTAINER_ACTIONS_WF_ID = "fYSZS5PkH0VSEaT5"
|
|
||||||
|
|
||||||
def load_workflow():
    """Read and parse n8n-workflow.json from the current directory."""
    with open('n8n-workflow.json', 'r') as handle:
        workflow = json.load(handle)
    return workflow
|
|
||||||
|
|
||||||
def save_workflow(workflow):
    """Write *workflow* back to n8n-workflow.json and report the node count."""
    with open('n8n-workflow.json', 'w') as handle:
        json.dump(workflow, handle, indent=2)
    print(f"Saved workflow with {len(workflow['nodes'])} nodes")
|
|
||||||
|
|
||||||
def find_node(workflow, name):
    """Return the node dict with the given display name, or None."""
    return next((n for n in workflow['nodes'] if n['name'] == name), None)
|
|
||||||
|
|
||||||
def create_execute_workflow_node(name, workflow_id, position):
    """Build an Execute Workflow node dict in the n8n 1.2 format.

    The workflow reference uses the n8n "resource locator" shape; the node
    id is a fresh random UUID.
    """
    workflow_ref = {"__rl": True, "mode": "list", "value": workflow_id}
    return {
        "parameters": {"workflowId": workflow_ref, "options": {}},
        "id": str(uuid.uuid4()),
        "name": name,
        "type": "n8n-nodes-base.executeWorkflow",
        "typeVersion": 1.2,
        "position": position,
    }
|
|
||||||
|
|
||||||
def create_code_node(name, code, position):
    """Build an n8n Code node (typeVersion 2) whose jsCode body is *code*."""
    node = {
        "parameters": {"jsCode": code},
        "id": str(uuid.uuid4()),
        "name": name,
        "type": "n8n-nodes-base.code",
        "typeVersion": 2,
        "position": position,
    }
    return node
|
|
||||||
|
|
||||||
def main():
    """Wire the batch-update path to the Container Update sub-workflow.

    Mutates n8n-workflow.json in place: adds three nodes (prepare input,
    execute sub-workflow, handle result) on output 0 of "Route Batch Loop
    Action" and connects the result handler back to "Prepare Next
    Iteration", then saves the file.  Returns early (without saving) if
    the route node is missing.
    """
    print("Loading workflow...")
    workflow = load_workflow()
    initial_count = len(workflow['nodes'])
    print(f"Initial node count: {initial_count}")

    # Find Route Batch Loop Action
    route_node = find_node(workflow, "Route Batch Loop Action")
    if not route_node:
        print("ERROR: Could not find Route Batch Loop Action")
        return

    print(f"\nFound Route Batch Loop Action at {route_node['position']}")

    # The Route Batch Loop Action has 4 outputs:
    # 0: update (currently empty)
    # 1: start
    # 2: stop
    # 3: restart

    # We need to add nodes for the update path
    # Position: Route is at [3760, -500], so place new nodes at x=4000+

    # 1. Create "Prepare Batch Update Input" code node
    # This prepares the input for the sub-workflow
    prepare_code = '''// Prepare input for Container Update sub-workflow
const data = $json;
const container = data.container;

// Extract container info
const containerId = container.id || container.Id || '';
const containerName = container.name || container.Name || '';

return {
json: {
containerId: containerId,
containerName: containerName,
chatId: data.chatId,
messageId: data.progressMessageId || 0,
responseMode: "inline"
}
};'''

    prepare_node = create_code_node(
        "Prepare Batch Update Input",
        prepare_code,
        [4000, -800]
    )

    # 2. Create "Execute Batch Update" execute workflow node
    execute_node = create_execute_workflow_node(
        "Execute Batch Update",
        CONTAINER_UPDATE_WF_ID,
        [4220, -800]
    )

    # 3. Create "Handle Batch Update Result" code node
    # This processes the result from sub-workflow and prepares for next iteration
    handle_code = '''// Handle update result from sub-workflow
const data = $('Build Progress Message').item.json;
const result = $json;

// Update counters based on result
let successCount = data.successCount || 0;
let failureCount = data.failureCount || 0;
let warningCount = data.warningCount || 0;

if (result.success) {
successCount++;
} else {
failureCount++;
}

// Add to results array
const results = data.results || [];
results.push({
container: data.containerName,
action: 'update',
success: result.success,
message: result.message || ''
});

return {
json: {
...data,
successCount: successCount,
failureCount: failureCount,
warningCount: warningCount,
results: results
}
};'''

    handle_node = create_code_node(
        "Handle Batch Update Result",
        handle_code,
        [4440, -800]
    )

    # Add nodes to workflow
    print("\nAdding new nodes:")
    print(f" - {prepare_node['name']}")
    print(f" - {execute_node['name']}")
    print(f" - {handle_node['name']}")

    workflow['nodes'].extend([prepare_node, execute_node, handle_node])

    # Add connections
    print("\nAdding connections:")

    # Route Batch Loop Action (output 0: update) -> Prepare Batch Update Input
    # Initialize with four empty output slots so indices 0-3 stay aligned.
    if 'Route Batch Loop Action' not in workflow['connections']:
        workflow['connections']['Route Batch Loop Action'] = {'main': [[], [], [], []]}

    workflow['connections']['Route Batch Loop Action']['main'][0] = [{
        "node": "Prepare Batch Update Input",
        "type": "main",
        "index": 0
    }]
    print(" - Route Batch Loop Action [update] -> Prepare Batch Update Input")

    # Prepare Batch Update Input -> Execute Batch Update
    workflow['connections']['Prepare Batch Update Input'] = {
        'main': [[{
            "node": "Execute Batch Update",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Prepare Batch Update Input -> Execute Batch Update")

    # Execute Batch Update -> Handle Batch Update Result
    workflow['connections']['Execute Batch Update'] = {
        'main': [[{
            "node": "Handle Batch Update Result",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Execute Batch Update -> Handle Batch Update Result")

    # Handle Batch Update Result -> Prepare Next Iteration (same as other actions)
    workflow['connections']['Handle Batch Update Result'] = {
        'main': [[{
            "node": "Prepare Next Iteration",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Handle Batch Update Result -> Prepare Next Iteration")

    # Save
    final_count = len(workflow['nodes'])
    print(f"\nNode count: {initial_count} -> {final_count} ({final_count - initial_count:+d})")

    save_workflow(workflow)
    print("\n✓ Task 1 complete: Batch update now uses Container Update sub-workflow")
|
|
||||||
|
|
||||||
if __name__ == '__main__':
    # Entry point: rewire the batch-update path when run as a script.
    main()
|
|
||||||
@@ -1,241 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
Task 2: Wire batch actions to Container Actions sub-workflow
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
# Hard-coded n8n workflow ID of the Container Actions sub-workflow
# (same value as in the task-1 helper; presumably from STATE.md — confirm).
CONTAINER_ACTIONS_WF_ID = "fYSZS5PkH0VSEaT5"
|
|
||||||
|
|
||||||
def load_workflow():
    """Parse and return n8n-workflow.json from the current directory."""
    with open('n8n-workflow.json', 'r') as src:
        contents = src.read()
    return json.loads(contents)
|
|
||||||
|
|
||||||
def save_workflow(workflow):
    """Persist *workflow* to n8n-workflow.json and log the node count."""
    with open('n8n-workflow.json', 'w') as out:
        json.dump(workflow, out, indent=2)
    print(f"Saved workflow with {len(workflow['nodes'])} nodes")
|
|
||||||
|
|
||||||
def find_node(workflow, name):
    """Look up a node by its display name; returns None when absent."""
    for candidate in workflow['nodes']:
        if candidate['name'] == name:
            return candidate
    return None
|
|
||||||
|
|
||||||
def remove_node(workflow, node_name):
    """Remove a node and all its connections.

    Drops *node_name* from the node list, deletes its outgoing connection
    entry, and filters it out of every other node's per-output connection
    sub-lists.  Emptied sub-lists are kept so n8n output indices stay
    aligned.
    """
    workflow['nodes'] = [node for node in workflow['nodes'] if node['name'] != node_name]

    connections = workflow['connections']
    # Outgoing connections from the removed node.
    if node_name in connections:
        del connections[node_name]

    # Incoming references: filter each per-output sub-list in place.
    for source, outputs in list(connections.items()):
        for output_key, conn_lists in list(outputs.items()):
            filtered = []
            for conn_list in conn_lists:
                filtered.append([c for c in conn_list if c.get('node') != node_name])
            connections[source][output_key] = filtered
|
|
||||||
|
|
||||||
def create_execute_workflow_node(name, workflow_id, position):
    """Build an Execute Workflow node (n8n typeVersion 1.2) targeting *workflow_id*."""
    node = {
        "parameters": {
            "workflowId": {"__rl": True, "mode": "list", "value": workflow_id},
            "options": {},
        },
        "id": str(uuid.uuid4()),
        "name": name,
        "type": "n8n-nodes-base.executeWorkflow",
        "typeVersion": 1.2,
        "position": position,
    }
    return node
|
|
||||||
|
|
||||||
def create_code_node(name, code, position):
    """Build a Code node (n8n typeVersion 2) running the given JavaScript."""
    return {
        "parameters": {"jsCode": code},
        "id": str(uuid.uuid4()),
        "name": name,
        "type": "n8n-nodes-base.code",
        "typeVersion": 2,
        "position": position,
    }
|
|
||||||
|
|
||||||
def main():
    """Wire batch start/stop/restart to the Container Actions sub-workflow.

    Mutates n8n-workflow.json in place: adds three nodes (prepare input,
    execute sub-workflow, handle result) on outputs 1-3 of "Route Batch
    Loop Action", connects the result handler to "Prepare Next Iteration",
    removes the now-obsolete inline action nodes, and saves the file.
    """
    print("Loading workflow...")
    workflow = load_workflow()
    initial_count = len(workflow['nodes'])
    print(f"Initial node count: {initial_count}")

    # Current batch action flow:
    # Route Batch Loop Action (outputs 1,2,3) -> Build Batch Action Command ->
    # Execute Batch Container Action -> Check Batch Action Result -> ...

    # We need to replace this with:
    # Route Batch Loop Action -> Prepare Batch Action Input -> Execute Batch Action ->
    # Handle Batch Action Result

    # 1. Create "Prepare Batch Action Input" code node
    prepare_code = '''// Prepare input for Container Actions sub-workflow
const data = $json;
const container = data.container;
const action = data.action;

// Extract container info
const containerId = container.id || container.Id || '';
const containerName = container.name || container.Name || '';

return {
json: {
containerId: containerId,
containerName: containerName,
action: action,
chatId: data.chatId,
messageId: data.progressMessageId || 0,
responseMode: "inline"
}
};'''

    prepare_node = create_code_node(
        "Prepare Batch Action Input",
        prepare_code,
        [4000, -200]
    )

    # 2. Create "Execute Batch Action" execute workflow node
    execute_node = create_execute_workflow_node(
        "Execute Batch Action Sub-workflow",
        CONTAINER_ACTIONS_WF_ID,
        [4220, -200]
    )

    # 3. Create "Handle Batch Action Result" code node
    handle_code = '''// Handle action result from sub-workflow
const data = $('Build Progress Message').item.json;
const result = $json;

// Update counters based on result
let successCount = data.successCount || 0;
let failureCount = data.failureCount || 0;
let warningCount = data.warningCount || 0;

if (result.success) {
successCount++;
} else {
failureCount++;
}

// Add to results array
const results = data.results || [];
results.push({
container: data.containerName,
action: data.action,
success: result.success,
message: result.message || ''
});

return {
json: {
...data,
successCount: successCount,
failureCount: failureCount,
warningCount: warningCount,
results: results
}
};'''

    handle_node = create_code_node(
        "Handle Batch Action Result Sub",
        handle_code,
        [4440, -200]
    )

    # Add nodes to workflow
    print("\nAdding new nodes:")
    print(f" - {prepare_node['name']}")
    print(f" - {execute_node['name']}")
    print(f" - {handle_node['name']}")

    workflow['nodes'].extend([prepare_node, execute_node, handle_node])

    # Update connections from Route Batch Loop Action
    # Outputs 1, 2, 3 (start, stop, restart) should go to Prepare Batch Action Input
    print("\nUpdating connections:")

    for i in [1, 2, 3]:
        workflow['connections']['Route Batch Loop Action']['main'][i] = [{
            "node": "Prepare Batch Action Input",
            "type": "main",
            "index": 0
        }]
    print(" - Route Batch Loop Action [start/stop/restart] -> Prepare Batch Action Input")

    # Add new connections
    workflow['connections']['Prepare Batch Action Input'] = {
        'main': [[{
            "node": "Execute Batch Action Sub-workflow",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Prepare Batch Action Input -> Execute Batch Action Sub-workflow")

    workflow['connections']['Execute Batch Action Sub-workflow'] = {
        'main': [[{
            "node": "Handle Batch Action Result Sub",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Execute Batch Action Sub-workflow -> Handle Batch Action Result Sub")

    workflow['connections']['Handle Batch Action Result Sub'] = {
        'main': [[{
            "node": "Prepare Next Iteration",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Handle Batch Action Result Sub -> Prepare Next Iteration")

    # Now remove the old nodes that are no longer needed
    print("\nRemoving obsolete nodes:")
    nodes_to_remove = [
        "Build Batch Action Command",
        "Execute Batch Container Action",
        "Check Batch Action Result",
        "Needs Action Call",
        "Execute Batch Action 2",
        "Parse Batch Action 2",
        "Handle Action Result"
    ]

    removed_count = 0
    for node_name in nodes_to_remove:
        # Guard with find_node so missing nodes are skipped silently.
        if find_node(workflow, node_name):
            print(f" - Removing: {node_name}")
            remove_node(workflow, node_name)
            removed_count += 1

    # Save
    final_count = len(workflow['nodes'])
    print(f"\nNode count: {initial_count} -> {final_count} ({final_count - initial_count:+d})")
    print(f"Removed: {removed_count} nodes")
    print(f"Added: 3 nodes")
    print(f"Net change: {final_count - initial_count:+d} nodes")

    save_workflow(workflow)
    print("\n✓ Task 2 complete: Batch actions now use Container Actions sub-workflow")
|
|
||||||
|
|
||||||
if __name__ == '__main__':
    # Entry point: rewire the batch start/stop/restart path when run as a script.
    main()
|
|
||||||
@@ -1,375 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
Task 3: Extract logs flow to sub-workflow
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
def create_logs_subworkflow():
|
|
||||||
"""Create n8n-container-logs.json sub-workflow"""
|
|
||||||
|
|
||||||
# Create workflow structure
|
|
||||||
workflow = {
|
|
||||||
"name": "Container Logs",
|
|
||||||
"nodes": [],
|
|
||||||
"connections": {},
|
|
||||||
"active": True,
|
|
||||||
"settings": {
|
|
||||||
"executionOrder": "v1"
|
|
||||||
},
|
|
||||||
"versionId": str(uuid.uuid4()),
|
|
||||||
"meta": {
|
|
||||||
"instanceId": "unraid-docker-manager"
|
|
||||||
},
|
|
||||||
"tags": []
|
|
||||||
}
|
|
||||||
|
|
||||||
# 1. Execute Workflow Trigger (entry point)
|
|
||||||
trigger = {
|
|
||||||
"parameters": {},
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"name": "Execute Workflow Trigger",
|
|
||||||
"type": "n8n-nodes-base.executeWorkflowTrigger",
|
|
||||||
"typeVersion": 1,
|
|
||||||
"position": [240, 300]
|
|
||||||
}
|
|
||||||
|
|
||||||
# 2. Parse Input - validates and extracts input parameters
|
|
||||||
parse_input_code = '''// Parse and validate input
|
|
||||||
const input = $json;
|
|
||||||
|
|
||||||
// Get container identifier (ID or name)
|
|
||||||
const containerId = input.containerId || '';
|
|
||||||
const containerName = input.containerName || '';
|
|
||||||
const lineCount = input.lineCount || 50;
|
|
||||||
const chatId = input.chatId;
|
|
||||||
const messageId = input.messageId || 0;
|
|
||||||
const responseMode = input.responseMode || 'text';
|
|
||||||
|
|
||||||
if (!containerId && !containerName) {
|
|
||||||
throw new Error('Either containerId or containerName required');
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!chatId) {
|
|
||||||
throw new Error('chatId required');
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
json: {
|
|
||||||
containerId: containerId,
|
|
||||||
containerName: containerName,
|
|
||||||
lineCount: Math.min(Math.max(parseInt(lineCount), 1), 1000),
|
|
||||||
chatId: chatId,
|
|
||||||
messageId: messageId,
|
|
||||||
responseMode: responseMode
|
|
||||||
}
|
|
||||||
};'''
|
|
||||||
|
|
||||||
parse_input = {
|
|
||||||
"parameters": {
|
|
||||||
"jsCode": parse_input_code
|
|
||||||
},
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"name": "Parse Input",
|
|
||||||
"type": "n8n-nodes-base.code",
|
|
||||||
"typeVersion": 2,
|
|
||||||
"position": [460, 300]
|
|
||||||
}
|
|
||||||
|
|
||||||
# 3. Get Container ID (if only name provided)
|
|
||||||
get_container_code = '''// Get container ID if needed
|
|
||||||
const data = $json;
|
|
||||||
|
|
||||||
// If we already have container ID, pass through
|
|
||||||
if (data.containerId) {
|
|
||||||
return {
|
|
||||||
json: {
|
|
||||||
...data,
|
|
||||||
useDirectId: true
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Otherwise, need to query Docker to find by name
|
|
||||||
return {
|
|
||||||
json: {
|
|
||||||
...data,
|
|
||||||
useDirectId: false,
|
|
||||||
dockerCommand: 'curl -s --max-time 5 "http://docker-socket-proxy:2375/v1.47/containers/json?all=1"'
|
|
||||||
}
|
|
||||||
};'''
|
|
||||||
|
|
||||||
get_container = {
|
|
||||||
"parameters": {
|
|
||||||
"jsCode": get_container_code
|
|
||||||
},
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"name": "Check Container ID",
|
|
||||||
"type": "n8n-nodes-base.code",
|
|
||||||
"typeVersion": 2,
|
|
||||||
"position": [680, 300]
|
|
||||||
}
|
|
||||||
|
|
||||||
# 4. Route based on whether we need to query
|
|
||||||
route_node = {
|
|
||||||
"parameters": {
|
|
||||||
"rules": {
|
|
||||||
"values": [
|
|
||||||
{
|
|
||||||
"id": "has-id",
|
|
||||||
"conditions": {
|
|
||||||
"options": {
|
|
||||||
"caseSensitive": True,
|
|
||||||
"typeValidation": "loose"
|
|
||||||
},
|
|
||||||
"conditions": [
|
|
||||||
{
|
|
||||||
"id": "check-direct",
|
|
||||||
"leftValue": "={{ $json.useDirectId }}",
|
|
||||||
"rightValue": "true",
|
|
||||||
"operator": {
|
|
||||||
"type": "boolean",
|
|
||||||
"operation": "true"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"combinator": "and"
|
|
||||||
},
|
|
||||||
"renameOutput": True,
|
|
||||||
"outputKey": "direct"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"options": {
|
|
||||||
"fallbackOutput": "extra"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"name": "Route ID Check",
|
|
||||||
"type": "n8n-nodes-base.switch",
|
|
||||||
"typeVersion": 3.2,
|
|
||||||
"position": [900, 300]
|
|
||||||
}
|
|
||||||
|
|
||||||
# 5. Query Docker (if needed)
|
|
||||||
query_docker = {
|
|
||||||
"parameters": {
|
|
||||||
"command": "={{ $json.dockerCommand }}"
|
|
||||||
},
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"name": "Query Docker",
|
|
||||||
"type": "n8n-nodes-base.executeCommand",
|
|
||||||
"typeVersion": 1,
|
|
||||||
"position": [1120, 400]
|
|
||||||
}
|
|
||||||
|
|
||||||
# 6. Find Container by Name
|
|
||||||
find_container_code = '''// Find container by name
|
|
||||||
const dockerOutput = $input.item.json.stdout;
|
|
||||||
const data = $('Check Container ID').item.json;
|
|
||||||
const containerName = data.containerName.toLowerCase();
|
|
||||||
|
|
||||||
// Parse Docker response
|
|
||||||
let containers;
|
|
||||||
try {
|
|
||||||
containers = JSON.parse(dockerOutput);
|
|
||||||
} catch (e) {
|
|
||||||
throw new Error('Failed to parse Docker response');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Normalize name function
|
|
||||||
function normalizeName(name) {
|
|
||||||
return name
|
|
||||||
.replace(/^\//, '')
|
|
||||||
.replace(/^(linuxserver[-_]|binhex[-_])/i, '')
|
|
||||||
.toLowerCase();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Find exact match
|
|
||||||
const container = containers.find(c => normalizeName(c.Names[0]) === containerName);
|
|
||||||
|
|
||||||
if (!container) {
|
|
||||||
throw new Error(`Container "${containerName}" not found`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
json: {
|
|
||||||
...data,
|
|
||||||
containerId: container.Id,
|
|
||||||
containerName: normalizeName(container.Names[0])
|
|
||||||
}
|
|
||||||
};'''
|
|
||||||
|
|
||||||
find_container = {
|
|
||||||
"parameters": {
|
|
||||||
"jsCode": find_container_code
|
|
||||||
},
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"name": "Find Container",
|
|
||||||
"type": "n8n-nodes-base.code",
|
|
||||||
"typeVersion": 2,
|
|
||||||
"position": [1340, 400]
|
|
||||||
}
|
|
||||||
|
|
||||||
# 7. Build Logs Command
|
|
||||||
build_command_code = '''// Build Docker logs command
|
|
||||||
const data = $json;
|
|
||||||
const containerId = data.containerId;
|
|
||||||
const lineCount = data.lineCount;
|
|
||||||
|
|
||||||
const cmd = `curl -s --max-time 10 "http://docker-socket-proxy:2375/v1.47/containers/${containerId}/logs?stdout=1&stderr=1&tail=${lineCount}×tamps=1"`;
|
|
||||||
|
|
||||||
return {
|
|
||||||
json: {
|
|
||||||
...data,
|
|
||||||
logsCommand: cmd
|
|
||||||
}
|
|
||||||
};'''
|
|
||||||
|
|
||||||
build_command = {
|
|
||||||
"parameters": {
|
|
||||||
"jsCode": build_command_code
|
|
||||||
},
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"name": "Build Logs Command",
|
|
||||||
"type": "n8n-nodes-base.code",
|
|
||||||
"typeVersion": 2,
|
|
||||||
"position": [1560, 300]
|
|
||||||
}
|
|
||||||
|
|
||||||
# 8. Execute Logs Command
|
|
||||||
execute_logs = {
|
|
||||||
"parameters": {
|
|
||||||
"command": "={{ $json.logsCommand }}"
|
|
||||||
},
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"name": "Execute Logs",
|
|
||||||
"type": "n8n-nodes-base.executeCommand",
|
|
||||||
"typeVersion": 1,
|
|
||||||
"position": [1780, 300]
|
|
||||||
}
|
|
||||||
|
|
||||||
# 9. Format Logs Output
|
|
||||||
format_logs_code = '''// Format logs output for Telegram
|
|
||||||
const rawOutput = $input.item.json.stdout || '';
|
|
||||||
const data = $('Build Logs Command').item.json;
|
|
||||||
|
|
||||||
// HTML escape function
|
|
||||||
function escapeHtml(text) {
|
|
||||||
return text.replace(/&/g, '&').replace(/</g, '<').replace(/>/g, '>');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle empty logs
|
|
||||||
if (!rawOutput || rawOutput.trim() === '') {
|
|
||||||
return {
|
|
||||||
json: {
|
|
||||||
success: true,
|
|
||||||
message: `No logs available for <b>${data.containerName}</b>`,
|
|
||||||
containerName: data.containerName,
|
|
||||||
lineCount: 0
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Strip Docker binary headers and process lines
|
|
||||||
const lines = rawOutput.split('\\n')
|
|
||||||
.filter(line => line.length > 0)
|
|
||||||
.map(line => {
|
|
||||||
// Check if line starts with binary header (8-byte Docker stream header)
|
|
||||||
if (line.length > 8 && line.charCodeAt(0) <= 2) {
|
|
||||||
return line.substring(8);
|
|
||||||
}
|
|
||||||
return line;
|
|
||||||
})
|
|
||||||
.join('\\n');
|
|
||||||
|
|
||||||
// Truncate for Telegram (4096 char limit, leave room for header)
|
|
||||||
const maxLen = 3800;
|
|
||||||
const truncated = lines.length > maxLen
|
|
||||||
? lines.substring(0, maxLen) + '\\n... (truncated)'
|
|
||||||
: lines;
|
|
||||||
|
|
||||||
// Escape HTML entities
|
|
||||||
const escaped = escapeHtml(truncated);
|
|
||||||
|
|
||||||
const lineCount = lines.split('\\n').length;
|
|
||||||
const header = `Logs for <b>${data.containerName}</b> (last ${lineCount} lines):\\n\\n`;
|
|
||||||
const formatted = header + '<pre>' + escaped + '</pre>';
|
|
||||||
|
|
||||||
return {
|
|
||||||
json: {
|
|
||||||
success: true,
|
|
||||||
message: formatted,
|
|
||||||
containerName: data.containerName,
|
|
||||||
lineCount: lineCount
|
|
||||||
}
|
|
||||||
};'''
|
|
||||||
|
|
||||||
format_logs = {
|
|
||||||
"parameters": {
|
|
||||||
"jsCode": format_logs_code
|
|
||||||
},
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"name": "Format Logs",
|
|
||||||
"type": "n8n-nodes-base.code",
|
|
||||||
"typeVersion": 2,
|
|
||||||
"position": [2000, 300]
|
|
||||||
}
|
|
||||||
|
|
||||||
# Add all nodes
|
|
||||||
workflow['nodes'] = [
|
|
||||||
trigger,
|
|
||||||
parse_input,
|
|
||||||
get_container,
|
|
||||||
route_node,
|
|
||||||
query_docker,
|
|
||||||
find_container,
|
|
||||||
build_command,
|
|
||||||
execute_logs,
|
|
||||||
format_logs
|
|
||||||
]
|
|
||||||
|
|
||||||
# Add connections
|
|
||||||
workflow['connections'] = {
|
|
||||||
"Execute Workflow Trigger": {
|
|
||||||
"main": [[{"node": "Parse Input", "type": "main", "index": 0}]]
|
|
||||||
},
|
|
||||||
"Parse Input": {
|
|
||||||
"main": [[{"node": "Check Container ID", "type": "main", "index": 0}]]
|
|
||||||
},
|
|
||||||
"Check Container ID": {
|
|
||||||
"main": [[{"node": "Route ID Check", "type": "main", "index": 0}]]
|
|
||||||
},
|
|
||||||
"Route ID Check": {
|
|
||||||
"main": [
|
|
||||||
[{"node": "Build Logs Command", "type": "main", "index": 0}], # direct path
|
|
||||||
[{"node": "Query Docker", "type": "main", "index": 0}] # query path
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"Query Docker": {
|
|
||||||
"main": [[{"node": "Find Container", "type": "main", "index": 0}]]
|
|
||||||
},
|
|
||||||
"Find Container": {
|
|
||||||
"main": [[{"node": "Build Logs Command", "type": "main", "index": 0}]]
|
|
||||||
},
|
|
||||||
"Build Logs Command": {
|
|
||||||
"main": [[{"node": "Execute Logs", "type": "main", "index": 0}]]
|
|
||||||
},
|
|
||||||
"Execute Logs": {
|
|
||||||
"main": [[{"node": "Format Logs", "type": "main", "index": 0}]]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return workflow
|
|
||||||
|
|
||||||
def save_logs_workflow(workflow):
    """Persist the Container Logs sub-workflow to n8n-container-logs.json.

    Pretty-prints with indent=2 so the file diffs cleanly, then reports how
    many nodes were written.
    """
    out_path = 'n8n-container-logs.json'
    with open(out_path, 'w') as handle:
        json.dump(workflow, handle, indent=2)
    node_total = len(workflow['nodes'])
    print(f"Created n8n-container-logs.json with {node_total} nodes")
|
|
||||||
|
|
||||||
# Script entry point: build the Container Logs sub-workflow definition and
# write it out as n8n-container-logs.json for manual import into n8n.
if __name__ == '__main__':
    print("Creating Container Logs sub-workflow...")
    workflow = create_logs_subworkflow()
    save_logs_workflow(workflow)
    print("✓ Container Logs sub-workflow created")
|
|
||||||
@@ -1,326 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
Task 3 Part 2: Update main workflow to use Container Logs sub-workflow
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
# This will be the ID assigned when we import the workflow to n8n
# For now, use a placeholder - we'll need to update this after import
# NOTE(review): while this is the placeholder, the Execute Workflow nodes
# written by main() point at a non-existent workflow — the script must be
# re-run with the real ID after importing n8n-container-logs.json.
CONTAINER_LOGS_WF_ID = "PLACEHOLDER_LOGS_ID"
|
|
||||||
|
|
||||||
def load_workflow():
    """Read and parse the main workflow definition from n8n-workflow.json."""
    with open('n8n-workflow.json') as handle:
        parsed = json.load(handle)
    return parsed
|
|
||||||
|
|
||||||
def save_workflow(workflow):
    """Serialize the workflow back to n8n-workflow.json and report its size."""
    with open('n8n-workflow.json', 'w') as fp:
        json.dump(workflow, fp, indent=2)
    node_count = len(workflow['nodes'])
    print(f"Saved workflow with {node_count} nodes")
|
|
||||||
|
|
||||||
def find_node(workflow, name):
    """Return the first node whose 'name' equals *name*, or None if absent."""
    matches = (candidate for candidate in workflow['nodes']
               if candidate['name'] == name)
    return next(matches, None)
|
|
||||||
|
|
||||||
def remove_node(workflow, node_name):
    """Delete a node by name and scrub every connection referencing it.

    Outbound connections owned by the node are dropped wholesale; inbound
    references are filtered out of each connection branch while the
    (possibly now-empty) branch list itself is kept, so the output indices
    of the surviving branches stay stable.
    """
    workflow['nodes'] = [
        node for node in workflow['nodes'] if node['name'] != node_name
    ]

    # Drop the node's own outgoing connections, if it had any.
    workflow['connections'].pop(node_name, None)

    # Strip inbound references from every remaining source.
    for source, outputs in list(workflow['connections'].items()):
        for output_key, branch_lists in list(outputs.items()):
            cleaned = []
            for branch in branch_lists:
                cleaned.append(
                    [link for link in branch if link.get('node') != node_name]
                )
            outputs[output_key] = cleaned
|
|
||||||
|
|
||||||
def create_code_node(name, code, position):
    """Build an n8n Code-node definition that runs *code* as JavaScript.

    A fresh UUID is generated for the node id on every call.
    """
    return dict(
        parameters={"jsCode": code},
        id=str(uuid.uuid4()),
        name=name,
        type="n8n-nodes-base.code",
        typeVersion=2,
        position=position,
    )
|
|
||||||
|
|
||||||
def create_execute_workflow_node(name, workflow_id, position):
    """Build an n8n Execute Workflow node that invokes the given sub-workflow.

    The workflowId uses n8n's resource-locator ("__rl") shape in "list" mode.
    A fresh UUID is generated for the node id on every call.
    """
    workflow_ref = {
        "__rl": True,
        "mode": "list",
        "value": workflow_id,
    }
    return {
        "parameters": {"workflowId": workflow_ref, "options": {}},
        "id": str(uuid.uuid4()),
        "name": name,
        "type": "n8n-nodes-base.executeWorkflow",
        "typeVersion": 1.2,
        "position": position,
    }
|
|
||||||
|
|
||||||
def main():
    """Rewire the main workflow's logs paths onto the Container Logs sub-workflow.

    Loads n8n-workflow.json, adds prepare/execute/format nodes for both the
    text-command and inline-action logs paths, redirects existing connections
    onto them, removes the now-obsolete inline logs logic nodes, and saves the
    result back in place.

    NOTE(review): the embedded JavaScript literals below were reconstructed
    from a whitespace-mangled source; their internal indentation is cosmetic
    only (n8n executes them as JS), but confirm against the original file if
    byte-exactness matters.
    """
    print("Loading workflow...")
    workflow = load_workflow()
    initial_count = len(workflow['nodes'])
    print(f"Initial node count: {initial_count}")

    # For TEXT logs command path:
    # Current: Keyword Router -> Parse Logs Command -> Docker List for Logs ->
    #   Match Logs Container -> Check Logs Match Count -> (various paths) ->
    #   Build Logs Command -> Execute Logs -> Format Logs -> Send Logs Response
    #
    # New: Keyword Router -> Prepare Text Logs Input -> Execute Logs Sub-workflow ->
    #   Send Logs Response

    # For INLINE logs action path:
    # Current: Prepare Logs Action -> Get Container For Logs -> Build Logs Action Command ->
    #   Execute Logs Action -> Format Logs Action Result -> Send Logs Result
    #
    # New: Prepare Logs Action -> Execute Logs Sub-workflow -> Send Logs Result

    # 1. Create "Prepare Text Logs Input" node.
    # The JS runs inside n8n: it maps the parsed /logs command fields onto the
    # sub-workflow's input schema, passing parse errors straight through.
    text_input_code = '''// Prepare input for Container Logs sub-workflow (text command)
const data = $json;

// Check if there's an error from Parse Logs Command
if (data.error) {
  return {
    json: {
      error: true,
      chatId: data.chatId,
      text: data.text
    }
  };
}

return {
  json: {
    containerName: data.containerQuery,
    lineCount: data.lines,
    chatId: data.chatId,
    messageId: data.messageId || 0,
    responseMode: "text"
  }
};'''

    text_input_node = create_code_node(
        "Prepare Text Logs Input",
        text_input_code,
        [1120, 600]
    )

    # 2. Create "Execute Text Logs" sub-workflow node
    exec_text_logs = create_execute_workflow_node(
        "Execute Text Logs",
        CONTAINER_LOGS_WF_ID,
        [1340, 600]
    )

    # 3. Create "Prepare Inline Logs Input" node (renamed from Prepare Logs Action).
    # Inline actions always fetch a fixed 30 lines.
    inline_input_code = '''// Prepare input for Container Logs sub-workflow (inline action)
const data = $('Parse Callback Data').item.json;

return {
  json: {
    containerName: data.containerName,
    lineCount: 30,
    chatId: data.chatId,
    messageId: data.messageId,
    responseMode: "inline"
  }
};'''

    inline_input_node = create_code_node(
        "Prepare Inline Logs Input",
        inline_input_code,
        [1780, 1300]
    )

    # 4. Create "Execute Inline Logs" sub-workflow node
    exec_inline_logs = create_execute_workflow_node(
        "Execute Inline Logs",
        CONTAINER_LOGS_WF_ID,
        [2000, 1300]
    )

    # 5. Create "Format Inline Logs Result" - adds keyboard for inline
    inline_format_code = '''// Format logs result for inline keyboard display
const result = $json;
const data = $('Prepare Inline Logs Input').item.json;

// Get container state (need to fetch from Docker)
// For now, build basic keyboard
const containerName = result.containerName;

// Build inline keyboard
const keyboard = [
  [
    { text: '🔄 Refresh Logs', callback_data: `action:logs:${containerName}` },
    { text: '⬆️ Update', callback_data: `action:update:${containerName}` }
  ],
  [
    { text: '◀️ Back to List', callback_data: 'list:0' }
  ]
];

return {
  json: {
    chatId: data.chatId,
    messageId: data.messageId,
    text: result.message,
    reply_markup: { inline_keyboard: keyboard }
  }
};'''

    inline_format_node = create_code_node(
        "Format Inline Logs Result",
        inline_format_code,
        [2220, 1300]
    )

    # Add new nodes
    print("\nAdding new nodes:")
    workflow['nodes'].extend([
        text_input_node,
        exec_text_logs,
        inline_input_node,
        exec_inline_logs,
        inline_format_node
    ])
    print(f" - {text_input_node['name']}")
    print(f" - {exec_text_logs['name']}")
    print(f" - {inline_input_node['name']}")
    print(f" - {exec_inline_logs['name']}")
    print(f" - {inline_format_node['name']}")

    # Update connections
    print("\nUpdating connections:")

    # Text path: Keyword Router -> Parse Logs Command -> Prepare Text Logs Input
    # (Keep Parse Logs Command for error handling)
    workflow['connections']['Parse Logs Command'] = {
        'main': [[{
            "node": "Prepare Text Logs Input",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Parse Logs Command -> Prepare Text Logs Input")

    # Prepare Text Logs Input -> Execute Text Logs
    workflow['connections']['Prepare Text Logs Input'] = {
        'main': [[{
            "node": "Execute Text Logs",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Prepare Text Logs Input -> Execute Text Logs")

    # Execute Text Logs -> Send Logs Response
    workflow['connections']['Execute Text Logs'] = {
        'main': [[{
            "node": "Send Logs Response",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Execute Text Logs -> Send Logs Response")

    # Update Send Logs Response to use result.message
    # (the sub-workflow returns {message: ...} instead of the old field)
    send_logs_node = find_node(workflow, "Send Logs Response")
    if send_logs_node and 'parameters' in send_logs_node:
        send_logs_node['parameters']['text'] = "={{ $json.message }}"

    # Inline path: Action Router -> Prepare Inline Logs Input
    # Find what routes to logs action — rewrite every connection that targeted
    # the old "Prepare Logs Action" node in place (only values are mutated, so
    # iterating the dict directly is safe).
    for source, outputs in workflow['connections'].items():
        for output_key, connections in outputs.items():
            for i, conn_list in enumerate(connections):
                for j, conn in enumerate(conn_list):
                    if conn.get('node') == 'Prepare Logs Action':
                        workflow['connections'][source][output_key][i][j]['node'] = 'Prepare Inline Logs Input'
                        print(f" - {source} -> Prepare Inline Logs Input (was Prepare Logs Action)")

    # Prepare Inline Logs Input -> Execute Inline Logs
    workflow['connections']['Prepare Inline Logs Input'] = {
        'main': [[{
            "node": "Execute Inline Logs",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Prepare Inline Logs Input -> Execute Inline Logs")

    # Execute Inline Logs -> Format Inline Logs Result
    workflow['connections']['Execute Inline Logs'] = {
        'main': [[{
            "node": "Format Inline Logs Result",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Execute Inline Logs -> Format Inline Logs Result")

    # Format Inline Logs Result -> Send Logs Result
    workflow['connections']['Format Inline Logs Result'] = {
        'main': [[{
            "node": "Send Logs Result",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Format Inline Logs Result -> Send Logs Result")

    # Remove obsolete nodes — removal happens AFTER rewiring, so redirected
    # connections are not clobbered by remove_node's connection scrubbing.
    print("\nRemoving obsolete nodes:")
    nodes_to_remove = [
        "Docker List for Logs",
        "Match Logs Container",
        "Check Logs Match Count",
        "Build Logs Command",
        "Execute Logs",
        "Format Logs",
        "Send Logs Error",
        "Format Logs No Match",
        "Format Logs Multiple",
        "Prepare Logs Action",
        "Get Container For Logs",
        "Build Logs Action Command",
        "Execute Logs Action",
        "Format Logs Action Result"
    ]

    removed_count = 0
    for node_name in nodes_to_remove:
        if find_node(workflow, node_name):
            print(f" - Removing: {node_name}")
            remove_node(workflow, node_name)
            removed_count += 1

    # Keep Parse Logs Command for initial parsing and error handling

    # Save
    final_count = len(workflow['nodes'])
    print(f"\nNode count: {initial_count} -> {final_count} ({final_count - initial_count:+d})")
    print(f"Removed: {removed_count} nodes")
    print(f"Added: 5 nodes")
    print(f"Net change: {final_count - initial_count:+d} nodes")

    save_workflow(workflow)
    print("\n✓ Task 3 complete: Logs flow now uses Container Logs sub-workflow")
    print("\nNOTE: You must import n8n-container-logs.json to n8n and update")
    print(" the CONTAINER_LOGS_WF_ID in this script, then re-run.")
|
|
||||||
|
|
||||||
# Script entry point: rewrite n8n-workflow.json in place.
if __name__ == '__main__':
    main()
|
|
||||||
@@ -1,145 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
Task 4: Clean up and verify workflow
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
|
|
||||||
def load_workflow():
    """Load the main workflow JSON (n8n-workflow.json) from disk."""
    with open('n8n-workflow.json', 'r') as fp:
        workflow = json.load(fp)
    return workflow
|
|
||||||
|
|
||||||
def save_workflow(workflow):
    """Write the workflow to n8n-workflow.json (pretty-printed) and log its node count."""
    target = 'n8n-workflow.json'
    with open(target, 'w') as out:
        json.dump(workflow, out, indent=2)
    print(f"Saved workflow with {len(workflow['nodes'])} nodes")
|
|
||||||
|
|
||||||
def find_orphaned_nodes(workflow):
    """Return the set of node names that participate in no connection.

    A node counts as connected if it appears as a connection source, as a
    connection target, or is a trigger node (triggers have no inbound edges
    by design, so they are never reported as orphaned).
    """
    linked = set()

    # Every connection source is in use...
    for source, outputs in workflow['connections'].items():
        linked.add(source)
        # ...and so is every target it points at.
        for branch_lists in outputs.values():
            for branch in branch_lists:
                linked.update(
                    link['node'] for link in branch if 'node' in link
                )

    # Trigger nodes are entry points; treat them as always connected.
    linked.update(
        node['name']
        for node in workflow['nodes']
        if 'trigger' in node['type'].lower()
    )

    every_name = {node['name'] for node in workflow['nodes']}
    return every_name - linked
|
|
||||||
|
|
||||||
def verify_workflow_structure(workflow):
    """Check the workflow for a trigger node and dangling connection endpoints.

    Returns a list of human-readable issue strings; an empty list means the
    structure looks valid.
    """
    issues = []
    known_names = {node['name'] for node in workflow['nodes']}

    # A workflow with no trigger node can never start.
    if not any('trigger' in node['type'].lower() for node in workflow['nodes']):
        issues.append("WARNING: No trigger node found")

    # Every connection endpoint must refer to an existing node.
    for source, outputs in workflow['connections'].items():
        if source not in known_names:
            issues.append(f"ERROR: Connection source '{source}' does not exist")

        for branch_lists in outputs.values():
            for branch in branch_lists:
                for link in branch:
                    target = link.get('node')
                    if target and target not in known_names:
                        issues.append(f"ERROR: Connection target '{target}' (from {source}) does not exist")

    return issues
|
|
||||||
|
|
||||||
def analyze_node_types(workflow):
    """Count workflow nodes by their n8n type string.

    Returns a plain dict mapping node type -> occurrence count (empty dict
    for a workflow with no nodes), same as the hand-rolled original.
    """
    # Local import keeps the module's top-level dependencies unchanged;
    # Counter replaces the manual get()/+1 accumulation loop.
    from collections import Counter
    return dict(Counter(node['type'] for node in workflow['nodes']))
|
|
||||||
|
|
||||||
def main():
    """Audit and clean the main workflow: drop orphans, verify, report.

    Loads n8n-workflow.json, removes any fully-disconnected nodes, validates
    the connection graph, prints a node-type breakdown and a node-count
    assessment against the 120-150 target, and saves back to disk only when
    something was actually removed.
    """
    print("Loading workflow...")
    workflow = load_workflow()
    initial_count = len(workflow['nodes'])
    print(f"Current node count: {initial_count}")

    # Find orphaned nodes
    print("\n=== Checking for orphaned nodes ===")
    orphaned = find_orphaned_nodes(workflow)
    if orphaned:
        print(f"Found {len(orphaned)} orphaned nodes:")
        for node in orphaned:
            print(f" - {node}")

        # Option to remove orphaned nodes
        print("\nRemoving orphaned nodes...")
        workflow['nodes'] = [n for n in workflow['nodes'] if n['name'] not in orphaned]

        # Clean up any connections from orphaned nodes
        # (orphans have no inbound references by definition, so only their
        # own source entries need deleting)
        for node in orphaned:
            if node in workflow['connections']:
                del workflow['connections'][node]

        removed_count = len(orphaned)
    else:
        print("No orphaned nodes found ✓")
        removed_count = 0

    # Verify structure
    print("\n=== Verifying workflow structure ===")
    issues = verify_workflow_structure(workflow)
    if issues:
        print("Issues found:")
        for issue in issues:
            print(f" - {issue}")
    else:
        print("Workflow structure is valid ✓")

    # Analyze node types (most common first)
    print("\n=== Node composition ===")
    type_counts = analyze_node_types(workflow)
    for node_type, count in sorted(type_counts.items(), key=lambda x: -x[1]):
        short_type = node_type.replace('n8n-nodes-base.', '')
        print(f" {count:3d} {short_type}")

    # Count Execute Workflow nodes (sub-workflow calls)
    exec_wf_count = type_counts.get('n8n-nodes-base.executeWorkflow', 0)
    print(f"\n Total Execute Workflow (sub-workflow calls): {exec_wf_count}")

    # Save if we made changes
    final_count = len(workflow['nodes'])
    if removed_count > 0:
        print(f"\nNode count: {initial_count} -> {final_count} ({final_count - initial_count:+d})")
        save_workflow(workflow)
    else:
        print(f"\nNo changes made. Final node count: {final_count}")

    # Check target
    print("\n=== Target Assessment ===")
    target_min = 120
    target_max = 150
    if target_min <= final_count <= target_max:
        print(f"✓ Node count {final_count} is within target range ({target_min}-{target_max})")
    elif final_count < target_min:
        print(f"✓ Node count {final_count} is BELOW target (even better!)")
    else:
        print(f"⚠ Node count {final_count} is above target range ({target_min}-{target_max})")
        print(f" Over target by: {final_count - target_max} nodes")

    print("\n✓ Task 4 cleanup complete")
|
|
||||||
|
|
||||||
# Script entry point: run the cleanup/verification pass.
if __name__ == '__main__':
    main()
|
|
||||||
Reference in New Issue
Block a user