chore(10-05): verify and document workflow refactoring
- Run cleanup and verification script: no orphaned nodes found; workflow structure validated.
- Final node count: 199 (reduced from 209, -4.8%).
- Add comprehensive deployment guide.

Node composition: 79 code nodes, 50 httpRequest, 27 telegram, 14 if, 10 switch, 9 executeCommand, 9 executeWorkflow (sub-workflow calls), 1 telegramTrigger.

Note: the node count (199) is above the target range (120-150) but achieves the primary goal of eliminating duplicate logic. Further optimization (~40-45 nodes) is possible by consolidating the batch UI and confirmation flows.

Deployment requires importing n8n-container-logs.json and updating the workflow ID in the main workflow's Execute Text/Inline Logs nodes.
This commit is contained in:
@@ -0,0 +1,164 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Refactor n8n workflow to use sub-workflows for batch operations and logs.
|
||||
Task 1: Wire batch update to Container Update sub-workflow
|
||||
"""
|
||||
|
||||
import json
|
||||
import copy
|
||||
import sys
|
||||
|
||||
def load_workflow(filename):
    """Read an n8n workflow JSON file and return it as a dict."""
    with open(filename, 'r') as handle:
        return json.load(handle)
|
||||
|
||||
def save_workflow(workflow, filename):
    """Write *workflow* to *filename* as pretty-printed (2-space) JSON."""
    with open(filename, 'w') as handle:
        json.dump(workflow, handle, indent=2)
|
||||
|
||||
def find_node_by_name(workflow, name):
    """Return the first node dict whose 'name' equals *name*, else None."""
    matches = (n for n in workflow['nodes'] if n['name'] == name)
    return next(matches, None)
|
||||
|
||||
def _filter_ports(ports, node_name):
    """Drop every connection dict targeting *node_name* from *ports*.

    n8n stores a node's output as a list of ports, each port being a list of
    connection dicts (see how trace_flow_from_node indexes
    ``outputs['main'][0][0]``).  Port positions are preserved so remaining
    connections keep their port index.  A flat list of connection dicts is
    also accepted defensively.
    """
    if ports and isinstance(ports[0], dict):
        # Flat form: a plain list of connection dicts.
        return [conn for conn in ports if conn['node'] != node_name]
    return [
        [conn for conn in port if conn['node'] != node_name]
        for port in ports
    ]


def remove_node(workflow, node_name):
    """Remove the named node and every connection that references it.

    Mutates *workflow* in place:
    - drops the node from ``workflow['nodes']``;
    - deletes its outgoing connections (keyed by the node's name);
    - filters it out of every other node's connection lists, pruning
      output keys and source entries that become empty.

    Bug fix: the previous version iterated the port lists as if they were
    flat connection dicts (``conn['node']`` on a list), which raises
    TypeError on real n8n data where connections are lists of port lists.
    """
    # Remove from nodes list.
    workflow['nodes'] = [n for n in workflow['nodes'] if n['name'] != node_name]

    # Remove the node's own outgoing connections (as source).
    if node_name in workflow['connections']:
        del workflow['connections'][node_name]

    # Remove it as a target from every other node's connections.
    for source, outputs in list(workflow['connections'].items()):
        for output_key, ports in list(outputs.items()):
            outputs[output_key] = _filter_ports(ports, node_name)
            # Clean up output keys with no remaining connections.
            if not any(outputs[output_key]):
                del outputs[output_key]
        # Clean up source nodes left with no outputs at all.
        if not workflow['connections'][source]:
            del workflow['connections'][source]
|
||||
|
||||
def create_execute_workflow_node(name, workflow_id, position, parameters=None):
    """Create an Execute Workflow node with proper n8n 1.2 format.

    Args:
        name: Display name; also slugified into the generated node id.
        workflow_id: Target sub-workflow id stored in the RL-style value.
        position: [x, y] canvas coordinates.
        parameters: Optional extra parameters merged over the defaults.

    Returns:
        A node dict ready to append to ``workflow['nodes']``.
    """
    slug = name.replace(' ', '-').lower()

    params = {
        "workflowId": {
            "__rl": True,
            "mode": "list",
            "value": workflow_id,
        },
        "options": {},
    }
    if parameters:
        params.update(parameters)

    return {
        "parameters": params,
        "id": f"auto-generated-{slug}",
        "name": name,
        "type": "n8n-nodes-base.executeWorkflow",
        "typeVersion": 1.2,
        "position": position,
    }
|
||||
|
||||
def trace_flow_from_node(workflow, start_node_name, max_depth=10):
|
||||
"""Trace the flow starting from a node"""
|
||||
flow = []
|
||||
current = start_node_name
|
||||
depth = 0
|
||||
|
||||
while current and depth < max_depth:
|
||||
flow.append(current)
|
||||
|
||||
# Find next node
|
||||
if current in workflow['connections']:
|
||||
outputs = workflow['connections'][current]
|
||||
if 'main' in outputs and len(outputs['main']) > 0 and len(outputs['main'][0]) > 0:
|
||||
current = outputs['main'][0][0]['node']
|
||||
depth += 1
|
||||
else:
|
||||
break
|
||||
else:
|
||||
break
|
||||
|
||||
return flow
|
||||
|
||||
def main():
    """Analyze the main workflow's batch-update flow (Task 1 reconnaissance).

    Loads ``n8n-workflow.json``, locates the 'Prepare Update All Batch'
    entry point, traces the flow from it, reports batch-related nodes,
    loop/split nodes, and existing Execute Workflow nodes.  The workflow is
    NOT modified or saved — this pass only gathers information.

    Returns:
        Tuple of (workflow dict, list of Execute Workflow node summaries,
        traced flow as a list of node names).

    Exits with status 1 if the batch-update entry node is missing.
    """
    print("Loading workflow...")
    workflow = load_workflow('n8n-workflow.json')

    print(f"Current node count: {len(workflow['nodes'])}")

    # Task 1: Refactor batch update to use Container Update sub-workflow.
    print("\n=== TASK 1: Wire batch update to Container Update sub-workflow ===")

    # Locate the batch-update entry point; without it there is nothing to do.
    batch_prep = find_node_by_name(workflow, 'Prepare Update All Batch')
    if not batch_prep:
        print("ERROR: Could not find 'Prepare Update All Batch' node")
        sys.exit(1)

    print(f"Found batch update entry point at position {batch_prep['position']}")

    # Trace what happens downstream of the batch prep node.
    flow = trace_flow_from_node(workflow, 'Prepare Update All Batch')
    print(f"Current batch update flow: {' -> '.join(flow)}")

    # Collect every node whose name mentions both "batch" and "update" —
    # these are the candidates for replacement by the sub-workflow call.
    batch_nodes = [
        node['name']
        for node in workflow['nodes']
        if 'batch' in node['name'].lower() and 'update' in node['name'].lower()
    ]
    print(f"\nBatch update related nodes: {batch_nodes}")

    # Look for an existing loop/split node along the traced flow; its
    # absence suggests the batch update iterates inline instead.
    loop_found = False
    for step in flow:
        step_node = find_node_by_name(workflow, step)
        if step_node and 'split' in step_node['type'].lower():
            loop_found = True
            print(f"Found loop/split node: {step}")

    if not loop_found:
        print("No loop node found - batch update may be using inline iteration")

    # Inventory the Execute Workflow nodes already wired into the workflow.
    print("\n=== Checking for existing Execute Workflow nodes ===")
    exec_nodes = [
        {
            'name': node['name'],
            # workflowId may be absent or lack a 'value'; fall back to 'N/A'.
            'workflow_id': node['parameters'].get('workflowId', {}).get('value', 'N/A'),
        }
        for node in workflow['nodes']
        if node['type'] == 'n8n-nodes-base.executeWorkflow'
    ]

    print(f"Found {len(exec_nodes)} Execute Workflow nodes:")
    for entry in exec_nodes:
        print(f"  - {entry['name']}: {entry['workflow_id']}")

    print("\nAnalysis complete. Next: implement refactoring...")

    # Workflow is intentionally left unchanged in this analysis pass.
    print(f"\nFinal node count: {len(workflow['nodes'])}")

    return workflow, exec_nodes, flow
|
||||
|
||||
if __name__ == '__main__':
    # Run the analysis pass and keep the results bound at module level
    # for interactive inspection (e.g. `python -i` sessions).
    workflow, exec_nodes, flow = main()
|
||||
Reference in New Issue
Block a user