chore(10-05): verify and document workflow refactoring

- Run cleanup and verification script
- No orphaned nodes found
- Workflow structure validated
- Final node count: 199 (reduced from 209, -4.8%)
- Add comprehensive deployment guide

Node composition:
- 79 code nodes
- 50 httpRequest nodes
- 27 telegram nodes
- 14 if nodes
- 10 switch nodes
- 9 executeCommand nodes
- 9 executeWorkflow nodes (sub-workflow calls)
- 1 telegramTrigger node

Note: The node count (199) is above the target range (120-150) but the
refactor achieves its primary goal of eliminating duplicate logic. A
further reduction of roughly 40-45 nodes is possible by consolidating
the batch UI and confirmation flows.

Deployment requires importing n8n-container-logs.json and updating
the workflow ID in main workflow Execute Text/Inline Logs nodes.
This commit is contained in:
Lucas Berger
2026-02-04 13:58:48 -05:00
parent 6471dcecd6
commit 186f11362e
9 changed files with 1731 additions and 6 deletions
+326
View File
@@ -0,0 +1,326 @@
#!/usr/bin/env python3
"""
Task 3 Part 2: Update main workflow to use Container Logs sub-workflow
"""
import json
import uuid
# Workflow ID of the Container Logs sub-workflow, assigned by n8n when the
# sub-workflow (n8n-container-logs.json) is imported. Until then this is a
# placeholder; the script prints a reminder (see end of main()) to replace it
# with the real ID and re-run.
CONTAINER_LOGS_WF_ID = "PLACEHOLDER_LOGS_ID"
def load_workflow():
    """Read and return the main workflow definition from n8n-workflow.json."""
    with open('n8n-workflow.json') as handle:
        return json.load(handle)
def save_workflow(workflow):
    """Write *workflow* back to n8n-workflow.json and report its node count."""
    node_total = len(workflow['nodes'])
    with open('n8n-workflow.json', 'w') as handle:
        json.dump(workflow, handle, indent=2)
    print(f"Saved workflow with {node_total} nodes")
def find_node(workflow, name):
    """Return the first node whose 'name' equals *name*, or None if absent."""
    matches = (node for node in workflow['nodes'] if node['name'] == name)
    return next(matches, None)
def remove_node(workflow, node_name):
    """Remove a node by name and strip every connection that references it.

    Mutates *workflow* in place: drops the node from 'nodes', deletes its own
    outgoing connection entry, and filters it out of every other node's
    output connection lists (which may leave empty inner lists behind, as
    the original did).
    """
    workflow['nodes'] = [
        node for node in workflow['nodes'] if node['name'] != node_name
    ]
    connections = workflow['connections']
    # Drop the removed node's own outgoing connections, if any.
    if node_name in connections:
        del connections[node_name]
    # Filter references to the removed node out of every remaining output.
    for source in list(connections):
        for output_key in list(connections[source]):
            pruned = []
            for conn_list in connections[source][output_key]:
                kept = [c for c in conn_list if c.get('node') != node_name]
                pruned.append(kept)
            connections[source][output_key] = pruned
def create_code_node(name, code, position):
    """Build an n8n Code-node definition with a freshly generated UUID id."""
    node = {
        "parameters": {"jsCode": code},
        "id": str(uuid.uuid4()),
        "name": name,
        "type": "n8n-nodes-base.code",
        "typeVersion": 2,
        "position": position,
    }
    return node
def create_execute_workflow_node(name, workflow_id, position):
    """Build an n8n Execute Workflow node that calls the given sub-workflow id."""
    workflow_ref = {
        "__rl": True,
        "mode": "list",
        "value": workflow_id,
    }
    return {
        "parameters": {"workflowId": workflow_ref, "options": {}},
        "id": str(uuid.uuid4()),
        "name": name,
        "type": "n8n-nodes-base.executeWorkflow",
        "typeVersion": 1.2,
        "position": position,
    }
def main():
    """Rewire the main workflow's two logs paths onto the Container Logs
    sub-workflow.

    Steps: load n8n-workflow.json, add five new nodes (prepare/execute pairs
    for the text and inline paths plus an inline result formatter), repoint
    the relevant connections, remove the now-redundant inline logs pipeline
    nodes, and save the file back. Prints a running log of each change.

    NOTE(review): relies on node names like 'Parse Logs Command' and
    'Send Logs Response' existing in n8n-workflow.json — not verifiable
    from this file alone.
    """
    print("Loading workflow...")
    workflow = load_workflow()
    initial_count = len(workflow['nodes'])
    print(f"Initial node count: {initial_count}")
    # For TEXT logs command path:
    # Current: Keyword Router -> Parse Logs Command -> Docker List for Logs ->
    #   Match Logs Container -> Check Logs Match Count -> (various paths) ->
    #   Build Logs Command -> Execute Logs -> Format Logs -> Send Logs Response
    #
    # New: Keyword Router -> Prepare Text Logs Input -> Execute Logs Sub-workflow ->
    #   Send Logs Response
    # For INLINE logs action path:
    # Current: Prepare Logs Action -> Get Container For Logs -> Build Logs Action Command ->
    #   Execute Logs Action -> Format Logs Action Result -> Send Logs Result
    #
    # New: Prepare Logs Action -> Execute Logs Sub-workflow -> Send Logs Result
    # 1. Create "Prepare Text Logs Input" node.
    # The JS below runs inside n8n; it maps the parsed text command fields
    # onto the sub-workflow's expected input schema, passing parse errors
    # straight through.
    text_input_code = '''// Prepare input for Container Logs sub-workflow (text command)
const data = $json;
// Check if there's an error from Parse Logs Command
if (data.error) {
return {
json: {
error: true,
chatId: data.chatId,
text: data.text
}
};
}
return {
json: {
containerName: data.containerQuery,
lineCount: data.lines,
chatId: data.chatId,
messageId: data.messageId || 0,
responseMode: "text"
}
};'''
    text_input_node = create_code_node(
        "Prepare Text Logs Input",
        text_input_code,
        [1120, 600]
    )
    # 2. Create "Execute Text Logs" sub-workflow node.
    exec_text_logs = create_execute_workflow_node(
        "Execute Text Logs",
        CONTAINER_LOGS_WF_ID,
        [1340, 600]
    )
    # 3. Create "Prepare Inline Logs Input" node (renamed from Prepare Logs Action).
    inline_input_code = '''// Prepare input for Container Logs sub-workflow (inline action)
const data = $('Parse Callback Data').item.json;
return {
json: {
containerName: data.containerName,
lineCount: 30,
chatId: data.chatId,
messageId: data.messageId,
responseMode: "inline"
}
};'''
    inline_input_node = create_code_node(
        "Prepare Inline Logs Input",
        inline_input_code,
        [1780, 1300]
    )
    # 4. Create "Execute Inline Logs" sub-workflow node.
    exec_inline_logs = create_execute_workflow_node(
        "Execute Inline Logs",
        CONTAINER_LOGS_WF_ID,
        [2000, 1300]
    )
    # 5. Create "Format Inline Logs Result" - adds keyboard for inline.
    inline_format_code = '''// Format logs result for inline keyboard display
const result = $json;
const data = $('Prepare Inline Logs Input').item.json;
// Get container state (need to fetch from Docker)
// For now, build basic keyboard
const containerName = result.containerName;
// Build inline keyboard
const keyboard = [
[
{ text: '🔄 Refresh Logs', callback_data: `action:logs:${containerName}` },
{ text: '⬆️ Update', callback_data: `action:update:${containerName}` }
],
[
{ text: '◀️ Back to List', callback_data: 'list:0' }
]
];
return {
json: {
chatId: data.chatId,
messageId: data.messageId,
text: result.message,
reply_markup: { inline_keyboard: keyboard }
}
};'''
    inline_format_node = create_code_node(
        "Format Inline Logs Result",
        inline_format_code,
        [2220, 1300]
    )
    # Add all five new nodes to the workflow.
    print("\nAdding new nodes:")
    workflow['nodes'].extend([
        text_input_node,
        exec_text_logs,
        inline_input_node,
        exec_inline_logs,
        inline_format_node
    ])
    print(f" - {text_input_node['name']}")
    print(f" - {exec_text_logs['name']}")
    print(f" - {inline_input_node['name']}")
    print(f" - {exec_inline_logs['name']}")
    print(f" - {inline_format_node['name']}")
    # Update connections.
    print("\nUpdating connections:")
    # Text path: Keyword Router -> Parse Logs Command -> Prepare Text Logs Input
    # (Keep Parse Logs Command for error handling.)
    workflow['connections']['Parse Logs Command'] = {
        'main': [[{
            "node": "Prepare Text Logs Input",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Parse Logs Command -> Prepare Text Logs Input")
    # Prepare Text Logs Input -> Execute Text Logs
    workflow['connections']['Prepare Text Logs Input'] = {
        'main': [[{
            "node": "Execute Text Logs",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Prepare Text Logs Input -> Execute Text Logs")
    # Execute Text Logs -> Send Logs Response
    workflow['connections']['Execute Text Logs'] = {
        'main': [[{
            "node": "Send Logs Response",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Execute Text Logs -> Send Logs Response")
    # Update Send Logs Response to read the sub-workflow's 'message' field.
    send_logs_node = find_node(workflow, "Send Logs Response")
    if send_logs_node and 'parameters' in send_logs_node:
        send_logs_node['parameters']['text'] = "={{ $json.message }}"
    # Inline path: Action Router -> Prepare Inline Logs Input.
    # Scan every connection and repoint any that targeted the old
    # 'Prepare Logs Action' node to the renamed replacement.
    for source, outputs in workflow['connections'].items():
        for output_key, connections in outputs.items():
            for i, conn_list in enumerate(connections):
                for j, conn in enumerate(conn_list):
                    if conn.get('node') == 'Prepare Logs Action':
                        workflow['connections'][source][output_key][i][j]['node'] = 'Prepare Inline Logs Input'
                        print(f" - {source} -> Prepare Inline Logs Input (was Prepare Logs Action)")
    # Prepare Inline Logs Input -> Execute Inline Logs
    workflow['connections']['Prepare Inline Logs Input'] = {
        'main': [[{
            "node": "Execute Inline Logs",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Prepare Inline Logs Input -> Execute Inline Logs")
    # Execute Inline Logs -> Format Inline Logs Result
    workflow['connections']['Execute Inline Logs'] = {
        'main': [[{
            "node": "Format Inline Logs Result",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Execute Inline Logs -> Format Inline Logs Result")
    # Format Inline Logs Result -> Send Logs Result
    workflow['connections']['Format Inline Logs Result'] = {
        'main': [[{
            "node": "Send Logs Result",
            "type": "main",
            "index": 0
        }]]
    }
    print(" - Format Inline Logs Result -> Send Logs Result")
    # Remove the nodes made redundant by the sub-workflow extraction.
    print("\nRemoving obsolete nodes:")
    nodes_to_remove = [
        "Docker List for Logs",
        "Match Logs Container",
        "Check Logs Match Count",
        "Build Logs Command",
        "Execute Logs",
        "Format Logs",
        "Send Logs Error",
        "Format Logs No Match",
        "Format Logs Multiple",
        "Prepare Logs Action",
        "Get Container For Logs",
        "Build Logs Action Command",
        "Execute Logs Action",
        "Format Logs Action Result"
    ]
    removed_count = 0
    for node_name in nodes_to_remove:
        if find_node(workflow, node_name):
            print(f" - Removing: {node_name}")
            remove_node(workflow, node_name)
            removed_count += 1
    # Keep Parse Logs Command for initial parsing and error handling.
    # Summarize the change and persist the rewritten workflow.
    final_count = len(workflow['nodes'])
    print(f"\nNode count: {initial_count} -> {final_count} ({final_count - initial_count:+d})")
    print(f"Removed: {removed_count} nodes")
    print(f"Added: 5 nodes")
    print(f"Net change: {final_count - initial_count:+d} nodes")
    save_workflow(workflow)
    print("\n✓ Task 3 complete: Logs flow now uses Container Logs sub-workflow")
    print("\nNOTE: You must import n8n-container-logs.json to n8n and update")
    print("      the CONTAINER_LOGS_WF_ID in this script, then re-run.")
# Script entry point: only run when executed directly, not on import.
if __name__ == '__main__':
    main()