{ "name": "Homelab Integration Advisor", "nodes": [ { "parameters": { "rule": { "interval": [ { "field": "days", "daysInterval": 1, "triggerAtHour": 9 } ] } }, "id": "daily-trigger", "name": "Daily at 9 AM", "type": "n8n-nodes-base.scheduleTrigger", "typeVersion": 1.2, "position": [ 250, 400 ] }, { "parameters": { "httpMethod": "POST", "path": "integration-advisor", "responseMode": "responseNode", "options": {} }, "id": "webhook-trigger", "name": "Manual Trigger", "type": "n8n-nodes-base.webhook", "typeVersion": 2, "position": [ 250, 600 ], "webhookId": "integration-advisor" }, { "parameters": { "command": "docker service ls --format '{{.Name}}|{{.Mode}}|{{.Replicas}}|{{.Image}}|{{.Ports}}'" }, "id": "get-services", "name": "Get All Services", "type": "n8n-nodes-base.executeCommand", "typeVersion": 1, "position": [ 500, 400 ] }, { "parameters": { "url": "=http://prometheus:9090/api/v1/query?query=up", "options": { "timeout": 5000 } }, "id": "get-prometheus-metrics", "name": "Get Prometheus Metrics", "type": "n8n-nodes-base.httpRequest", "typeVersion": 4.2, "position": [ 500, 550 ], "continueOnFail": true }, { "parameters": { "url": "=http://lm-studio:1234/v1/models", "options": {} }, "id": "get-ai-models", "name": "Get Available AI Models", "type": "n8n-nodes-base.httpRequest", "typeVersion": 4.2, "position": [ 500, 700 ], "continueOnFail": true }, { "parameters": { "jsCode": "const items = $input.all();\n\nconst inventory = {\n timestamp: new Date().toISOString(),\n services: [],\n capabilities: {\n ai: [],\n monitoring: [],\n automation: [],\n storage: [],\n productivity: [],\n media: [],\n development: []\n },\n integration_potential: []\n};\n\n// Parse service list\nconst serviceData = items.find(i => i.json.stdout);\nif (serviceData && serviceData.json.stdout) {\n const lines = serviceData.json.stdout.split('\\n').filter(l => l.trim());\n lines.forEach(line => {\n const [name, mode, replicas, image, ports] = line.split('|');\n const service = { name, mode, replicas, image, ports };\n inventory.services.push(service);\n \n // Categorize by capability\n if (name.includes('openwebui') || name.includes('lm-studio') || name.includes('ollama')) {\n inventory.capabilities.ai.push(name);\n } else if (name.includes('prometheus') || name.includes('grafana') || name.includes('alert')) {\n inventory.capabilities.monitoring.push(name);\n } else if (name.includes('n8n') || name.includes('komodo')) {\n inventory.capabilities.automation.push(name);\n } else if (name.includes('paperless') || name.includes('stirling') || name.includes('nextcloud')) {\n inventory.capabilities.productivity.push(name);\n } else if (name.includes('plex') || name.includes('jellyfin') || name.includes('immich')) {\n inventory.capabilities.media.push(name);\n } else if (name.includes('gitea') || name.includes('code-server')) {\n inventory.capabilities.development.push(name);\n } else if (name.includes('omv') || name.includes('samba')) {\n inventory.capabilities.storage.push(name);\n }\n });\n}\n\n// Get AI models\nconst aiModels = items.find(i => i.json.data);\nif (aiModels && aiModels.json.data) {\n inventory.ai_models = aiModels.json.data.map(m => m.id);\n}\n\n// Define integration opportunities\nconst integrations = [\n { from: 'n8n', to: 'paperless', type: 'Document automation', potential: 'high' },\n { from: 'n8n', to: 'prometheus', type: 'Metric-based triggers', potential: 'high' },\n { from: 'n8n', to: 'openwebui', type: 'AI-powered workflows', potential: 'high' },\n { from: 'openwebui', to: 'searxng', type: 'Enhanced 
search', potential: 'medium' },\n { from: 'prometheus', to: 'grafana', type: 'Visualization', potential: 'existing' },\n { from: 'gitea', to: 'komodo', type: 'CI/CD automation', potential: 'high' },\n { from: 'paperless', to: 'nextcloud', type: 'Document storage', potential: 'medium' },\n { from: 'immich', to: 'openwebui', type: 'Photo analysis', potential: 'medium' },\n { from: 'home-assistant', to: 'all', type: 'Smart home integration', potential: 'high' }\n];\n\ninventory.integration_potential = integrations.filter(i => {\n const fromExists = inventory.services.some(s => s.name.includes(i.from.split('-')[0]));\n const toExists = i.to === 'all' || inventory.services.some(s => s.name.includes(i.to.split('-')[0]));\n return fromExists && toExists;\n});\n\nreturn [{ json: inventory }];" }, "id": "build-inventory", "name": "Build Service Inventory", "type": "n8n-nodes-base.code", "typeVersion": 2, "position": [ 750, 500 ] }, { "parameters": { "method": "POST", "url": "=http://lm-studio:1234/v1/chat/completions", "sendBody": true, "bodyParameters": { "parameters": [ { "name": "model", "value": "=deepseek-r1-distill-llama-8b" }, { "name": "messages", "value": "={{ [{\"role\":\"system\",\"content\":\"You are a homelab integration expert specializing in service orchestration with n8n, Docker, and modern DevOps tools. Analyze the provided service inventory and recommend specific integration workflows. For each recommendation provide: 1) Services involved 2) Integration type 3) Specific n8n workflow pattern 4) Expected benefits 5) Complexity (low/medium/high). Respond in JSON format with an array of recommendations.\"}, {\"role\":\"user\",\"content\":\"Analyze this homelab and recommend integration workflows:\\n\\nServices: \" + JSON.stringify($json.capabilities, null, 2) + \"\\n\\nAvailable AI Models: \" + JSON.stringify($json.ai_models || [], null, 2) + \"\\n\\nPotential Integrations Identified: \" + JSON.stringify($json.integration_potential, null, 2)}] }}" }, { "name": "temperature", "value": "=0.4" }, { "name": "max_tokens", "value": "=2000" } ] }, "options": { "timeout": 40000 } }, "id": "ai-integration-advisor", "name": "AI Integration Advisor", "type": "n8n-nodes-base.httpRequest", "typeVersion": 4.2, "position": [ 1000, 500 ] }, { "parameters": { "jsCode": "const inventory = $('Build Service Inventory').item.json;\nconst aiResponse = $json.choices[0].message.content;\n\nlet recommendations;\ntry {\n const jsonMatch = aiResponse.match(/\\{[\\s\\S]*\\}|\\[[\\s\\S]*\\]/);\n recommendations = jsonMatch ? 
JSON.parse(jsonMatch[0]) : { raw: aiResponse };\n} catch (e) {\n recommendations = { raw: aiResponse, error: e.message };\n}\n\nconst report = {\n generated_at: new Date().toISOString(),\n homelab_summary: {\n total_services: inventory.services.length,\n capabilities: inventory.capabilities,\n ai_models_available: inventory.ai_models?.length || 0\n },\n integration_opportunities: inventory.integration_potential,\n ai_recommendations: recommendations,\n priority_integrations: [],\n quick_wins: []\n};\n\n// Extract priority integrations from AI response\nif (Array.isArray(recommendations)) {\n report.priority_integrations = recommendations\n .filter(r => r.complexity === 'low' || r.complexity === 'medium')\n .slice(0, 5);\n report.quick_wins = recommendations\n .filter(r => r.complexity === 'low')\n .slice(0, 3);\n} else if (recommendations.recommendations) {\n report.priority_integrations = recommendations.recommendations.slice(0, 5);\n}\n\nreturn [{ json: report }];" }, "id": "build-integration-report", "name": "Build Integration Report", "type": "n8n-nodes-base.code", "typeVersion": 2, "position": [ 1250, 500 ] }, { "parameters": { "respondWith": "json", "responseBody": "={{ $json }}" }, "id": "webhook-response", "name": "Webhook Response", "type": "n8n-nodes-base.respondToWebhook", "typeVersion": 1, "position": [ 1500, 500 ] } ], "pinData": {}, "connections": { "Daily at 9 AM": { "main": [ [ { "node": "Get All Services", "type": "main", "index": 0 }, { "node": "Get Prometheus Metrics", "type": "main", "index": 0 }, { "node": "Get Available AI Models", "type": "main", "index": 0 } ] ] }, "Manual Trigger": { "main": [ [ { "node": "Get All Services", "type": "main", "index": 0 }, { "node": "Get Prometheus Metrics", "type": "main", "index": 0 }, { "node": "Get Available AI Models", "type": "main", "index": 0 } ] ] }, "Get All Services": { "main": [ [ { "node": "Build Service Inventory", "type": "main", "index": 0 } ] ] }, "Get Prometheus Metrics": { "main": [ [ { "node": "Build Service Inventory", "type": "main", "index": 0 } ] ] }, "Get Available AI Models": { "main": [ [ { "node": "Build Service Inventory", "type": "main", "index": 0 } ] ] }, "Build Service Inventory": { "main": [ [ { "node": "AI Integration Advisor", "type": "main", "index": 0 } ] ] }, "AI Integration Advisor": { "main": [ [ { "node": "Build Integration Report", "type": "main", "index": 0 } ] ] }, "Build Integration Report": { "main": [ [ { "node": "Webhook Response", "type": "main", "index": 0 } ] ] } }, "active": false, "settings": { "executionOrder": "v1" }, "versionId": "1", "meta": { "templateCredsSetupCompleted": true, "instanceId": "homelab" }, "id": "homelab-integration-advisor", "tags": [] }
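
Once the workflow is imported and activated, its manual path can be exercised over the webhook. Below is a minimal consumer sketch, assuming n8n listens on its default port 5678 on localhost and that activation registers the production webhook at /webhook/integration-advisor; the fields it prints (generated_at, homelab_summary, quick_wins) are the ones assembled by the Build Integration Report node. The script name and the N8N_BASE_URL override are hypothetical conveniences, not part of the export.

// trigger-integration-advisor.js (hypothetical helper, not part of the n8n export)
// Assumes Node.js 18+ (global fetch) and that the workflow above is active, so n8n
// serves the production webhook at /webhook/integration-advisor on port 5678.
const N8N_BASE_URL = process.env.N8N_BASE_URL || 'http://localhost:5678';

async function requestIntegrationReport() {
  // POST matches the httpMethod configured on the "Manual Trigger" webhook node.
  const res = await fetch(`${N8N_BASE_URL}/webhook/integration-advisor`, { method: 'POST' });
  if (!res.ok) {
    throw new Error(`Webhook returned ${res.status} ${res.statusText}`);
  }
  // The body is the JSON report built by "Build Integration Report" and returned
  // through the "Webhook Response" node.
  const report = await res.json();
  console.log('Generated at:', report.generated_at);
  console.log('Total services:', report.homelab_summary.total_services);
  console.log('Quick wins:', JSON.stringify(report.quick_wins, null, 2));
  return report;
}

requestIntegrationReport().catch((err) => {
  console.error('Failed to fetch the integration report:', err.message);
  process.exit(1);
});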