{
  "name": "Vorlux AI | AI Agent Health Monitor",
  "nodes": [
    {
      "id": "4d4d71c9-0871-4036-a54b-d2cd0c4af52c",
      "name": "Every 15m",
      "type": "n8n-nodes-base.scheduleTrigger",
      "typeVersion": 1.2,
      "position": [
        220,
        300
      ],
      "parameters": {
        "rule": {
          "interval": [
            {
              "field": "minutes",
              "minutesInterval": 15
            }
          ]
        }
      }
    },
    {
      "id": "060223ba-a8cd-4d9b-845a-8f30a1800cec",
      "name": "Ollama",
      "type": "n8n-nodes-base.httpRequest",
      "typeVersion": 4.2,
      "position": [
        460,
        160
      ],
      "parameters": {
        "method": "GET",
        "url": "={{$env.OLLAMA_URL||\"http://localhost:11434\"}}/api/tags",
        "options": {
          "timeout": 5000
        }
      },
      "onError": "continueRegularOutput"
    },
    {
      "id": "b16d13c4-7a23-4b94-a1db-b1732f3fe110",
      "name": "ComfyUI",
      "type": "n8n-nodes-base.httpRequest",
      "typeVersion": 4.2,
      "position": [
        460,
        440
      ],
      "parameters": {
        "method": "GET",
        "url": "={{$env.COMFYUI_URL||\"http://100.79.221.32:8188\"}}/system_stats",
        "options": {
          "timeout": 5000
        }
      },
      "onError": "continueRegularOutput"
    },
    {
      "id": "4b2db411-2e18-4065-9c13-f59adcfa7174",
      "name": "Status",
      "type": "n8n-nodes-base.code",
      "typeVersion": 2,
      "position": [
        700,
        300
      ],
      "parameters": {
        "mode": "runOnceForAllItems",
        "jsCode": "const o = !$('Ollama').first().json.error;\nconst c = !$('ComfyUI').first().json.error;\nconst gpu = $('GPU Metrics').first().json.data || {};\nconst gpuTemp = gpu.temperature ?? 'N/A';\nconst gpuMem = gpu.memoryUsedMB != null ? Math.round(gpu.memoryUsedMB) + '/' + Math.round(gpu.memoryTotalMB || 0) + 'MB' : 'N/A';\nconst gpuUtil = gpu.utilization ?? 'N/A';\nconst gpuOk = gpu.temperature != null ? gpu.temperature < 85 : true;\nconst allOk = o && c && gpuOk;\nreturn [{json: {\n  ollama: o, comfyui: c, gpuOk,\n  models: $('Ollama').first().json.models?.length || 0,\n  gpu: {temp: gpuTemp, memory: gpuMem, utilization: gpuUtil},\n  allOk,\n  timestamp: new Date().toISOString()\n}}];"
      },
      "notes": "Aggregates Ollama, ComfyUI, and GPU health with temperature alerts"
    },
    {
      "id": "b32b3039-8a2d-4c61-b0f3-508032c81394",
      "name": "Save",
      "type": "n8n-nodes-base.httpRequest",
      "typeVersion": 4.2,
      "position": [
        940,
        200
      ],
      "parameters": {
        "method": "POST",
        "url": "={{$env.VORLUX_HUB_URL}}/api/admin/ai-providers/health",
        "sendBody": true,
        "specifyBody": "json",
        "jsonBody": "={{ JSON.stringify($json) }}",
        "options": {
          "timeout": 10000
        }
      }
    },
    {
      "id": "9e2f7982-0507-46aa-9c32-8ccc0e4eae6b",
      "name": "Alert?",
      "type": "n8n-nodes-base.if",
      "typeVersion": 2,
      "position": [
        940,
        400
      ],
      "parameters": {
        "conditions": {
          "options": {
            "caseSensitive": true,
            "leftValue": "",
            "typeValidation": "strict"
          },
          "conditions": [
            {
              "leftValue": "={{ !$json.allOk }}",
              "rightValue": "",
              "operator": {
                "type": "boolean",
                "operation": "true",
                "singleValue": true
              }
            }
          ],
          "combinator": "and"
        }
      }
    },
    {
      "id": "ffe9e746-ed10-4244-8e98-95a7b78893dc",
      "name": "Alert",
      "type": "n8n-nodes-base.httpRequest",
      "typeVersion": 4.2,
      "position": [
        1180,
        400
      ],
      "parameters": {
        "method": "POST",
        "url": "={{$env.DISCORD_OPS_WEBHOOK}}",
        "sendBody": true,
        "specifyBody": "json",
        "jsonBody": "={\"embeds\":[{\"title\":\"\ud83e\udd16 AI Provider Alert\",\"description\":\"Ollama: {{ $json.ollama ? \\\"\u2705\\\" : \\\"\u274c\\\" }} ({{ $json.models }} models)\\nComfyUI: {{ $json.comfyui ? \\\"\u2705\\\" : \\\"\u274c\\\" }}\\nGPU: {{ $json.gpuOk ? \\\"\u2705\\\" : \\\"\ud83d\udd25\\\" }} {{ $json.gpu.temp }}\u00b0C | {{ $json.gpu.memory }} | {{ $json.gpu.utilization }}%\",\"color\":15158332}]}",
        "options": {
          "timeout": 10000
        }
      }
    },
    {
      "id": "604624f7-f06d-4a2b-988e-72085f12c4d6",
      "name": "GPU Metrics",
      "type": "n8n-nodes-base.httpRequest",
      "typeVersion": 4.2,
      "position": [
        460,
        580
      ],
      "notes": "Fetches GPU temperature, memory, and utilization from Windows machine via Hub API",
      "parameters": {
        "method": "GET",
        "url": "={{$env.VORLUX_HUB_URL}}/api/admin/gpu-status",
        "options": {
          "timeout": 10000
        }
      },
      "onError": "continueRegularOutput"
    }
  ],
  "connections": {
    "Every 15m": {
      "main": [
        [
          {
            "node": "Ollama",
            "type": "main",
            "index": 0
          },
          {
            "node": "ComfyUI",
            "type": "main",
            "index": 0
          },
          {
            "node": "GPU Metrics",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Ollama": {
      "main": [
        [
          {
            "node": "Status",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "ComfyUI": {
      "main": [
        [
          {
            "node": "Status",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Status": {
      "main": [
        [
          {
            "node": "Save",
            "type": "main",
            "index": 0
          },
          {
            "node": "Alert?",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Alert?": {
      "main": [
        [
          {
            "node": "Alert",
            "type": "main",
            "index": 0
          }
        ],
        []
      ]
    },
    "GPU Metrics": {
      "main": [
        [
          {
            "node": "Status",
            "type": "main",
            "index": 0
          }
        ]
      ]
    }
  },
  "settings": {
    "executionOrder": "v1",
    "saveManualExecutions": true,
    "saveExecutionProgress": true
  }
}