{
  "name": "Vorlux AI | Audience Mood Analyzer",
  "nodes": [
    {
      "id": "f3e8d146-ad7d-4578-9eec-991bed83c56d",
      "name": "Schedule",
      "type": "n8n-nodes-base.scheduleTrigger",
      "typeVersion": 1.2,
      "position": [
        220,
        300
      ],
      "parameters": {
        "rule": {
          "interval": [
            {
              "field": "minutes",
              "minutesInterval": 10
            }
          ]
        }
      }
    },
    {
      "id": "18e8a6e0-260f-48bc-b0e8-5eae9375b94b",
      "name": "Fetch Data",
      "type": "n8n-nodes-base.httpRequest",
      "typeVersion": 4.2,
      "position": [
        460,
        300
      ],
      "parameters": {
        "method": "GET",
        "url": "={{$env.VORLUX_HUB_URL}}/api/chat/messages?limit=100&period=10m",
        "options": {
          "timeout": 15000
        }
      }
    },
    {
      "id": "3533061e-7cf1-4cae-b95a-f15015f5a1d6",
      "name": "Process",
      "type": "n8n-nodes-base.code",
      "typeVersion": 2,
      "position": [
        740,
        300
      ],
      "parameters": {
        "mode": "runOnceForAllItems",
        "jsCode": "const msgs = $input.first().json.data || [];\nif (!msgs.length) return [{json: {mood: 'quiet', score: 50, count: 0, skip: true}}];\n\n// Build a sample of recent messages for LLM analysis\nconst sample = msgs.slice(0, 30).map(m => m.content || '').filter(Boolean).join('\\n');\nif (!sample) return [{json: {mood: 'quiet', score: 50, count: msgs.length, skip: true}}];\n\n// Call local Ollama for sentiment analysis\ntry {\n  const ollamaUrl = $env.OLLAMA_URL || 'http://localhost:11434';\n  const res = await fetch(ollamaUrl + '/api/generate', {\n    method: 'POST',\n    headers: {'Content-Type': 'application/json'},\n    body: JSON.stringify({\n      model: 'llama3.2',\n      prompt: 'Analyze the sentiment of these chat messages from a live stream. Rate the overall mood on a scale of 0-100 (0=very negative, 50=neutral, 100=very positive). Also classify as one of: hype, positive, neutral, dipping, negative. Respond with ONLY JSON: {\"score\": NUMBER, \"mood\": \"STRING\", \"summary\": \"ONE_SENTENCE\"}\\n\\nMessages:\\n' + sample,\n      stream: false,\n      options: {temperature: 0.3, num_predict: 100}\n    }),\n    signal: AbortSignal.timeout(30000)\n  });\n  \n  if (res.ok) {\n    const data = await res.json();\n    const text = data.response || '';\n    // Parse JSON from LLM response\n    const match = text.match(/\\{[^}]+\\}/);\n    if (match) {\n      const parsed = JSON.parse(match[0]);\n      return [{json: {\n        mood: parsed.mood || 'neutral',\n        score: Math.max(0, Math.min(100, parsed.score || 50)),\n        count: msgs.length,\n        summary: parsed.summary || '',\n        method: 'llm',\n        skip: false\n      }}];\n    }\n  }\n} catch(e) {\n  // Fallback to keyword-based if LLM fails\n}\n\n// Keyword fallback\nconst pos = /\\b(love|great|awesome|hype|pog|lol|nice|gg|fire|based)\\b/i;\nconst neg = /\\b(boring|bad|hate|cringe|trash|dead|worst|lag)\\b/i;\nlet p = 0, n = 0;\nmsgs.forEach(m => {\n  const t = m.content || '';\n  if (pos.test(t)) p++;\n  if (neg.test(t)) n++;\n});\nconst score = Math.round(((p - n) / msgs.length + 1) * 50);\nconst mood = score > 70 ? 'hype' : score > 55 ? 'positive' : score > 45 ? 'neutral' : 'dipping';\nreturn [{json: {mood, score, count: msgs.length, pos: p, neg: n, method: 'keywords', skip: false}}];"
      },
      "notes": "LLM-based sentiment analysis via Ollama with keyword fallback"
    },
    {
      "id": "25ae634a-2740-4c66-8f3c-b5d89207693d",
      "name": "Post Discord",
      "type": "n8n-nodes-base.httpRequest",
      "typeVersion": 4.2,
      "position": [
        980,
        300
      ],
      "parameters": {
        "method": "POST",
        "url": "={{$env.DISCORD_OPS_WEBHOOK}}",
        "sendBody": true,
        "specifyBody": "json",
        "jsonBody": "={{ JSON.stringify($json.embeds ? $json : {embeds:[{title:$json.mood||$json.status||\"Update\",description:JSON.stringify($json).substring(0,500),color:5793266}]}) }}",
        "options": {
          "timeout": 10000
        }
      }
    }
  ],
  "connections": {
    "Schedule": {
      "main": [
        [
          {
            "node": "Fetch Data",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Fetch Data": {
      "main": [
        [
          {
            "node": "Process",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Process": {
      "main": [
        [
          {
            "node": "Post Discord",
            "type": "main",
            "index": 0
          }
        ]
      ]
    }
  },
  "settings": {
    "executionOrder": "v1",
    "saveManualExecutions": true,
    "saveExecutionProgress": true
  }
}