#!/usr/bin/env node
// Minimal OpenAI + scuttlebot relay example (Node 18+).
// Requires env: SCUTTLEBOT_URL, SCUTTLEBOT_TOKEN, SCUTTLEBOT_CHANNEL.
// Optional: SCUTTLEBOT_NICK, SCUTTLEBOT_SESSION_ID, OPENAI_API_KEY.

import OpenAI from "openai";
import path from "node:path";
// CLI prompt: first positional argument, with a friendly default.
const prompt = process.argv[2] || "Hello from openai-relay";

// Collapse runs of characters outside [A-Za-z0-9_-] into a single "-",
// then strip any leading/trailing dashes.
const sanitize = (value) => {
  const dashed = value.replace(/[^A-Za-z0-9_-]+/g, "-");
  return dashed.replace(/^-+|-+$/g, "");
};

// Nick component derived from the current working directory's name.
const baseName = sanitize(path.basename(process.cwd()) || "repo");

// Session component: explicit env id if provided, else parent (or own) PID.
const rawSession =
  process.env.SCUTTLEBOT_SESSION_ID || process.env.CODEX_SESSION_ID || String(process.ppid || process.pid);
const sessionSuffix = sanitize(rawSession) || "session";
// Runtime configuration, assembled once from the environment.
const env = process.env;
const cfg = {
  url: env.SCUTTLEBOT_URL,
  token: env.SCUTTLEBOT_TOKEN,
  // Channel is stored without any leading "#".
  channel: (env.SCUTTLEBOT_CHANNEL || "general").replace(/^#/, ""),
  // Default nick is derived from the repo directory and session id/PID.
  nick: env.SCUTTLEBOT_NICK || `codex-${baseName}-${sessionSuffix}`,
  model: env.OPENAI_MODEL || "gpt-4.1-mini",
  // "openai" means the daemon completes requests with its own stored key.
  backend: env.SCUTTLEBOT_LLM_BACKEND || "openai", // default to daemon-stored openai
};
// Fail fast on any missing required setting; "backend" and "model" always
// have defaults and are therefore exempt.
const optionalKeys = new Set(["backend", "model"]);
for (const [key, value] of Object.entries(cfg)) {
  if (optionalKeys.has(key)) continue;
  if (!value) {
    console.error(`missing env: ${key.toUpperCase()}`);
    process.exit(1);
  }
}

// With a backend configured, the daemon holds the OpenAI key server-side;
// otherwise a local OPENAI_API_KEY is mandatory.
const useBackend = Boolean(cfg.backend);
if (!useBackend && !process.env.OPENAI_API_KEY) {
  console.error("missing env: OPENAI_API_KEY (or set SCUTTLEBOT_LLM_BACKEND to use server-side key)");
  process.exit(1);
}
// A direct OpenAI client is only constructed when no daemon backend is in use.
const openai = !useBackend ? new OpenAI({ apiKey: process.env.OPENAI_API_KEY }) : null;

// Unix timestamp (seconds) of the previous poll; only newer messages are reported.
let lastCheck = 0;
// True when `text` contains cfg.nick as a standalone token (case-insensitive).
function mentionsNick(text) {
  // Escape regex metacharacters in the nick before embedding it in a pattern.
  const quoted = cfg.nick.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  // The nick must not be flanked by name-like characters (letters, digits, _ . / \ -).
  const pattern = new RegExp(`(^|[^A-Za-z0-9_./\\\\-])${quoted}($|[^A-Za-z0-9_./\\\\-])`, "i");
  return pattern.test(text);
}
/**
 * Post `text` to the configured scuttlebot channel as this relay's nick.
 * @param {string} text - Message body to send.
 * @throws {Error} When the relay responds with a non-2xx status.
 */
async function relayPost(text) {
  // Encode the channel path segment so unusual channel names (spaces,
  // slashes, "#") cannot corrupt the request path.
  const res = await fetch(`${cfg.url}/v1/channels/${encodeURIComponent(cfg.channel)}/messages`, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${cfg.token}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({ text, nick: cfg.nick }),
  });
  if (!res.ok) {
    throw new Error(`relay post failed: ${res.status} ${res.statusText}`);
  }
}
/**
 * Fetch channel messages and return the new human-authored ones that mention
 * our nick. Advances `lastCheck` so each call only reports messages that
 * arrived since the previous poll.
 * @returns {Promise<Array<object>>} Matching message objects ({nick, text, at, ...}).
 * @throws {Error} When the relay responds with a non-2xx status.
 */
async function relayPoll() {
  // Encode the channel path segment so unusual channel names cannot corrupt the path.
  const res = await fetch(`${cfg.url}/v1/channels/${encodeURIComponent(cfg.channel)}/messages`, {
    headers: { Authorization: `Bearer ${cfg.token}` },
  });
  if (!res.ok) {
    throw new Error(`relay poll failed: ${res.status} ${res.statusText}`);
  }
  const data = await res.json();
  const now = Date.now() / 1000;
  // Known bot nicks to ignore (plus our own nick).
  const bots = new Set([
    cfg.nick,
    "bridge",
    "oracle",
    "sentinel",
    "steward",
    "scribe",
    "warden",
    "snitch",
    "herald",
    "scroll",
    "systembot",
    "auditbot",
    "claude",
  ]);
  const msgs =
    data.messages?.filter((m) => {
      // Guard against malformed messages: a missing nick previously crashed
      // on m.nick.startsWith; treat it as an empty string instead.
      const nick = m.nick ?? "";
      return (
        !bots.has(nick) &&
        !nick.startsWith("claude-") &&
        !nick.startsWith("codex-") &&
        !nick.startsWith("gemini-") &&
        // Invalid timestamps parse to NaN, which fails the comparison and is dropped.
        Date.parse(m.at) / 1000 > lastCheck &&
        mentionsNick(m.text)
      );
    }) || [];
  lastCheck = now;
  return msgs;
}
// Entry point: announce the prompt on the channel, obtain a completion
// (daemon-side or via the local OpenAI client), relay the reply, then surface
// any human messages that mention this relay's nick.
async function main() {
  await relayPost(`starting: ${prompt}`);

  // Obtain the completion text from whichever backend is configured.
  const complete = async () => {
    if (useBackend) {
      // Daemon-side completion: the scuttlebot server holds the API key.
      const res = await fetch(`${cfg.url}/v1/llm/complete`, {
        method: "POST",
        headers: {
          Authorization: `Bearer ${cfg.token}`,
          "Content-Type": "application/json",
        },
        body: JSON.stringify({ backend: cfg.backend, prompt }),
      });
      if (!res.ok) throw new Error(`llm complete failed: ${res.status} ${res.statusText}`);
      const body = await res.json();
      return body.text;
    }
    // Local completion via the OpenAI SDK.
    const completion = await openai.chat.completions.create({
      model: cfg.model,
      messages: [{ role: "user", content: prompt }],
    });
    return completion.choices[0].message.content;
  };

  const reply = await complete();
  console.log(`OpenAI: ${reply}`);

  await relayPost(`OpenAI reply: ${reply}`);

  const instructions = await relayPoll();
  for (const m of instructions) {
    console.log(`[IRC] ${m.nick}: ${m.text}`);
  }
}
// Run the relay; report any failure on stderr and exit non-zero.
main().catch((error) => {
  console.error(error);
  process.exit(1);
});