|
50baf1a…
|
lmata
|
1 |
#!/usr/bin/env python3 |
|
50baf1a…
|
lmata
|
2 |
"""Minimal OpenAI + scuttlebot relay example. |
|
50baf1a…
|
lmata
|
3 |
|
|
50baf1a…
|
lmata
|
4 |
Env required: |
|
50baf1a…
|
lmata
|
5 |
SCUTTLEBOT_URL, SCUTTLEBOT_TOKEN, SCUTTLEBOT_CHANNEL |
|
50baf1a…
|
lmata
|
6 |
Optional: |
|
50baf1a…
|
lmata
|
7 |
SCUTTLEBOT_NICK, SCUTTLEBOT_SESSION_ID, OPENAI_MODEL (default: gpt-4.1-mini) |
|
50baf1a…
|
lmata
|
8 |
""" |
|
50baf1a…
|
lmata
|
9 |
import os |
|
50baf1a…
|
lmata
|
10 |
import re |
|
50baf1a…
|
lmata
|
11 |
import sys |
|
50baf1a…
|
lmata
|
12 |
import time |
|
50baf1a…
|
lmata
|
13 |
from datetime import datetime |
|
50baf1a…
|
lmata
|
14 |
import requests |
|
50baf1a…
|
lmata
|
15 |
from openai import OpenAI |
|
50baf1a…
|
lmata
|
16 |
|
|
50baf1a…
|
lmata
|
17 |
# First CLI argument is the prompt; fall back to a demo greeting when absent.
prompt = "Hello from openai-relay" if len(sys.argv) < 2 else sys.argv[1]
|
50baf1a…
|
lmata
|
18 |
|
|
50baf1a…
|
lmata
|
19 |
|
|
50baf1a…
|
lmata
|
20 |
def sanitize(value: str) -> str:
    """Collapse every run of characters outside [A-Za-z0-9_-] to a single "-".

    Leading/trailing dashes are trimmed; an empty result falls back to
    the literal string "session".
    """
    collapsed = re.sub(r"[^A-Za-z0-9_-]+", "-", value)
    trimmed = collapsed.strip("-")
    return trimmed if trimmed else "session"
|
50baf1a…
|
lmata
|
22 |
|
|
50baf1a…
|
lmata
|
23 |
|
|
50baf1a…
|
lmata
|
24 |
env = os.environ

# Default nick: "codex-<cwd basename>-<session id or parent pid>".
base_name = sanitize(os.path.basename(os.getcwd()) or "repo")
raw_session = (
    env.get("SCUTTLEBOT_SESSION_ID")
    or env.get("CODEX_SESSION_ID")
    or str(os.getppid())
)
session_suffix = sanitize(raw_session)
default_nick = f"codex-{base_name}-{session_suffix}"

cfg = {
    "url": env.get("SCUTTLEBOT_URL"),
    "token": env.get("SCUTTLEBOT_TOKEN"),
    # Channel is stored without any leading "#" marker(s).
    "channel": env.get("SCUTTLEBOT_CHANNEL", "general").lstrip("#"),
    "nick": env.get("SCUTTLEBOT_NICK", default_nick),
    "model": env.get("OPENAI_MODEL", "gpt-4.1-mini"),
    # Default to daemon-stored openai backend.
    "backend": env.get("SCUTTLEBOT_LLM_BACKEND", "openai"),
}
|
50baf1a…
|
lmata
|
41 |
|
|
50baf1a…
|
lmata
|
42 |
# "model" AND "backend" are both optional: an empty backend legitimately
# means "call OpenAI directly with OPENAI_API_KEY" (handled just below).
# The original check treated a falsy backend as a missing required var and
# aborted with a misleading "missing env: backend" before the fallback
# could run.
missing = [k for k, v in cfg.items() if not v and k not in ("model", "backend")]
use_backend = bool(cfg["backend"])
if missing:
    # Required scuttlebot settings are absent; fail fast with a clear list.
    print(f"missing env: {', '.join(missing)}", file=sys.stderr)
    sys.exit(1)
if not use_backend and "OPENAI_API_KEY" not in os.environ:
    # Direct-API mode needs a local key when no daemon backend is configured.
    print("missing env: OPENAI_API_KEY (or set SCUTTLEBOT_LLM_BACKEND to use server-side key)", file=sys.stderr)
    sys.exit(1)
|
50baf1a…
|
lmata
|
50 |
|
|
50baf1a…
|
lmata
|
51 |
# A direct OpenAI client is only constructed when no daemon backend is in use.
client = None if use_backend else OpenAI(api_key=os.environ["OPENAI_API_KEY"])

# Watermark for polling: messages at or before this timestamp are ignored.
last_check = 0.0

# Match our nick as a standalone token, i.e. not embedded inside a longer
# word, path, or hyphenated name; case-insensitive.
_nick_pattern = rf"(^|[^A-Za-z0-9_./\\-]){re.escape(cfg['nick'])}($|[^A-Za-z0-9_./\\-])"
mention_re = re.compile(_nick_pattern, re.IGNORECASE)
|
50baf1a…
|
lmata
|
57 |
|
|
50baf1a…
|
lmata
|
58 |
|
|
50baf1a…
|
lmata
|
59 |
def relay_post(text: str) -> None:
    """POST *text* to the configured scuttlebot channel under our nick.

    Raises ``requests.HTTPError`` on a non-2xx response.
    """
    endpoint = f"{cfg['url']}/v1/channels/{cfg['channel']}/messages"
    headers = {
        "Authorization": f"Bearer {cfg['token']}",
        "Content-Type": "application/json",
    }
    payload = {"text": text, "nick": cfg["nick"]}
    response = requests.post(endpoint, headers=headers, json=payload, timeout=10)
    response.raise_for_status()
|
50baf1a…
|
lmata
|
70 |
|
|
50baf1a…
|
lmata
|
71 |
|
|
50baf1a…
|
lmata
|
72 |
def relay_poll():
    """Fetch channel messages; return new human messages mentioning our nick.

    Advances the module-level ``last_check`` watermark so that each call
    only returns messages newer than the previous call. Messages from
    known bot nicks and agent-prefixed nicks are skipped.
    """
    global last_check
    endpoint = f"{cfg['url']}/v1/channels/{cfg['channel']}/messages"
    response = requests.get(
        endpoint,
        headers={"Authorization": f"Bearer {cfg['token']}"},
        timeout=10,
    )
    response.raise_for_status()
    payload = response.json()

    poll_time = time.time()
    # Nicks we never reply to — ourselves plus the known resident bots.
    bot_nicks = {
        cfg["nick"],
        "bridge",
        "oracle",
        "sentinel",
        "steward",
        "scribe",
        "warden",
        "snitch",
        "herald",
        "scroll",
        "systembot",
        "auditbot",
        "claude",
    }
    agent_prefixes = ("claude-", "codex-", "gemini-")

    fresh = []
    for msg in payload.get("messages", []):
        nick = msg["nick"]
        if nick in bot_nicks or nick.startswith(agent_prefixes):
            continue
        # Timestamps arrive as ISO-8601 with a trailing "Z" (UTC).
        stamp = datetime.fromisoformat(msg["at"].replace("Z", "+00:00")).timestamp()
        if stamp <= last_check:
            continue
        if mention_re.search(msg["text"]):
            fresh.append(msg)
    last_check = poll_time
    return fresh
|
50baf1a…
|
lmata
|
109 |
|
|
50baf1a…
|
lmata
|
110 |
|
|
50baf1a…
|
lmata
|
111 |
def main():
    """Announce the prompt, obtain a completion, relay the reply, echo mentions."""
    relay_post(f"starting: {prompt}")

    if use_backend:
        # Delegate the LLM call to the scuttlebot daemon (server-side key).
        response = requests.post(
            f"{cfg['url']}/v1/llm/complete",
            headers={
                "Authorization": f"Bearer {cfg['token']}",
                "Content-Type": "application/json",
            },
            json={"backend": cfg["backend"], "prompt": prompt},
            timeout=20,
        )
        response.raise_for_status()
        reply = response.json()["text"]
    else:
        # Call the OpenAI API directly with the locally-configured client.
        chat = client.chat.completions.create(
            model=cfg["model"],
            messages=[{"role": "user", "content": prompt}],
        )
        reply = chat.choices[0].message.content

    print(f"OpenAI: {reply}")
    relay_post(f"OpenAI reply: {reply}")
    for msg in relay_poll():
        print(f"[IRC] {msg['nick']}: {msg['text']}")
|
50baf1a…
|
lmata
|
135 |
|
|
50baf1a…
|
lmata
|
136 |
|
|
50baf1a…
|
lmata
|
137 |
if __name__ == "__main__":
    # Top-level boundary: surface any failure on stderr and exit nonzero.
    # A broad catch is acceptable for a CLI sample.
    try:
        main()
    except Exception as exc:
        print(exc, file=sys.stderr)
        raise SystemExit(1)