|
0981a08…
|
noreply
|
1 |
"""Interactive planning companion REPL for PlanOpticon.""" |
|
0981a08…
|
noreply
|
2 |
|
|
0981a08…
|
noreply
|
3 |
import logging |
|
0981a08…
|
noreply
|
4 |
from pathlib import Path |
|
0981a08…
|
noreply
|
5 |
from typing import List, Optional |
|
0981a08…
|
noreply
|
6 |
|
|
0981a08…
|
noreply
|
7 |
# Module-level logger; used by _load_kg for best-effort failure reporting.
logger = logging.getLogger(__name__)

# File extensions used by CompanionREPL._discover() to classify files
# found in the current working directory.
VIDEO_EXTS = {".mp4", ".mkv", ".webm"}
DOC_EXTS = {".md", ".pdf", ".docx"}
|
0981a08…
|
noreply
|
11 |
|
|
0981a08…
|
noreply
|
12 |
|
|
0981a08…
|
noreply
|
13 |
class CompanionREPL:
    """Smart REPL with workspace awareness and KG querying."""

    def __init__(
        self,
        kb_paths: Optional[List[str]] = None,
        provider: str = "auto",
        chat_model: Optional[str] = None,
    ):
        """Capture configuration; actual discovery/initialisation runs in run().

        Args:
            kb_paths: Explicit knowledge-base paths; the first entry wins.
            provider: LLM provider name, or "auto" to let the manager pick.
            chat_model: Chat model override, or None for the provider default.
        """
        # Caller-supplied configuration.
        self._kb_paths = kb_paths or []
        self._provider_name = provider
        self._chat_model = chat_model
        # Lazily-populated collaborators (set by _load_kg/_init_provider/_init_agent).
        self.kg = None
        self.query_engine = None
        self.agent = None
        self.provider_manager = None
        # Workspace discovery results (filled by _discover).
        self._videos: List[Path] = []
        self._docs: List[Path] = []
        self._kg_path: Optional[Path] = None
|
0981a08…
|
noreply
|
32 |
|
|
0981a08…
|
noreply
|
33 |
def _discover(self) -> None: |
|
0981a08…
|
noreply
|
34 |
"""Auto-discover workspace context.""" |
|
0981a08…
|
noreply
|
35 |
# Discover knowledge graphs |
|
0981a08…
|
noreply
|
36 |
from video_processor.integrators.graph_discovery import ( |
|
0981a08…
|
noreply
|
37 |
find_nearest_graph, |
|
0981a08…
|
noreply
|
38 |
) |
|
0981a08…
|
noreply
|
39 |
|
|
0981a08…
|
noreply
|
40 |
if self._kb_paths: |
|
0981a08…
|
noreply
|
41 |
# Use explicit paths |
|
0981a08…
|
noreply
|
42 |
self._kg_path = Path(self._kb_paths[0]) |
|
0981a08…
|
noreply
|
43 |
else: |
|
0981a08…
|
noreply
|
44 |
self._kg_path = find_nearest_graph() |
|
0981a08…
|
noreply
|
45 |
|
|
0981a08…
|
noreply
|
46 |
if self._kg_path and self._kg_path.exists(): |
|
0981a08…
|
noreply
|
47 |
self._load_kg(self._kg_path) |
|
0981a08…
|
noreply
|
48 |
|
|
0981a08…
|
noreply
|
49 |
# Scan for media and doc files in cwd |
|
0981a08…
|
noreply
|
50 |
cwd = Path.cwd() |
|
0981a08…
|
noreply
|
51 |
try: |
|
0981a08…
|
noreply
|
52 |
for f in sorted(cwd.iterdir()): |
|
0981a08…
|
noreply
|
53 |
if f.suffix.lower() in VIDEO_EXTS: |
|
0981a08…
|
noreply
|
54 |
self._videos.append(f) |
|
0981a08…
|
noreply
|
55 |
elif f.suffix.lower() in DOC_EXTS: |
|
0981a08…
|
noreply
|
56 |
self._docs.append(f) |
|
0981a08…
|
noreply
|
57 |
except PermissionError: |
|
0981a08…
|
noreply
|
58 |
pass |
|
0981a08…
|
noreply
|
59 |
|
|
0981a08…
|
noreply
|
60 |
def _load_kg(self, path: Path) -> None: |
|
0981a08…
|
noreply
|
61 |
"""Load a knowledge graph from a file path.""" |
|
0981a08…
|
noreply
|
62 |
from video_processor.integrators.graph_query import ( |
|
0981a08…
|
noreply
|
63 |
GraphQueryEngine, |
|
0981a08…
|
noreply
|
64 |
) |
|
0981a08…
|
noreply
|
65 |
|
|
0981a08…
|
noreply
|
66 |
try: |
|
0981a08…
|
noreply
|
67 |
if path.suffix == ".json": |
|
0981a08…
|
noreply
|
68 |
self.query_engine = GraphQueryEngine.from_json_path(path) |
|
0981a08…
|
noreply
|
69 |
else: |
|
0981a08…
|
noreply
|
70 |
self.query_engine = GraphQueryEngine.from_db_path(path) |
|
0981a08…
|
noreply
|
71 |
self.kg = self.query_engine.store |
|
0981a08…
|
noreply
|
72 |
except Exception as exc: |
|
0981a08…
|
noreply
|
73 |
logger.debug("Failed to load KG at %s: %s", path, exc) |
|
0981a08…
|
noreply
|
74 |
|
|
0981a08…
|
noreply
|
75 |
def _init_provider(self) -> None: |
|
0981a08…
|
noreply
|
76 |
"""Try to initialise an LLM provider.""" |
|
0981a08…
|
noreply
|
77 |
try: |
|
0981a08…
|
noreply
|
78 |
from video_processor.providers.manager import ( |
|
0981a08…
|
noreply
|
79 |
ProviderManager, |
|
0981a08…
|
noreply
|
80 |
) |
|
0981a08…
|
noreply
|
81 |
|
|
0981a08…
|
noreply
|
82 |
prov = None if self._provider_name == "auto" else self._provider_name |
|
0981a08…
|
noreply
|
83 |
self.provider_manager = ProviderManager( |
|
0981a08…
|
noreply
|
84 |
chat_model=self._chat_model, |
|
0981a08…
|
noreply
|
85 |
provider=prov, |
|
0981a08…
|
noreply
|
86 |
) |
|
0981a08…
|
noreply
|
87 |
except Exception: |
|
0981a08…
|
noreply
|
88 |
self.provider_manager = None |
|
0981a08…
|
noreply
|
89 |
|
|
0981a08…
|
noreply
|
90 |
def _init_agent(self) -> None: |
|
0981a08…
|
noreply
|
91 |
"""Create a PlanningAgent if possible.""" |
|
0981a08…
|
noreply
|
92 |
try: |
|
0981a08…
|
noreply
|
93 |
from video_processor.agent.agent_loop import ( |
|
0981a08…
|
noreply
|
94 |
PlanningAgent, |
|
0981a08…
|
noreply
|
95 |
) |
|
0981a08…
|
noreply
|
96 |
from video_processor.agent.skills.base import ( |
|
0981a08…
|
noreply
|
97 |
AgentContext, |
|
0981a08…
|
noreply
|
98 |
) |
|
0981a08…
|
noreply
|
99 |
|
|
0981a08…
|
noreply
|
100 |
ctx = AgentContext( |
|
0981a08…
|
noreply
|
101 |
knowledge_graph=self.kg, |
|
0981a08…
|
noreply
|
102 |
query_engine=self.query_engine, |
|
0981a08…
|
noreply
|
103 |
provider_manager=self.provider_manager, |
|
0981a08…
|
noreply
|
104 |
) |
|
0981a08…
|
noreply
|
105 |
self.agent = PlanningAgent(context=ctx) |
|
0981a08…
|
noreply
|
106 |
except Exception: |
|
0981a08…
|
noreply
|
107 |
self.agent = None |
|
0981a08…
|
noreply
|
108 |
|
|
0981a08…
|
noreply
|
109 |
def _welcome_banner(self) -> str: |
|
0981a08…
|
noreply
|
110 |
"""Build the welcome banner text.""" |
|
0981a08…
|
noreply
|
111 |
lines = [ |
|
0981a08…
|
noreply
|
112 |
"", |
|
0981a08…
|
noreply
|
113 |
" PlanOpticon Companion", |
|
0981a08…
|
noreply
|
114 |
" Interactive planning REPL", |
|
0981a08…
|
noreply
|
115 |
"", |
|
0981a08…
|
noreply
|
116 |
] |
|
0981a08…
|
noreply
|
117 |
|
|
0981a08…
|
noreply
|
118 |
if self._kg_path and self.query_engine: |
|
0981a08…
|
noreply
|
119 |
stats = self.query_engine.stats().data |
|
0981a08…
|
noreply
|
120 |
lines.append( |
|
0981a08…
|
noreply
|
121 |
f" Knowledge graph: {self._kg_path.name}" |
|
0981a08…
|
noreply
|
122 |
f" ({stats['entity_count']} entities," |
|
0981a08…
|
noreply
|
123 |
f" {stats['relationship_count']} relationships)" |
|
0981a08…
|
noreply
|
124 |
) |
|
0981a08…
|
noreply
|
125 |
else: |
|
0981a08…
|
noreply
|
126 |
lines.append(" No knowledge graph loaded.") |
|
0981a08…
|
noreply
|
127 |
|
|
0981a08…
|
noreply
|
128 |
if self._videos: |
|
0981a08…
|
noreply
|
129 |
names = ", ".join(v.name for v in self._videos[:3]) |
|
0981a08…
|
noreply
|
130 |
suffix = f" (+{len(self._videos) - 3} more)" if len(self._videos) > 3 else "" |
|
0981a08…
|
noreply
|
131 |
lines.append(f" Videos: {names}{suffix}") |
|
0981a08…
|
noreply
|
132 |
|
|
0981a08…
|
noreply
|
133 |
if self._docs: |
|
0981a08…
|
noreply
|
134 |
names = ", ".join(d.name for d in self._docs[:3]) |
|
0981a08…
|
noreply
|
135 |
suffix = f" (+{len(self._docs) - 3} more)" if len(self._docs) > 3 else "" |
|
0981a08…
|
noreply
|
136 |
lines.append(f" Docs: {names}{suffix}") |
|
0981a08…
|
noreply
|
137 |
|
|
0981a08…
|
noreply
|
138 |
if self.provider_manager: |
|
0981a08…
|
noreply
|
139 |
prov = getattr(self.provider_manager, "provider", self._provider_name) |
|
0981a08…
|
noreply
|
140 |
model = self._chat_model or "default" |
|
0981a08…
|
noreply
|
141 |
lines.append(f" LLM provider: {prov} (model: {model})") |
|
0981a08…
|
noreply
|
142 |
else: |
|
0981a08…
|
noreply
|
143 |
lines.append(" LLM provider: none") |
|
0981a08…
|
noreply
|
144 |
lines.append("") |
|
0981a08…
|
noreply
|
145 |
lines.append(" Type /help for commands, or ask a question.") |
|
0981a08…
|
noreply
|
146 |
lines.append("") |
|
0981a08…
|
noreply
|
147 |
return "\n".join(lines) |
|
0981a08…
|
noreply
|
148 |
|
|
0981a08…
|
noreply
|
149 |
# ── Command handlers ── |
|
0981a08…
|
noreply
|
150 |
|
|
0981a08…
|
noreply
|
151 |
def _cmd_help(self) -> str: |
|
0981a08…
|
noreply
|
152 |
lines = [ |
|
0981a08…
|
noreply
|
153 |
"Available commands:", |
|
0981a08…
|
noreply
|
154 |
" /help Show this help", |
|
0981a08…
|
noreply
|
155 |
" /status Workspace status", |
|
0981a08…
|
noreply
|
156 |
" /skills List available skills", |
|
0981a08…
|
noreply
|
157 |
" /entities [--type T] List KG entities", |
|
0981a08…
|
noreply
|
158 |
" /search TERM Search entities by name", |
|
0981a08…
|
noreply
|
159 |
" /neighbors ENTITY Show entity relationships", |
|
0981a08…
|
noreply
|
160 |
" /export FORMAT Export KG (markdown, obsidian, notion, csv)", |
|
0981a08…
|
noreply
|
161 |
" /analyze PATH Analyze a video/doc", |
|
0981a08…
|
noreply
|
162 |
" /ingest PATH Ingest a file into the KG", |
|
0981a08…
|
noreply
|
163 |
" /auth SERVICE Authenticate with a cloud service", |
|
0981a08…
|
noreply
|
164 |
" /provider [NAME] List or switch LLM provider", |
|
0981a08…
|
noreply
|
165 |
" /model [NAME] Show or switch chat model", |
|
0981a08…
|
noreply
|
166 |
" /run SKILL Run a skill by name", |
|
0981a08…
|
noreply
|
167 |
" /plan Run project_plan skill", |
|
0981a08…
|
noreply
|
168 |
" /prd Run PRD skill", |
|
0981a08…
|
noreply
|
169 |
" /tasks Run task_breakdown skill", |
|
0981a08…
|
noreply
|
170 |
" /quit, /exit Exit companion", |
|
0981a08…
|
noreply
|
171 |
"", |
|
0981a08…
|
noreply
|
172 |
"Any other input is sent to the chat agent (requires LLM).", |
|
0981a08…
|
noreply
|
173 |
] |
|
0981a08…
|
noreply
|
174 |
return "\n".join(lines) |
|
0981a08…
|
noreply
|
175 |
|
|
0981a08…
|
noreply
|
176 |
def _cmd_status(self) -> str: |
|
0981a08…
|
noreply
|
177 |
lines = ["Workspace status:"] |
|
0981a08…
|
noreply
|
178 |
if self._kg_path and self.query_engine: |
|
0981a08…
|
noreply
|
179 |
stats = self.query_engine.stats().data |
|
0981a08…
|
noreply
|
180 |
lines.append( |
|
0981a08…
|
noreply
|
181 |
f" KG: {self._kg_path}" |
|
0981a08…
|
noreply
|
182 |
f" ({stats['entity_count']} entities," |
|
0981a08…
|
noreply
|
183 |
f" {stats['relationship_count']} relationships)" |
|
0981a08…
|
noreply
|
184 |
) |
|
0981a08…
|
noreply
|
185 |
if stats.get("entity_types"): |
|
0981a08…
|
noreply
|
186 |
for t, c in sorted( |
|
0981a08…
|
noreply
|
187 |
stats["entity_types"].items(), |
|
0981a08…
|
noreply
|
188 |
key=lambda x: -x[1], |
|
0981a08…
|
noreply
|
189 |
): |
|
0981a08…
|
noreply
|
190 |
lines.append(f" {t}: {c}") |
|
0981a08…
|
noreply
|
191 |
else: |
|
0981a08…
|
noreply
|
192 |
lines.append(" KG: not loaded") |
|
0981a08…
|
noreply
|
193 |
|
|
0981a08…
|
noreply
|
194 |
lines.append(f" Videos: {len(self._videos)} found") |
|
0981a08…
|
noreply
|
195 |
lines.append(f" Docs: {len(self._docs)} found") |
|
0981a08…
|
noreply
|
196 |
lines.append(f" Provider: {'active' if self.provider_manager else 'none'}") |
|
0981a08…
|
noreply
|
197 |
return "\n".join(lines) |
|
0981a08…
|
noreply
|
198 |
|
|
0981a08…
|
noreply
|
199 |
def _cmd_skills(self) -> str: |
|
0981a08…
|
noreply
|
200 |
from video_processor.agent.skills.base import ( |
|
0981a08…
|
noreply
|
201 |
list_skills, |
|
0981a08…
|
noreply
|
202 |
) |
|
0981a08…
|
noreply
|
203 |
|
|
0981a08…
|
noreply
|
204 |
skills = list_skills() |
|
0981a08…
|
noreply
|
205 |
if not skills: |
|
0981a08…
|
noreply
|
206 |
return "No skills registered." |
|
0981a08…
|
noreply
|
207 |
lines = ["Available skills:"] |
|
0981a08…
|
noreply
|
208 |
for s in skills: |
|
0981a08…
|
noreply
|
209 |
lines.append(f" {s.name}: {s.description}") |
|
0981a08…
|
noreply
|
210 |
return "\n".join(lines) |
|
0981a08…
|
noreply
|
211 |
|
|
0981a08…
|
noreply
|
212 |
def _cmd_entities(self, args: str) -> str: |
|
0981a08…
|
noreply
|
213 |
if not self.query_engine: |
|
0981a08…
|
noreply
|
214 |
return "No knowledge graph loaded." |
|
0981a08…
|
noreply
|
215 |
entity_type = None |
|
0981a08…
|
noreply
|
216 |
parts = args.split() |
|
0981a08…
|
noreply
|
217 |
for i, part in enumerate(parts): |
|
0981a08…
|
noreply
|
218 |
if part == "--type" and i + 1 < len(parts): |
|
0981a08…
|
noreply
|
219 |
entity_type = parts[i + 1] |
|
0981a08…
|
noreply
|
220 |
result = self.query_engine.entities( |
|
0981a08…
|
noreply
|
221 |
entity_type=entity_type, |
|
0981a08…
|
noreply
|
222 |
) |
|
0981a08…
|
noreply
|
223 |
return result.to_text() |
|
0981a08…
|
noreply
|
224 |
|
|
0981a08…
|
noreply
|
225 |
def _cmd_search(self, term: str) -> str: |
|
0981a08…
|
noreply
|
226 |
if not self.query_engine: |
|
0981a08…
|
noreply
|
227 |
return "No knowledge graph loaded." |
|
0981a08…
|
noreply
|
228 |
term = term.strip() |
|
0981a08…
|
noreply
|
229 |
if not term: |
|
0981a08…
|
noreply
|
230 |
return "Usage: /search TERM" |
|
0981a08…
|
noreply
|
231 |
result = self.query_engine.entities(name=term) |
|
0981a08…
|
noreply
|
232 |
return result.to_text() |
|
0981a08…
|
noreply
|
233 |
|
|
0981a08…
|
noreply
|
234 |
def _cmd_neighbors(self, entity: str) -> str: |
|
0981a08…
|
noreply
|
235 |
if not self.query_engine: |
|
0981a08…
|
noreply
|
236 |
return "No knowledge graph loaded." |
|
0981a08…
|
noreply
|
237 |
entity = entity.strip() |
|
0981a08…
|
noreply
|
238 |
if not entity: |
|
0981a08…
|
noreply
|
239 |
return "Usage: /neighbors ENTITY" |
|
0981a08…
|
noreply
|
240 |
result = self.query_engine.neighbors(entity) |
|
0981a08…
|
noreply
|
241 |
return result.to_text() |
|
0981a08…
|
noreply
|
242 |
|
|
0981a08…
|
noreply
|
243 |
def _cmd_export(self, fmt: str) -> str: |
|
0981a08…
|
noreply
|
244 |
fmt = fmt.strip().lower() |
|
0981a08…
|
noreply
|
245 |
if not fmt: |
|
0981a08…
|
noreply
|
246 |
return "Usage: /export FORMAT (markdown, obsidian, notion, csv)" |
|
0981a08…
|
noreply
|
247 |
if not self._kg_path: |
|
0981a08…
|
noreply
|
248 |
return "No knowledge graph loaded." |
|
0981a08…
|
noreply
|
249 |
return ( |
|
0981a08…
|
noreply
|
250 |
f"Export '{fmt}' requested. Use the CLI command:\n" |
|
0981a08…
|
noreply
|
251 |
f" planopticon export {fmt} {self._kg_path}" |
|
0981a08…
|
noreply
|
252 |
) |
|
0981a08…
|
noreply
|
253 |
|
|
0981a08…
|
noreply
|
254 |
def _cmd_analyze(self, path_str: str) -> str: |
|
0981a08…
|
noreply
|
255 |
path_str = path_str.strip() |
|
0981a08…
|
noreply
|
256 |
if not path_str: |
|
0981a08…
|
noreply
|
257 |
return "Usage: /analyze PATH" |
|
0981a08…
|
noreply
|
258 |
p = Path(path_str) |
|
0981a08…
|
noreply
|
259 |
if not p.exists(): |
|
0981a08…
|
noreply
|
260 |
return f"File not found: {path_str}" |
|
0981a08…
|
noreply
|
261 |
return f"Analyze requested for {p.name}. Use the CLI:\n planopticon analyze -i {p}" |
|
0981a08…
|
noreply
|
262 |
|
|
0981a08…
|
noreply
|
263 |
def _cmd_ingest(self, path_str: str) -> str: |
|
0981a08…
|
noreply
|
264 |
path_str = path_str.strip() |
|
0981a08…
|
noreply
|
265 |
if not path_str: |
|
0981a08…
|
noreply
|
266 |
return "Usage: /ingest PATH" |
|
0981a08…
|
noreply
|
267 |
p = Path(path_str) |
|
0981a08…
|
noreply
|
268 |
if not p.exists(): |
|
0981a08…
|
noreply
|
269 |
return f"File not found: {path_str}" |
|
0981a08…
|
noreply
|
270 |
return f"Ingest requested for {p.name}. Use the CLI:\n planopticon ingest {p}" |
|
0981a08…
|
noreply
|
271 |
|
|
0981a08…
|
noreply
|
272 |
def _cmd_run_skill(self, skill_name: str) -> str: |
|
0981a08…
|
noreply
|
273 |
skill_name = skill_name.strip() |
|
0981a08…
|
noreply
|
274 |
if not skill_name: |
|
0981a08…
|
noreply
|
275 |
return "Usage: /run SKILL_NAME" |
|
0981a08…
|
noreply
|
276 |
from video_processor.agent.skills.base import ( |
|
0981a08…
|
noreply
|
277 |
get_skill, |
|
0981a08…
|
noreply
|
278 |
) |
|
0981a08…
|
noreply
|
279 |
|
|
0981a08…
|
noreply
|
280 |
skill = get_skill(skill_name) |
|
0981a08…
|
noreply
|
281 |
if not skill: |
|
0981a08…
|
noreply
|
282 |
return f"Unknown skill: {skill_name}" |
|
0981a08…
|
noreply
|
283 |
if not self.agent: |
|
0981a08…
|
noreply
|
284 |
return "Agent not initialised (no LLM provider?)." |
|
0981a08…
|
noreply
|
285 |
if not skill.can_execute(self.agent.context): |
|
0981a08…
|
noreply
|
286 |
return f"Skill '{skill_name}' cannot execute in current context." |
|
0981a08…
|
noreply
|
287 |
try: |
|
0981a08…
|
noreply
|
288 |
artifact = skill.execute(self.agent.context) |
|
0981a08…
|
noreply
|
289 |
return f"--- {artifact.name} ({artifact.artifact_type}) ---\n{artifact.content}" |
|
0981a08…
|
noreply
|
290 |
except Exception as exc: |
|
0981a08…
|
noreply
|
291 |
return f"Skill execution failed: {exc}" |
|
0981a08…
|
noreply
|
292 |
|
|
0981a08…
|
noreply
|
293 |
def _cmd_auth(self, args: str) -> str: |
|
0981a08…
|
noreply
|
294 |
"""Authenticate with a cloud service.""" |
|
0981a08…
|
noreply
|
295 |
service = args.strip().lower() |
|
0981a08…
|
noreply
|
296 |
if not service: |
|
0981a08…
|
noreply
|
297 |
from video_processor.auth import KNOWN_CONFIGS |
|
0981a08…
|
noreply
|
298 |
|
|
0981a08…
|
noreply
|
299 |
services = ", ".join(sorted(KNOWN_CONFIGS.keys())) |
|
0981a08…
|
noreply
|
300 |
return f"Usage: /auth SERVICE\nAvailable: {services}" |
|
0981a08…
|
noreply
|
301 |
|
|
0981a08…
|
noreply
|
302 |
from video_processor.auth import get_auth_manager |
|
0981a08…
|
noreply
|
303 |
|
|
0981a08…
|
noreply
|
304 |
manager = get_auth_manager(service) |
|
0981a08…
|
noreply
|
305 |
if not manager: |
|
0981a08…
|
noreply
|
306 |
return f"Unknown service: {service}" |
|
0981a08…
|
noreply
|
307 |
|
|
0981a08…
|
noreply
|
308 |
result = manager.authenticate() |
|
0981a08…
|
noreply
|
309 |
if result.success: |
|
0981a08…
|
noreply
|
310 |
return f"{service.title()} authenticated ({result.method})" |
|
0981a08…
|
noreply
|
311 |
return f"{service.title()} auth failed: {result.error}" |
|
0981a08…
|
noreply
|
312 |
|
|
0981a08…
|
noreply
|
313 |
def _cmd_provider(self, args: str) -> str: |
|
0981a08…
|
noreply
|
314 |
"""List available providers or switch to a specific one.""" |
|
0981a08…
|
noreply
|
315 |
args = args.strip().lower() |
|
0981a08…
|
noreply
|
316 |
if not args or args == "list": |
|
0981a08…
|
noreply
|
317 |
lines = ["Available providers:"] |
|
0981a08…
|
noreply
|
318 |
known = [ |
|
0981a08…
|
noreply
|
319 |
"openai", |
|
0981a08…
|
noreply
|
320 |
"anthropic", |
|
0981a08…
|
noreply
|
321 |
"gemini", |
|
0981a08…
|
noreply
|
322 |
"ollama", |
|
0981a08…
|
noreply
|
323 |
"azure", |
|
0981a08…
|
noreply
|
324 |
"together", |
|
0981a08…
|
noreply
|
325 |
"fireworks", |
|
0981a08…
|
noreply
|
326 |
"cerebras", |
|
0981a08…
|
noreply
|
327 |
"xai", |
|
0981a08…
|
noreply
|
328 |
] |
|
0981a08…
|
noreply
|
329 |
import os |
|
0981a08…
|
noreply
|
330 |
|
|
0981a08…
|
noreply
|
331 |
key_map = { |
|
0981a08…
|
noreply
|
332 |
"openai": "OPENAI_API_KEY", |
|
0981a08…
|
noreply
|
333 |
"anthropic": "ANTHROPIC_API_KEY", |
|
0981a08…
|
noreply
|
334 |
"gemini": "GEMINI_API_KEY", |
|
0981a08…
|
noreply
|
335 |
"azure": "AZURE_OPENAI_API_KEY", |
|
0981a08…
|
noreply
|
336 |
"together": "TOGETHER_API_KEY", |
|
0981a08…
|
noreply
|
337 |
"fireworks": "FIREWORKS_API_KEY", |
|
0981a08…
|
noreply
|
338 |
"cerebras": "CEREBRAS_API_KEY", |
|
0981a08…
|
noreply
|
339 |
"xai": "XAI_API_KEY", |
|
0981a08…
|
noreply
|
340 |
} |
|
0981a08…
|
noreply
|
341 |
current = getattr(self.provider_manager, "provider", self._provider_name) |
|
0981a08…
|
noreply
|
342 |
for name in known: |
|
0981a08…
|
noreply
|
343 |
env = key_map.get(name) |
|
0981a08…
|
noreply
|
344 |
has_key = bool(os.environ.get(env, "")) if env else None |
|
0981a08…
|
noreply
|
345 |
if name == "ollama": |
|
0981a08…
|
noreply
|
346 |
status = "local" |
|
0981a08…
|
noreply
|
347 |
elif has_key: |
|
0981a08…
|
noreply
|
348 |
status = "ready" |
|
0981a08…
|
noreply
|
349 |
else: |
|
0981a08…
|
noreply
|
350 |
status = "no key" |
|
0981a08…
|
noreply
|
351 |
active = " (active)" if name == current else "" |
|
0981a08…
|
noreply
|
352 |
lines.append(f" {name}: {status}{active}") |
|
0981a08…
|
noreply
|
353 |
lines.append(f"\nCurrent: {current or 'none'}") |
|
0981a08…
|
noreply
|
354 |
return "\n".join(lines) |
|
0981a08…
|
noreply
|
355 |
|
|
0981a08…
|
noreply
|
356 |
# Switch provider |
|
0981a08…
|
noreply
|
357 |
self._provider_name = args |
|
0981a08…
|
noreply
|
358 |
self._chat_model = None |
|
0981a08…
|
noreply
|
359 |
self._init_provider() |
|
0981a08…
|
noreply
|
360 |
self._init_agent() |
|
0981a08…
|
noreply
|
361 |
if self.provider_manager: |
|
0981a08…
|
noreply
|
362 |
return f"Switched to provider: {args}" |
|
0981a08…
|
noreply
|
363 |
return f"Failed to initialise provider: {args}" |
|
0981a08…
|
noreply
|
364 |
|
|
0981a08…
|
noreply
|
365 |
def _cmd_model(self, args: str) -> str: |
|
0981a08…
|
noreply
|
366 |
"""Switch the chat model.""" |
|
0981a08…
|
noreply
|
367 |
args = args.strip() |
|
0981a08…
|
noreply
|
368 |
if not args: |
|
0981a08…
|
noreply
|
369 |
current = self._chat_model or "default" |
|
0981a08…
|
noreply
|
370 |
return f"Current model: {current}\nUsage: /model MODEL_NAME" |
|
0981a08…
|
noreply
|
371 |
self._chat_model = args |
|
0981a08…
|
noreply
|
372 |
self._init_provider() |
|
0981a08…
|
noreply
|
373 |
self._init_agent() |
|
0981a08…
|
noreply
|
374 |
if self.provider_manager: |
|
0981a08…
|
noreply
|
375 |
return f"Switched to model: {args}" |
|
0981a08…
|
noreply
|
376 |
return f"Failed to initialise with model: {args}" |
|
0981a08…
|
noreply
|
377 |
|
|
0981a08…
|
noreply
|
378 |
def _cmd_chat(self, message: str) -> str: |
|
0981a08…
|
noreply
|
379 |
if not self.provider_manager or not self.agent: |
|
0981a08…
|
noreply
|
380 |
return ( |
|
0981a08…
|
noreply
|
381 |
"Chat requires an LLM provider. Set one of:\n" |
|
0981a08…
|
noreply
|
382 |
" OPENAI_API_KEY\n" |
|
0981a08…
|
noreply
|
383 |
" ANTHROPIC_API_KEY\n" |
|
0981a08…
|
noreply
|
384 |
" GEMINI_API_KEY\n" |
|
0981a08…
|
noreply
|
385 |
"Or pass --provider / --chat-model." |
|
0981a08…
|
noreply
|
386 |
) |
|
0981a08…
|
noreply
|
387 |
try: |
|
0981a08…
|
noreply
|
388 |
return self.agent.chat(message) |
|
0981a08…
|
noreply
|
389 |
except Exception as exc: |
|
0981a08…
|
noreply
|
390 |
return f"Chat error: {exc}" |
|
0981a08…
|
noreply
|
391 |
|
|
0981a08…
|
noreply
|
392 |
# ── Main dispatch ── |
|
0981a08…
|
noreply
|
393 |
|
|
0981a08…
|
noreply
|
394 |
def handle_input(self, line: str) -> str: |
|
0981a08…
|
noreply
|
395 |
"""Process a single input line and return output.""" |
|
0981a08…
|
noreply
|
396 |
line = line.strip() |
|
0981a08…
|
noreply
|
397 |
if not line: |
|
0981a08…
|
noreply
|
398 |
return "" |
|
0981a08…
|
noreply
|
399 |
|
|
0981a08…
|
noreply
|
400 |
# Bare quit/exit/bye without slash |
|
0981a08…
|
noreply
|
401 |
if line.lower() in ("quit", "exit", "bye", "q"): |
|
0981a08…
|
noreply
|
402 |
return "__QUIT__" |
|
0981a08…
|
noreply
|
403 |
|
|
0981a08…
|
noreply
|
404 |
if not line.startswith("/"): |
|
0981a08…
|
noreply
|
405 |
return self._cmd_chat(line) |
|
0981a08…
|
noreply
|
406 |
|
|
0981a08…
|
noreply
|
407 |
parts = line.split(maxsplit=1) |
|
0981a08…
|
noreply
|
408 |
cmd = parts[0].lower() |
|
0981a08…
|
noreply
|
409 |
args = parts[1] if len(parts) > 1 else "" |
|
0981a08…
|
noreply
|
410 |
|
|
0981a08…
|
noreply
|
411 |
if cmd in ("/quit", "/exit"): |
|
0981a08…
|
noreply
|
412 |
return "__QUIT__" |
|
0981a08…
|
noreply
|
413 |
if cmd == "/help": |
|
0981a08…
|
noreply
|
414 |
return self._cmd_help() |
|
0981a08…
|
noreply
|
415 |
if cmd == "/status": |
|
0981a08…
|
noreply
|
416 |
return self._cmd_status() |
|
0981a08…
|
noreply
|
417 |
if cmd == "/skills": |
|
0981a08…
|
noreply
|
418 |
return self._cmd_skills() |
|
0981a08…
|
noreply
|
419 |
if cmd == "/entities": |
|
0981a08…
|
noreply
|
420 |
return self._cmd_entities(args) |
|
0981a08…
|
noreply
|
421 |
if cmd == "/search": |
|
0981a08…
|
noreply
|
422 |
return self._cmd_search(args) |
|
0981a08…
|
noreply
|
423 |
if cmd == "/neighbors": |
|
0981a08…
|
noreply
|
424 |
return self._cmd_neighbors(args) |
|
0981a08…
|
noreply
|
425 |
if cmd == "/export": |
|
0981a08…
|
noreply
|
426 |
return self._cmd_export(args) |
|
0981a08…
|
noreply
|
427 |
if cmd == "/analyze": |
|
0981a08…
|
noreply
|
428 |
return self._cmd_analyze(args) |
|
0981a08…
|
noreply
|
429 |
if cmd == "/ingest": |
|
0981a08…
|
noreply
|
430 |
return self._cmd_ingest(args) |
|
0981a08…
|
noreply
|
431 |
if cmd == "/auth": |
|
0981a08…
|
noreply
|
432 |
return self._cmd_auth(args) |
|
0981a08…
|
noreply
|
433 |
if cmd == "/provider": |
|
0981a08…
|
noreply
|
434 |
return self._cmd_provider(args) |
|
0981a08…
|
noreply
|
435 |
if cmd == "/model": |
|
0981a08…
|
noreply
|
436 |
return self._cmd_model(args) |
|
0981a08…
|
noreply
|
437 |
if cmd == "/run": |
|
0981a08…
|
noreply
|
438 |
return self._cmd_run_skill(args) |
|
0981a08…
|
noreply
|
439 |
if cmd == "/plan": |
|
0981a08…
|
noreply
|
440 |
return self._cmd_run_skill("project_plan") |
|
0981a08…
|
noreply
|
441 |
if cmd == "/prd": |
|
0981a08…
|
noreply
|
442 |
return self._cmd_run_skill("prd") |
|
0981a08…
|
noreply
|
443 |
if cmd == "/tasks": |
|
0981a08…
|
noreply
|
444 |
return self._cmd_run_skill("task_breakdown") |
|
0981a08…
|
noreply
|
445 |
|
|
0981a08…
|
noreply
|
446 |
return f"Unknown command: {cmd}. Type /help for help." |
|
0981a08…
|
noreply
|
447 |
|
|
1707c67…
|
noreply
|
448 |
    # Slash commands offered to readline tab completion (see _setup_readline).
    # Keep in sync with the dispatch table in handle_input and /help text.
    COMMANDS = [
        "/help",
        "/status",
        "/skills",
        "/entities",
        "/search",
        "/neighbors",
        "/export",
        "/analyze",
        "/ingest",
        "/auth",
        "/provider",
        "/model",
        "/run",
        "/plan",
        "/prd",
        "/tasks",
        "/quit",
        "/exit",
    ]
|
1707c67…
|
noreply
|
468 |
|
|
1707c67…
|
noreply
|
469 |
def _setup_readline(self) -> None: |
|
1707c67…
|
noreply
|
470 |
"""Set up readline for tab completion and history.""" |
|
1707c67…
|
noreply
|
471 |
try: |
|
1707c67…
|
noreply
|
472 |
import readline |
|
1707c67…
|
noreply
|
473 |
except ImportError: |
|
1707c67…
|
noreply
|
474 |
return |
|
1707c67…
|
noreply
|
475 |
|
|
1707c67…
|
noreply
|
476 |
commands = self.COMMANDS |
|
1707c67…
|
noreply
|
477 |
|
|
1707c67…
|
noreply
|
478 |
def completer(text, state): |
|
1707c67…
|
noreply
|
479 |
if text.startswith("/"): |
|
1707c67…
|
noreply
|
480 |
matches = [c for c in commands if c.startswith(text)] |
|
1707c67…
|
noreply
|
481 |
else: |
|
1707c67…
|
noreply
|
482 |
matches = [c for c in commands if c.startswith("/" + text)] |
|
1707c67…
|
noreply
|
483 |
matches = [m[1:] for m in matches] # strip leading / |
|
1707c67…
|
noreply
|
484 |
if state < len(matches): |
|
1707c67…
|
noreply
|
485 |
return matches[state] |
|
1707c67…
|
noreply
|
486 |
return None |
|
1707c67…
|
noreply
|
487 |
|
|
1707c67…
|
noreply
|
488 |
readline.set_completer(completer) |
|
1707c67…
|
noreply
|
489 |
readline.set_completer_delims(" \t\n") |
|
1707c67…
|
noreply
|
490 |
# macOS uses libedit which needs a different syntax |
|
1707c67…
|
noreply
|
491 |
if "libedit" in readline.__doc__: |
|
1707c67…
|
noreply
|
492 |
readline.parse_and_bind("bind ^I rl_complete") |
|
1707c67…
|
noreply
|
493 |
else: |
|
1707c67…
|
noreply
|
494 |
readline.parse_and_bind("tab: complete") |
|
1707c67…
|
noreply
|
495 |
|
|
1707c67…
|
noreply
|
496 |
# Load history |
|
1707c67…
|
noreply
|
497 |
history_path = Path.home() / ".planopticon_history" |
|
1707c67…
|
noreply
|
498 |
try: |
|
1707c67…
|
noreply
|
499 |
if history_path.exists(): |
|
1707c67…
|
noreply
|
500 |
readline.read_history_file(str(history_path)) |
|
1707c67…
|
noreply
|
501 |
except Exception: |
|
1707c67…
|
noreply
|
502 |
pass |
|
1707c67…
|
noreply
|
503 |
|
|
1707c67…
|
noreply
|
504 |
self._history_path = history_path |
|
1707c67…
|
noreply
|
505 |
|
|
1707c67…
|
noreply
|
506 |
    def _save_history(self) -> None:
        """Save readline history."""
        try:
            import readline

            readline.write_history_file(str(self._history_path))
        except Exception:
            # Best-effort on exit: also swallows AttributeError when
            # _setup_readline never ran (readline unavailable), so quitting
            # stays quiet in every environment.
            pass
|
1707c67…
|
noreply
|
514 |
|
|
0981a08…
|
noreply
|
515 |
def run(self) -> None: |
|
0981a08…
|
noreply
|
516 |
"""Main REPL loop.""" |
|
0981a08…
|
noreply
|
517 |
self._discover() |
|
0981a08…
|
noreply
|
518 |
self._init_provider() |
|
0981a08…
|
noreply
|
519 |
self._init_agent() |
|
1707c67…
|
noreply
|
520 |
self._setup_readline() |
|
0981a08…
|
noreply
|
521 |
|
|
0981a08…
|
noreply
|
522 |
print(self._welcome_banner()) |
|
0981a08…
|
noreply
|
523 |
|
|
0981a08…
|
noreply
|
524 |
while True: |
|
0981a08…
|
noreply
|
525 |
try: |
|
0981a08…
|
noreply
|
526 |
line = input("planopticon> ") |
|
0981a08…
|
noreply
|
527 |
except (KeyboardInterrupt, EOFError): |
|
0981a08…
|
noreply
|
528 |
print("\nBye.") |
|
0981a08…
|
noreply
|
529 |
break |
|
0981a08…
|
noreply
|
530 |
|
|
0981a08…
|
noreply
|
531 |
output = self.handle_input(line) |
|
0981a08…
|
noreply
|
532 |
if output == "__QUIT__": |
|
0981a08…
|
noreply
|
533 |
print("Bye.") |
|
0981a08…
|
noreply
|
534 |
break |
|
0981a08…
|
noreply
|
535 |
if output: |
|
0981a08…
|
noreply
|
536 |
print(output) |
|
1707c67…
|
noreply
|
537 |
|
|
1707c67…
|
noreply
|
538 |
self._save_history() |