Navegador

fix: ruff lint — format all files, remove unused vars, shorten long lines. Bumps version to 0.7.2.

lmata 2026-03-23 13:53 trunk
Commit e5e1a3b325ee737fd1bef364f6fa2a0ff1611a9ed5eee4ca0901a1d361dd72fd
53 files changed +1 -1 +1 -3 +4 -9 +2 -4 +2 -6 +1 -5 +67 -65 +21 -14 +15 -18 +1 -4 +1 -1 +3 -4 +6 -4 +1 -1 +27 -18 +1 -4 +12 -4 +1 -3 +1 -4 +2 -4 +55 -33 +2 -4 +43 -28 +35 -25 +17 -13 +1 -3 +18 -13 +2 -6 +6 -8 +9 -13 +8 -13 +10 -16 +5 -7 +1 +1 -3 +2 -6 +12 -5 +1 -4 +3 -9 +6 -15 +1 -3 +1 -3 +3 -9 +11 -16 +4 -6 +6 -18 +21 -36 +8 -27 +2 -6 +3 -9 +2 -5 +54 -43 +1 -1
~ navegador/__init__.py ~ navegador/adr.py ~ navegador/analysis/testmap.py ~ navegador/api_schema.py ~ navegador/churn.py ~ navegador/cicd.py ~ navegador/cli/commands.py ~ navegador/cluster/core.py ~ navegador/cluster/fossil_live.py ~ navegador/cluster/locking.py ~ navegador/cluster/messaging.py ~ navegador/cluster/partitioning.py ~ navegador/cluster/pubsub.py ~ navegador/cluster/sessions.py ~ navegador/cluster/taskqueue.py ~ navegador/codeowners.py ~ navegador/completions.py ~ navegador/context/loader.py ~ navegador/diff.py ~ navegador/editor.py ~ navegador/enrichment/express.py ~ navegador/enrichment/fastapi.py ~ navegador/enrichment/react.py ~ navegador/enrichment/react_native.py ~ navegador/explorer/server.py ~ navegador/explorer/templates.py ~ navegador/graph/export.py ~ navegador/graph/migrations.py ~ navegador/ingestion/c.py ~ navegador/ingestion/cpp.py ~ navegador/ingestion/csharp.py ~ navegador/ingestion/kotlin.py ~ navegador/ingestion/optimization.py ~ navegador/ingestion/parser.py ~ navegador/ingestion/php.py ~ navegador/ingestion/ruby.py ~ navegador/ingestion/swift.py ~ navegador/intelligence/community.py ~ navegador/intelligence/docgen.py ~ navegador/intelligence/nlp.py ~ navegador/intelligence/search.py ~ navegador/llm.py ~ navegador/mcp/security.py ~ navegador/mcp/server.py ~ navegador/monorepo.py ~ navegador/multirepo.py ~ navegador/planopticon_pipeline.py ~ navegador/pm.py ~ navegador/refactor.py ~ navegador/sdk.py ~ navegador/security.py ~ navegador/vcs.py ~ pyproject.toml
--- navegador/__init__.py
+++ navegador/__init__.py
@@ -1,10 +1,10 @@
11
"""
22
Navegador — AST + knowledge graph context engine for AI coding agents.
33
"""
44
5
-__version__ = "0.7.1"
5
+__version__ = "0.7.2"
66
__author__ = "CONFLICT LLC"
77
88
from navegador.sdk import Navegador
99
1010
__all__ = ["Navegador"]
1111
--- navegador/__init__.py
+++ navegador/__init__.py
@@ -1,10 +1,10 @@
1 """
2 Navegador — AST + knowledge graph context engine for AI coding agents.
3 """
4
5 __version__ = "0.7.1"
6 __author__ = "CONFLICT LLC"
7
8 from navegador.sdk import Navegador
9
10 __all__ = ["Navegador"]
11
--- navegador/__init__.py
+++ navegador/__init__.py
@@ -1,10 +1,10 @@
1 """
2 Navegador — AST + knowledge graph context engine for AI coding agents.
3 """
4
5 __version__ = "0.7.2"
6 __author__ = "CONFLICT LLC"
7
8 from navegador.sdk import Navegador
9
10 __all__ = ["Navegador"]
11
--- navegador/adr.py
+++ navegador/adr.py
@@ -24,13 +24,11 @@
2424
logger = logging.getLogger(__name__)
2525
2626
# ── Regex helpers ─────────────────────────────────────────────────────────────
2727
2828
_H1 = re.compile(r"^#\s+(.+)$", re.MULTILINE)
29
-_STATUS = re.compile(
30
- r"^#{1,3}\s+Status\s*\n+(.+?)(?=\n#{1,3}\s|\Z)", re.MULTILINE | re.DOTALL
31
-)
29
+_STATUS = re.compile(r"^#{1,3}\s+Status\s*\n+(.+?)(?=\n#{1,3}\s|\Z)", re.MULTILINE | re.DOTALL)
3230
_RATIONALE = re.compile(
3331
r"^#{1,3}\s+Rationale\s*\n+(.+?)(?=\n#{1,3}\s|\Z)",
3432
re.MULTILINE | re.DOTALL,
3533
)
3634
_RATIONALE_FALLBACK = re.compile(
3735
--- navegador/adr.py
+++ navegador/adr.py
@@ -24,13 +24,11 @@
24 logger = logging.getLogger(__name__)
25
26 # ── Regex helpers ─────────────────────────────────────────────────────────────
27
28 _H1 = re.compile(r"^#\s+(.+)$", re.MULTILINE)
29 _STATUS = re.compile(
30 r"^#{1,3}\s+Status\s*\n+(.+?)(?=\n#{1,3}\s|\Z)", re.MULTILINE | re.DOTALL
31 )
32 _RATIONALE = re.compile(
33 r"^#{1,3}\s+Rationale\s*\n+(.+?)(?=\n#{1,3}\s|\Z)",
34 re.MULTILINE | re.DOTALL,
35 )
36 _RATIONALE_FALLBACK = re.compile(
37
--- navegador/adr.py
+++ navegador/adr.py
@@ -24,13 +24,11 @@
24 logger = logging.getLogger(__name__)
25
26 # ── Regex helpers ─────────────────────────────────────────────────────────────
27
28 _H1 = re.compile(r"^#\s+(.+)$", re.MULTILINE)
29 _STATUS = re.compile(r"^#{1,3}\s+Status\s*\n+(.+?)(?=\n#{1,3}\s|\Z)", re.MULTILINE | re.DOTALL)
 
 
30 _RATIONALE = re.compile(
31 r"^#{1,3}\s+Rationale\s*\n+(.+?)(?=\n#{1,3}\s|\Z)",
32 re.MULTILINE | re.DOTALL,
33 )
34 _RATIONALE_FALLBACK = re.compile(
35
--- navegador/analysis/testmap.py
+++ navegador/analysis/testmap.py
@@ -146,13 +146,11 @@
146146
test_file=test_file,
147147
prod_name=prod_name,
148148
prod_file=prod_file,
149149
prod_type=prod_type,
150150
source=(
151
- "calls"
152
- if self._resolve_via_calls(test_name, test_file)
153
- else "heuristic"
151
+ "calls" if self._resolve_via_calls(test_name, test_file) else "heuristic"
154152
),
155153
)
156154
links.append(link)
157155
# Persist the TESTS edge
158156
try:
@@ -183,17 +181,14 @@
183181
rows = result.result_set or []
184182
except Exception:
185183
return []
186184
187185
return [
188
- {"name": row[0] or "", "file_path": row[1] or "", "line_start": row[2]}
189
- for row in rows
186
+ {"name": row[0] or "", "file_path": row[1] or "", "line_start": row[2]} for row in rows
190187
]
191188
192
- def _resolve_via_calls(
193
- self, test_name: str, test_file: str
194
- ) -> tuple[str, str, str] | None:
189
+ def _resolve_via_calls(self, test_name: str, test_file: str) -> tuple[str, str, str] | None:
195190
"""Return (type, name, file_path) of the first non-test callee, or None."""
196191
try:
197192
result = self.store.query(
198193
_CALLS_FROM_TEST, {"test_name": test_name, "file_path": test_file}
199194
)
@@ -213,11 +208,11 @@
213208
test_validate_token → validate_token, then validate
214209
"""
215210
if not test_name.startswith("test_"):
216211
return None
217212
218
- stripped = test_name[len("test_"):]
213
+ stripped = test_name[len("test_") :]
219214
parts = stripped.split("_")
220215
221216
# Try full stripped name first, then progressively shorter prefixes
222217
candidates = []
223218
for i in range(len(parts), 0, -1):
224219
--- navegador/analysis/testmap.py
+++ navegador/analysis/testmap.py
@@ -146,13 +146,11 @@
146 test_file=test_file,
147 prod_name=prod_name,
148 prod_file=prod_file,
149 prod_type=prod_type,
150 source=(
151 "calls"
152 if self._resolve_via_calls(test_name, test_file)
153 else "heuristic"
154 ),
155 )
156 links.append(link)
157 # Persist the TESTS edge
158 try:
@@ -183,17 +181,14 @@
183 rows = result.result_set or []
184 except Exception:
185 return []
186
187 return [
188 {"name": row[0] or "", "file_path": row[1] or "", "line_start": row[2]}
189 for row in rows
190 ]
191
192 def _resolve_via_calls(
193 self, test_name: str, test_file: str
194 ) -> tuple[str, str, str] | None:
195 """Return (type, name, file_path) of the first non-test callee, or None."""
196 try:
197 result = self.store.query(
198 _CALLS_FROM_TEST, {"test_name": test_name, "file_path": test_file}
199 )
@@ -213,11 +208,11 @@
213 test_validate_token → validate_token, then validate
214 """
215 if not test_name.startswith("test_"):
216 return None
217
218 stripped = test_name[len("test_"):]
219 parts = stripped.split("_")
220
221 # Try full stripped name first, then progressively shorter prefixes
222 candidates = []
223 for i in range(len(parts), 0, -1):
224
--- navegador/analysis/testmap.py
+++ navegador/analysis/testmap.py
@@ -146,13 +146,11 @@
146 test_file=test_file,
147 prod_name=prod_name,
148 prod_file=prod_file,
149 prod_type=prod_type,
150 source=(
151 "calls" if self._resolve_via_calls(test_name, test_file) else "heuristic"
 
 
152 ),
153 )
154 links.append(link)
155 # Persist the TESTS edge
156 try:
@@ -183,17 +181,14 @@
181 rows = result.result_set or []
182 except Exception:
183 return []
184
185 return [
186 {"name": row[0] or "", "file_path": row[1] or "", "line_start": row[2]} for row in rows
 
187 ]
188
189 def _resolve_via_calls(self, test_name: str, test_file: str) -> tuple[str, str, str] | None:
 
 
190 """Return (type, name, file_path) of the first non-test callee, or None."""
191 try:
192 result = self.store.query(
193 _CALLS_FROM_TEST, {"test_name": test_name, "file_path": test_file}
194 )
@@ -213,11 +208,11 @@
208 test_validate_token → validate_token, then validate
209 """
210 if not test_name.startswith("test_"):
211 return None
212
213 stripped = test_name[len("test_") :]
214 parts = stripped.split("_")
215
216 # Try full stripped name first, then progressively shorter prefixes
217 candidates = []
218 for i in range(len(parts), 0, -1):
219
--- navegador/api_schema.py
+++ navegador/api_schema.py
@@ -18,11 +18,10 @@
1818
import logging
1919
import re
2020
from pathlib import Path
2121
from typing import Any
2222
23
-from navegador.graph.schema import EdgeType, NodeLabel
2423
from navegador.graph.store import GraphStore
2524
2625
logger = logging.getLogger(__name__)
2726
2827
# ── New node label for API endpoints ─────────────────────────────────────────
@@ -94,13 +93,11 @@
9493
)
9594
endpoints += 1
9695
9796
# ── Component schemas / definitions ───────────────────────────────────
9897
component_schemas = (
99
- (spec.get("components") or {}).get("schemas")
100
- or spec.get("definitions")
101
- or {}
98
+ (spec.get("components") or {}).get("schemas") or spec.get("definitions") or {}
10299
)
103100
for schema_name, schema_body in component_schemas.items():
104101
if not isinstance(schema_body, dict):
105102
continue
106103
description = schema_body.get("description") or ""
@@ -231,10 +228,11 @@
231228
Sufficient for the simple flat/nested structure of OpenAPI specs.
232229
Falls back to PyYAML if available.
233230
"""
234231
try:
235232
import yaml # type: ignore[import]
233
+
236234
return yaml.safe_load(text)
237235
except ImportError:
238236
pass
239237
240238
# Minimal hand-rolled YAML → dict for simple key: value structures
241239
--- navegador/api_schema.py
+++ navegador/api_schema.py
@@ -18,11 +18,10 @@
18 import logging
19 import re
20 from pathlib import Path
21 from typing import Any
22
23 from navegador.graph.schema import EdgeType, NodeLabel
24 from navegador.graph.store import GraphStore
25
26 logger = logging.getLogger(__name__)
27
28 # ── New node label for API endpoints ─────────────────────────────────────────
@@ -94,13 +93,11 @@
94 )
95 endpoints += 1
96
97 # ── Component schemas / definitions ───────────────────────────────────
98 component_schemas = (
99 (spec.get("components") or {}).get("schemas")
100 or spec.get("definitions")
101 or {}
102 )
103 for schema_name, schema_body in component_schemas.items():
104 if not isinstance(schema_body, dict):
105 continue
106 description = schema_body.get("description") or ""
@@ -231,10 +228,11 @@
231 Sufficient for the simple flat/nested structure of OpenAPI specs.
232 Falls back to PyYAML if available.
233 """
234 try:
235 import yaml # type: ignore[import]
 
236 return yaml.safe_load(text)
237 except ImportError:
238 pass
239
240 # Minimal hand-rolled YAML → dict for simple key: value structures
241
--- navegador/api_schema.py
+++ navegador/api_schema.py
@@ -18,11 +18,10 @@
18 import logging
19 import re
20 from pathlib import Path
21 from typing import Any
22
 
23 from navegador.graph.store import GraphStore
24
25 logger = logging.getLogger(__name__)
26
27 # ── New node label for API endpoints ─────────────────────────────────────────
@@ -94,13 +93,11 @@
93 )
94 endpoints += 1
95
96 # ── Component schemas / definitions ───────────────────────────────────
97 component_schemas = (
98 (spec.get("components") or {}).get("schemas") or spec.get("definitions") or {}
 
 
99 )
100 for schema_name, schema_body in component_schemas.items():
101 if not isinstance(schema_body, dict):
102 continue
103 description = schema_body.get("description") or ""
@@ -231,10 +228,11 @@
228 Sufficient for the simple flat/nested structure of OpenAPI specs.
229 Falls back to PyYAML if available.
230 """
231 try:
232 import yaml # type: ignore[import]
233
234 return yaml.safe_load(text)
235 except ImportError:
236 pass
237
238 # Minimal hand-rolled YAML → dict for simple key: value structures
239
--- navegador/churn.py
+++ navegador/churn.py
@@ -25,11 +25,10 @@
2525
from collections import defaultdict
2626
from dataclasses import dataclass
2727
from itertools import combinations
2828
from pathlib import Path
2929
30
-
3130
# ── Data models ───────────────────────────────────────────────────────────────
3231
3332
3433
@dataclass
3534
class ChurnEntry:
@@ -255,21 +254,18 @@
255254
couplings_written = 0
256255
257256
# -- Update File node churn scores ------------------------------------
258257
for entry in self.file_churn():
259258
cypher = (
260
- "MATCH (f:File {file_path: $fp}) "
261
- "SET f.churn_score = $score, f.lines_changed = $lc"
259
+ "MATCH (f:File {file_path: $fp}) SET f.churn_score = $score, f.lines_changed = $lc"
262260
)
263261
result = store.query(
264262
cypher,
265263
{"fp": entry.file_path, "score": entry.commit_count, "lc": entry.lines_changed},
266264
)
267265
# FalkorDB returns stats; count rows affected if available
268
- if getattr(result, "nodes_modified", None) or getattr(
269
- result, "properties_set", None
270
- ):
266
+ if getattr(result, "nodes_modified", None) or getattr(result, "properties_set", None):
271267
churn_updated += 1
272268
else:
273269
# Fallback: assume the match succeeded if no error was raised
274270
churn_updated += 1
275271
276272
--- navegador/churn.py
+++ navegador/churn.py
@@ -25,11 +25,10 @@
25 from collections import defaultdict
26 from dataclasses import dataclass
27 from itertools import combinations
28 from pathlib import Path
29
30
31 # ── Data models ───────────────────────────────────────────────────────────────
32
33
34 @dataclass
35 class ChurnEntry:
@@ -255,21 +254,18 @@
255 couplings_written = 0
256
257 # -- Update File node churn scores ------------------------------------
258 for entry in self.file_churn():
259 cypher = (
260 "MATCH (f:File {file_path: $fp}) "
261 "SET f.churn_score = $score, f.lines_changed = $lc"
262 )
263 result = store.query(
264 cypher,
265 {"fp": entry.file_path, "score": entry.commit_count, "lc": entry.lines_changed},
266 )
267 # FalkorDB returns stats; count rows affected if available
268 if getattr(result, "nodes_modified", None) or getattr(
269 result, "properties_set", None
270 ):
271 churn_updated += 1
272 else:
273 # Fallback: assume the match succeeded if no error was raised
274 churn_updated += 1
275
276
--- navegador/churn.py
+++ navegador/churn.py
@@ -25,11 +25,10 @@
25 from collections import defaultdict
26 from dataclasses import dataclass
27 from itertools import combinations
28 from pathlib import Path
29
 
30 # ── Data models ───────────────────────────────────────────────────────────────
31
32
33 @dataclass
34 class ChurnEntry:
@@ -255,21 +254,18 @@
254 couplings_written = 0
255
256 # -- Update File node churn scores ------------------------------------
257 for entry in self.file_churn():
258 cypher = (
259 "MATCH (f:File {file_path: $fp}) SET f.churn_score = $score, f.lines_changed = $lc"
 
260 )
261 result = store.query(
262 cypher,
263 {"fp": entry.file_path, "score": entry.commit_count, "lc": entry.lines_changed},
264 )
265 # FalkorDB returns stats; count rows affected if available
266 if getattr(result, "nodes_modified", None) or getattr(result, "properties_set", None):
 
 
267 churn_updated += 1
268 else:
269 # Fallback: assume the match succeeded if no error was raised
270 churn_updated += 1
271
272
--- navegador/cicd.py
+++ navegador/cicd.py
@@ -10,13 +10,11 @@
1010
1111
import json
1212
import os
1313
import sys
1414
from dataclasses import dataclass, field
15
-from pathlib import Path
1615
from typing import Any
17
-
1816
1917
# ── Exit codes ────────────────────────────────────────────────────────────────
2018
2119
EXIT_SUCCESS = 0
2220
EXIT_ERROR = 1
@@ -176,13 +174,11 @@
176174
summary_path = os.environ.get("GITHUB_STEP_SUMMARY")
177175
if not summary_path:
178176
return
179177
180178
lines: list[str] = []
181
- status_icon = {"success": "✅", "warning": "⚠️", "error": "❌"}.get(
182
- payload["status"], "ℹ️"
183
- )
179
+ status_icon = {"success": "✅", "warning": "⚠️", "error": "❌"}.get(payload["status"], "ℹ️")
184180
lines.append(f"## Navegador — {status_icon} {payload['status'].capitalize()}\n")
185181
186182
if payload.get("data"):
187183
lines.append("### Stats\n")
188184
lines.append("| Key | Value |")
189185
--- navegador/cicd.py
+++ navegador/cicd.py
@@ -10,13 +10,11 @@
10
11 import json
12 import os
13 import sys
14 from dataclasses import dataclass, field
15 from pathlib import Path
16 from typing import Any
17
18
19 # ── Exit codes ────────────────────────────────────────────────────────────────
20
21 EXIT_SUCCESS = 0
22 EXIT_ERROR = 1
@@ -176,13 +174,11 @@
176 summary_path = os.environ.get("GITHUB_STEP_SUMMARY")
177 if not summary_path:
178 return
179
180 lines: list[str] = []
181 status_icon = {"success": "✅", "warning": "⚠️", "error": "❌"}.get(
182 payload["status"], "ℹ️"
183 )
184 lines.append(f"## Navegador — {status_icon} {payload['status'].capitalize()}\n")
185
186 if payload.get("data"):
187 lines.append("### Stats\n")
188 lines.append("| Key | Value |")
189
--- navegador/cicd.py
+++ navegador/cicd.py
@@ -10,13 +10,11 @@
10
11 import json
12 import os
13 import sys
14 from dataclasses import dataclass, field
 
15 from typing import Any
 
16
17 # ── Exit codes ────────────────────────────────────────────────────────────────
18
19 EXIT_SUCCESS = 0
20 EXIT_ERROR = 1
@@ -176,13 +174,11 @@
174 summary_path = os.environ.get("GITHUB_STEP_SUMMARY")
175 if not summary_path:
176 return
177
178 lines: list[str] = []
179 status_icon = {"success": "✅", "warning": "⚠️", "error": "❌"}.get(payload["status"], "ℹ️")
 
 
180 lines.append(f"## Navegador — {status_icon} {payload['status'].capitalize()}\n")
181
182 if payload.get("data"):
183 lines.append("### Stats\n")
184 lines.append("| Key | Value |")
185
--- navegador/cli/commands.py
+++ navegador/cli/commands.py
@@ -142,12 +142,21 @@
142142
@click.option(
143143
"--monorepo",
144144
is_flag=True,
145145
help="Detect and ingest as a monorepo workspace (Turborepo, Nx, Yarn, pnpm, Cargo, Go).",
146146
)
147
-def ingest(repo_path: str, db: str, clear: bool, incremental: bool, watch: bool,
148
- interval: float, as_json: bool, redact: bool, monorepo: bool):
147
+def ingest(
148
+ repo_path: str,
149
+ db: str,
150
+ clear: bool,
151
+ incremental: bool,
152
+ watch: bool,
153
+ interval: float,
154
+ as_json: bool,
155
+ redact: bool,
156
+ monorepo: bool,
157
+):
149158
"""Ingest a repository's code into the graph (AST + call graph)."""
150159
if monorepo:
151160
from navegador.monorepo import MonorepoIngester
152161
153162
store = _get_store(db)
@@ -177,13 +186,11 @@
177186
178187
def _on_cycle(stats):
179188
changed = stats["files"]
180189
skipped = stats["skipped"]
181190
if changed:
182
- console.print(
183
- f" [green]{changed} changed[/green], {skipped} unchanged"
184
- )
191
+ console.print(f" [green]{changed} changed[/green], {skipped} unchanged")
185192
return True # keep watching
186193
187194
try:
188195
ingester.watch(repo_path, interval=interval, callback=_on_cycle)
189196
except KeyboardInterrupt:
@@ -727,12 +734,14 @@
727734
def migrate(db: str, check: bool):
728735
"""Apply pending schema migrations to the graph."""
729736
from navegador.graph.migrations import (
730737
CURRENT_SCHEMA_VERSION,
731738
get_schema_version,
732
- migrate as do_migrate,
733739
needs_migration,
740
+ )
741
+ from navegador.graph.migrations import (
742
+ migrate as do_migrate,
734743
)
735744
736745
store = _get_store(db)
737746
738747
if check:
@@ -875,15 +884,11 @@
875884
param_hint="--framework",
876885
)
877886
targets = {framework_name: available[framework_name]}
878887
else:
879888
# Auto-detect: only run enrichers whose detect() returns True.
880
- targets = {
881
- name: cls
882
- for name, cls in available.items()
883
- if cls(store).detect()
884
- }
889
+ targets = {name: cls for name, cls in available.items() if cls(store).detect()}
885890
if not targets and not as_json:
886891
console.print("[yellow]No frameworks detected in the graph.[/yellow]")
887892
return
888893
889894
all_results: dict[str, dict] = {}
@@ -1099,13 +1104,11 @@
10991104
try:
11001105
store = _get_store(db)
11011106
current = get_schema_version(store)
11021107
data = {"schema_version": current, "current_schema_version": CURRENT_SCHEMA_VERSION}
11031108
if needs_migration(store):
1104
- reporter.add_warning(
1105
- f"Schema migration needed: v{current} → v{CURRENT_SCHEMA_VERSION}"
1106
- )
1109
+ reporter.add_warning(f"Schema migration needed: v{current} → v{CURRENT_SCHEMA_VERSION}")
11071110
except Exception as exc: # noqa: BLE001
11081111
reporter.add_error(str(exc))
11091112
11101113
reporter.emit(data=data or None)
11111114
sys.exit(reporter.exit_code())
@@ -1151,13 +1154,11 @@
11511154
else:
11521155
line = get_eval_line(shell)
11531156
rc = rc_path or get_rc_path(shell)
11541157
console.print(f"Add the following line to [bold]{rc}[/bold]:\n")
11551158
click.echo(f" {line}")
1156
- console.print(
1157
- f"\nOr run: [bold]navegador completions {shell} --install[/bold]"
1158
- )
1159
+ console.print(f"\nOr run: [bold]navegador completions {shell} --install[/bold]")
11591160
11601161
11611162
# ── Churn / behavioural coupling ─────────────────────────────────────────────
11621163
11631164
@@ -1293,12 +1294,11 @@
12931294
"--read-only",
12941295
"read_only",
12951296
is_flag=True,
12961297
default=False,
12971298
help=(
1298
- "Start in read-only mode: disables ingest_repo and blocks write "
1299
- "operations in query_graph."
1299
+ "Start in read-only mode: disables ingest_repo and blocks write operations in query_graph."
13001300
),
13011301
)
13021302
def mcp(db: str, read_only: bool):
13031303
"""Start the MCP server for AI agent integration (stdio)."""
13041304
from mcp.server.stdio import stdio_server # type: ignore[import]
@@ -1337,13 +1337,11 @@
13371337
13381338
if as_json:
13391339
click.echo(json.dumps(result.to_dict(), indent=2))
13401340
return
13411341
1342
- console.print(
1343
- f"[bold]Blast radius:[/bold] [cyan]{name}[/cyan] (depth={depth})"
1344
- )
1342
+ console.print(f"[bold]Blast radius:[/bold] [cyan]{name}[/cyan] (depth={depth})")
13451343
if not result.affected_nodes:
13461344
console.print("[yellow]No affected nodes found.[/yellow]")
13471345
return
13481346
13491347
table = Table(title=f"Affected nodes ({len(result.affected_nodes)})")
@@ -1350,13 +1348,11 @@
13501348
table.add_column("Type", style="cyan")
13511349
table.add_column("Name", style="bold")
13521350
table.add_column("File")
13531351
table.add_column("Line", justify="right")
13541352
for node in result.affected_nodes:
1355
- table.add_row(
1356
- node["type"], node["name"], node["file_path"], str(node["line_start"] or "")
1357
- )
1353
+ table.add_row(node["type"], node["name"], node["file_path"], str(node["line_start"] or ""))
13581354
console.print(table)
13591355
13601356
if result.affected_files:
13611357
console.print(f"\n[bold]Affected files ({len(result.affected_files)}):[/bold]")
13621358
for fp in result.affected_files:
@@ -1393,18 +1389,16 @@
13931389
13941390
if not chains:
13951391
console.print(f"[yellow]No call chains found from[/yellow] [cyan]{name}[/cyan].")
13961392
return
13971393
1398
- console.print(
1399
- f"[bold]Call chains from[/bold] [cyan]{name}[/cyan] — {len(chains)} path(s)"
1400
- )
1394
+ console.print(f"[bold]Call chains from[/bold] [cyan]{name}[/cyan] — {len(chains)} path(s)")
14011395
for i, chain in enumerate(chains, 1):
14021396
steps = chain.to_list()
1403
- path_str = " → ".join(
1404
- [steps[0]["caller"]] + [s["callee"] for s in steps]
1405
- ) if steps else name
1397
+ path_str = (
1398
+ " → ".join([steps[0]["caller"]] + [s["callee"] for s in steps]) if steps else name
1399
+ )
14061400
console.print(f" {i}. {path_str}")
14071401
14081402
14091403
# ── ANALYSIS: dead code ───────────────────────────────────────────────────────
14101404
@@ -1433,11 +1427,13 @@
14331427
f"{summary['unreachable_classes']} dead classes, "
14341428
f"{summary['orphan_files']} orphan files"
14351429
)
14361430
14371431
if report.unreachable_functions:
1438
- fn_table = Table(title=f"Unreachable functions/methods ({len(report.unreachable_functions)})")
1432
+ fn_table = Table(
1433
+ title=f"Unreachable functions/methods ({len(report.unreachable_functions)})"
1434
+ )
14391435
fn_table.add_column("Type", style="cyan")
14401436
fn_table.add_column("Name", style="bold")
14411437
fn_table.add_column("File")
14421438
fn_table.add_column("Line", justify="right")
14431439
for fn in report.unreachable_functions:
@@ -1507,14 +1503,14 @@
15071503
# ── ANALYSIS: cycles ──────────────────────────────────────────────────────────
15081504
15091505
15101506
@main.command()
15111507
@DB_OPTION
1512
-@click.option("--imports", "check_imports", is_flag=True, default=False,
1513
- help="Check import cycles only.")
1514
-@click.option("--calls", "check_calls", is_flag=True, default=False,
1515
- help="Check call cycles only.")
1508
+@click.option(
1509
+ "--imports", "check_imports", is_flag=True, default=False, help="Check import cycles only."
1510
+)
1511
+@click.option("--calls", "check_calls", is_flag=True, default=False, help="Check call cycles only.")
15161512
@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
15171513
def cycles(db: str, check_imports: bool, check_calls: bool, as_json: bool):
15181514
"""Detect circular dependencies in import and call graphs.
15191515
15201516
By default checks both import cycles and call cycles.
@@ -1529,13 +1525,11 @@
15291525
import_cycles = detector.detect_import_cycles() if run_imports else []
15301526
call_cycles = detector.detect_call_cycles() if run_calls else []
15311527
15321528
if as_json:
15331529
click.echo(
1534
- json.dumps(
1535
- {"import_cycles": import_cycles, "call_cycles": call_cycles}, indent=2
1536
- )
1530
+ json.dumps({"import_cycles": import_cycles, "call_cycles": call_cycles}, indent=2)
15371531
)
15381532
return
15391533
15401534
if not import_cycles and not call_cycles:
15411535
console.print("[green]No circular dependencies found.[/green]")
@@ -1816,20 +1810,26 @@
18161810
def pm():
18171811
"""Ingest project management tickets (GitHub Issues, Linear, Jira)."""
18181812
18191813
18201814
@pm.command("ingest")
1821
-@click.option("--github", "github_repo", default="", metavar="OWNER/REPO",
1822
- help="GitHub repository in owner/repo format.")
1823
-@click.option("--token", default="", envvar="GITHUB_TOKEN",
1824
- help="GitHub personal access token.")
1825
-@click.option("--state", default="open",
1826
- type=click.Choice(["open", "closed", "all"]),
1827
- show_default=True,
1828
- help="GitHub issue state filter.")
1829
-@click.option("--limit", default=100, show_default=True,
1830
- help="Maximum number of issues to fetch.")
1815
+@click.option(
1816
+ "--github",
1817
+ "github_repo",
1818
+ default="",
1819
+ metavar="OWNER/REPO",
1820
+ help="GitHub repository in owner/repo format.",
1821
+)
1822
+@click.option("--token", default="", envvar="GITHUB_TOKEN", help="GitHub personal access token.")
1823
+@click.option(
1824
+ "--state",
1825
+ default="open",
1826
+ type=click.Choice(["open", "closed", "all"]),
1827
+ show_default=True,
1828
+ help="GitHub issue state filter.",
1829
+)
1830
+@click.option("--limit", default=100, show_default=True, help="Maximum number of issues to fetch.")
18311831
@DB_OPTION
18321832
@click.option("--json", "as_json", is_flag=True)
18331833
def pm_ingest(github_repo: str, token: str, state: str, limit: int, db: str, as_json: bool):
18341834
"""Ingest tickets from a PM tool into the knowledge graph.
18351835
@@ -1838,11 +1838,13 @@
18381838
navegador pm ingest --github owner/repo
18391839
navegador pm ingest --github owner/repo --token ghp_...
18401840
navegador pm ingest --github owner/repo --state all --limit 200
18411841
"""
18421842
if not github_repo:
1843
- raise click.UsageError("Provide --github <owner/repo> (more backends coming in a future release).")
1843
+ raise click.UsageError(
1844
+ "Provide --github <owner/repo> (more backends coming in a future release)."
1845
+ )
18441846
18451847
from navegador.pm import TicketIngester
18461848
18471849
ing = TicketIngester(_get_store(db))
18481850
stats = ing.ingest_github_issues(github_repo, token=token, state=state, limit=limit)
@@ -1867,11 +1869,12 @@
18671869
18681870
18691871
@deps.command("ingest")
18701872
@click.argument("path", type=click.Path(exists=True))
18711873
@click.option(
1872
- "--type", "dep_type",
1874
+ "--type",
1875
+ "dep_type",
18731876
type=click.Choice(["auto", "npm", "pip", "cargo"]),
18741877
default="auto",
18751878
show_default=True,
18761879
help="Manifest type. auto detects from filename.",
18771880
)
@@ -1922,12 +1925,11 @@
19221925
19231926
if as_json:
19241927
click.echo(json.dumps(stats, indent=2))
19251928
else:
19261929
console.print(
1927
- f"[green]Dependencies ingested[/green] ({dep_type}): "
1928
- f"{stats['packages']} packages"
1930
+ f"[green]Dependencies ingested[/green] ({dep_type}): {stats['packages']} packages"
19291931
)
19301932
19311933
19321934
# ── Submodules: ingest parent + submodules (#61) ──────────────────────────────
19331935
@@ -2029,13 +2031,11 @@
20292031
raise click.UsageError("Provide at least one NAME=PATH repo.")
20302032
20312033
wm = WorkspaceManager(_get_store(db), mode=WorkspaceMode(mode))
20322034
for repo_spec in repos:
20332035
if "=" not in repo_spec:
2034
- raise click.UsageError(
2035
- f"Invalid repo spec {repo_spec!r}. Expected NAME=PATH format."
2036
- )
2036
+ raise click.UsageError(f"Invalid repo spec {repo_spec!r}. Expected NAME=PATH format.")
20372037
name, path = repo_spec.split("=", 1)
20382038
wm.add_repo(name.strip(), path.strip())
20392039
20402040
stats = wm.ingest_all(clear=clear)
20412041
@@ -2096,11 +2096,13 @@
20962096
from navegador.intelligence.search import SemanticSearch
20972097
from navegador.llm import auto_provider, get_provider
20982098
20992099
store = _get_store(db)
21002100
provider = (
2101
- get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model)
2101
+ get_provider(llm_provider, model=llm_model)
2102
+ if llm_provider
2103
+ else auto_provider(model=llm_model)
21022104
)
21032105
ss = SemanticSearch(store, provider)
21042106
21052107
if do_index:
21062108
n = ss.index()
@@ -2220,11 +2222,13 @@
22202222
from navegador.intelligence.nlp import NLPEngine
22212223
from navegador.llm import auto_provider, get_provider
22222224
22232225
store = _get_store(db)
22242226
provider = (
2225
- get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model)
2227
+ get_provider(llm_provider, model=llm_model)
2228
+ if llm_provider
2229
+ else auto_provider(model=llm_model)
22262230
)
22272231
engine = NLPEngine(store, provider)
22282232
22292233
with console.status("[bold]Thinking...[/bold]"):
22302234
answer = engine.natural_query(question)
@@ -2244,13 +2248,11 @@
22442248
default="",
22452249
help="LLM provider (anthropic, openai, ollama). Auto-detected if omitted.",
22462250
)
22472251
@click.option("--model", "llm_model", default="", help="LLM model name.")
22482252
@click.option("--file", "file_path", default="", help="Narrow to a specific file.")
2249
-def generate_docs_cmd(
2250
- name: str, db: str, llm_provider: str, llm_model: str, file_path: str
2251
-):
2253
+def generate_docs_cmd(name: str, db: str, llm_provider: str, llm_model: str, file_path: str):
22522254
"""Generate LLM-powered documentation for a named symbol.
22532255
22542256
\b
22552257
Examples:
22562258
navegador generate-docs authenticate_user
@@ -2259,11 +2261,13 @@
22592261
from navegador.intelligence.nlp import NLPEngine
22602262
from navegador.llm import auto_provider, get_provider
22612263
22622264
store = _get_store(db)
22632265
provider = (
2264
- get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model)
2266
+ get_provider(llm_provider, model=llm_model)
2267
+ if llm_provider
2268
+ else auto_provider(model=llm_model)
22652269
)
22662270
engine = NLPEngine(store, provider)
22672271
22682272
with console.status("[bold]Generating docs...[/bold]"):
22692273
docs = engine.generate_docs(name, file_path=file_path)
@@ -2284,13 +2288,11 @@
22842288
default="",
22852289
help="LLM provider (anthropic, openai, ollama). Template mode if omitted.",
22862290
)
22872291
@click.option("--model", "llm_model", default="", help="LLM model name.")
22882292
@click.option("--json", "as_json", is_flag=True, help="Output as JSON (wraps markdown in a dict).")
2289
-def docs(
2290
- target: str, db: str, project: bool, llm_provider: str, llm_model: str, as_json: bool
2291
-):
2293
+def docs(target: str, db: str, project: bool, llm_provider: str, llm_model: str, as_json: bool):
22922294
"""Generate markdown documentation from the graph.
22932295
22942296
TARGET can be a file path or a module name (dotted or partial).
22952297
Use --project to generate full project docs instead.
22962298
22972299
--- navegador/cli/commands.py
+++ navegador/cli/commands.py
@@ -142,12 +142,21 @@
142 @click.option(
143 "--monorepo",
144 is_flag=True,
145 help="Detect and ingest as a monorepo workspace (Turborepo, Nx, Yarn, pnpm, Cargo, Go).",
146 )
147 def ingest(repo_path: str, db: str, clear: bool, incremental: bool, watch: bool,
148 interval: float, as_json: bool, redact: bool, monorepo: bool):
 
 
 
 
 
 
 
 
 
149 """Ingest a repository's code into the graph (AST + call graph)."""
150 if monorepo:
151 from navegador.monorepo import MonorepoIngester
152
153 store = _get_store(db)
@@ -177,13 +186,11 @@
177
178 def _on_cycle(stats):
179 changed = stats["files"]
180 skipped = stats["skipped"]
181 if changed:
182 console.print(
183 f" [green]{changed} changed[/green], {skipped} unchanged"
184 )
185 return True # keep watching
186
187 try:
188 ingester.watch(repo_path, interval=interval, callback=_on_cycle)
189 except KeyboardInterrupt:
@@ -727,12 +734,14 @@
727 def migrate(db: str, check: bool):
728 """Apply pending schema migrations to the graph."""
729 from navegador.graph.migrations import (
730 CURRENT_SCHEMA_VERSION,
731 get_schema_version,
732 migrate as do_migrate,
733 needs_migration,
 
 
 
734 )
735
736 store = _get_store(db)
737
738 if check:
@@ -875,15 +884,11 @@
875 param_hint="--framework",
876 )
877 targets = {framework_name: available[framework_name]}
878 else:
879 # Auto-detect: only run enrichers whose detect() returns True.
880 targets = {
881 name: cls
882 for name, cls in available.items()
883 if cls(store).detect()
884 }
885 if not targets and not as_json:
886 console.print("[yellow]No frameworks detected in the graph.[/yellow]")
887 return
888
889 all_results: dict[str, dict] = {}
@@ -1099,13 +1104,11 @@
1099 try:
1100 store = _get_store(db)
1101 current = get_schema_version(store)
1102 data = {"schema_version": current, "current_schema_version": CURRENT_SCHEMA_VERSION}
1103 if needs_migration(store):
1104 reporter.add_warning(
1105 f"Schema migration needed: v{current} → v{CURRENT_SCHEMA_VERSION}"
1106 )
1107 except Exception as exc: # noqa: BLE001
1108 reporter.add_error(str(exc))
1109
1110 reporter.emit(data=data or None)
1111 sys.exit(reporter.exit_code())
@@ -1151,13 +1154,11 @@
1151 else:
1152 line = get_eval_line(shell)
1153 rc = rc_path or get_rc_path(shell)
1154 console.print(f"Add the following line to [bold]{rc}[/bold]:\n")
1155 click.echo(f" {line}")
1156 console.print(
1157 f"\nOr run: [bold]navegador completions {shell} --install[/bold]"
1158 )
1159
1160
1161 # ── Churn / behavioural coupling ─────────────────────────────────────────────
1162
1163
@@ -1293,12 +1294,11 @@
1293 "--read-only",
1294 "read_only",
1295 is_flag=True,
1296 default=False,
1297 help=(
1298 "Start in read-only mode: disables ingest_repo and blocks write "
1299 "operations in query_graph."
1300 ),
1301 )
1302 def mcp(db: str, read_only: bool):
1303 """Start the MCP server for AI agent integration (stdio)."""
1304 from mcp.server.stdio import stdio_server # type: ignore[import]
@@ -1337,13 +1337,11 @@
1337
1338 if as_json:
1339 click.echo(json.dumps(result.to_dict(), indent=2))
1340 return
1341
1342 console.print(
1343 f"[bold]Blast radius:[/bold] [cyan]{name}[/cyan] (depth={depth})"
1344 )
1345 if not result.affected_nodes:
1346 console.print("[yellow]No affected nodes found.[/yellow]")
1347 return
1348
1349 table = Table(title=f"Affected nodes ({len(result.affected_nodes)})")
@@ -1350,13 +1348,11 @@
1350 table.add_column("Type", style="cyan")
1351 table.add_column("Name", style="bold")
1352 table.add_column("File")
1353 table.add_column("Line", justify="right")
1354 for node in result.affected_nodes:
1355 table.add_row(
1356 node["type"], node["name"], node["file_path"], str(node["line_start"] or "")
1357 )
1358 console.print(table)
1359
1360 if result.affected_files:
1361 console.print(f"\n[bold]Affected files ({len(result.affected_files)}):[/bold]")
1362 for fp in result.affected_files:
@@ -1393,18 +1389,16 @@
1393
1394 if not chains:
1395 console.print(f"[yellow]No call chains found from[/yellow] [cyan]{name}[/cyan].")
1396 return
1397
1398 console.print(
1399 f"[bold]Call chains from[/bold] [cyan]{name}[/cyan] — {len(chains)} path(s)"
1400 )
1401 for i, chain in enumerate(chains, 1):
1402 steps = chain.to_list()
1403 path_str = " → ".join(
1404 [steps[0]["caller"]] + [s["callee"] for s in steps]
1405 ) if steps else name
1406 console.print(f" {i}. {path_str}")
1407
1408
1409 # ── ANALYSIS: dead code ───────────────────────────────────────────────────────
1410
@@ -1433,11 +1427,13 @@
1433 f"{summary['unreachable_classes']} dead classes, "
1434 f"{summary['orphan_files']} orphan files"
1435 )
1436
1437 if report.unreachable_functions:
1438 fn_table = Table(title=f"Unreachable functions/methods ({len(report.unreachable_functions)})")
 
 
1439 fn_table.add_column("Type", style="cyan")
1440 fn_table.add_column("Name", style="bold")
1441 fn_table.add_column("File")
1442 fn_table.add_column("Line", justify="right")
1443 for fn in report.unreachable_functions:
@@ -1507,14 +1503,14 @@
1507 # ── ANALYSIS: cycles ──────────────────────────────────────────────────────────
1508
1509
1510 @main.command()
1511 @DB_OPTION
1512 @click.option("--imports", "check_imports", is_flag=True, default=False,
1513 help="Check import cycles only.")
1514 @click.option("--calls", "check_calls", is_flag=True, default=False,
1515 help="Check call cycles only.")
1516 @click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
1517 def cycles(db: str, check_imports: bool, check_calls: bool, as_json: bool):
1518 """Detect circular dependencies in import and call graphs.
1519
1520 By default checks both import cycles and call cycles.
@@ -1529,13 +1525,11 @@
1529 import_cycles = detector.detect_import_cycles() if run_imports else []
1530 call_cycles = detector.detect_call_cycles() if run_calls else []
1531
1532 if as_json:
1533 click.echo(
1534 json.dumps(
1535 {"import_cycles": import_cycles, "call_cycles": call_cycles}, indent=2
1536 )
1537 )
1538 return
1539
1540 if not import_cycles and not call_cycles:
1541 console.print("[green]No circular dependencies found.[/green]")
@@ -1816,20 +1810,26 @@
1816 def pm():
1817 """Ingest project management tickets (GitHub Issues, Linear, Jira)."""
1818
1819
1820 @pm.command("ingest")
1821 @click.option("--github", "github_repo", default="", metavar="OWNER/REPO",
1822 help="GitHub repository in owner/repo format.")
1823 @click.option("--token", default="", envvar="GITHUB_TOKEN",
1824 help="GitHub personal access token.")
1825 @click.option("--state", default="open",
1826 type=click.Choice(["open", "closed", "all"]),
1827 show_default=True,
1828 help="GitHub issue state filter.")
1829 @click.option("--limit", default=100, show_default=True,
1830 help="Maximum number of issues to fetch.")
 
 
 
 
 
 
1831 @DB_OPTION
1832 @click.option("--json", "as_json", is_flag=True)
1833 def pm_ingest(github_repo: str, token: str, state: str, limit: int, db: str, as_json: bool):
1834 """Ingest tickets from a PM tool into the knowledge graph.
1835
@@ -1838,11 +1838,13 @@
1838 navegador pm ingest --github owner/repo
1839 navegador pm ingest --github owner/repo --token ghp_...
1840 navegador pm ingest --github owner/repo --state all --limit 200
1841 """
1842 if not github_repo:
1843 raise click.UsageError("Provide --github <owner/repo> (more backends coming in a future release).")
 
 
1844
1845 from navegador.pm import TicketIngester
1846
1847 ing = TicketIngester(_get_store(db))
1848 stats = ing.ingest_github_issues(github_repo, token=token, state=state, limit=limit)
@@ -1867,11 +1869,12 @@
1867
1868
1869 @deps.command("ingest")
1870 @click.argument("path", type=click.Path(exists=True))
1871 @click.option(
1872 "--type", "dep_type",
 
1873 type=click.Choice(["auto", "npm", "pip", "cargo"]),
1874 default="auto",
1875 show_default=True,
1876 help="Manifest type. auto detects from filename.",
1877 )
@@ -1922,12 +1925,11 @@
1922
1923 if as_json:
1924 click.echo(json.dumps(stats, indent=2))
1925 else:
1926 console.print(
1927 f"[green]Dependencies ingested[/green] ({dep_type}): "
1928 f"{stats['packages']} packages"
1929 )
1930
1931
1932 # ── Submodules: ingest parent + submodules (#61) ──────────────────────────────
1933
@@ -2029,13 +2031,11 @@
2029 raise click.UsageError("Provide at least one NAME=PATH repo.")
2030
2031 wm = WorkspaceManager(_get_store(db), mode=WorkspaceMode(mode))
2032 for repo_spec in repos:
2033 if "=" not in repo_spec:
2034 raise click.UsageError(
2035 f"Invalid repo spec {repo_spec!r}. Expected NAME=PATH format."
2036 )
2037 name, path = repo_spec.split("=", 1)
2038 wm.add_repo(name.strip(), path.strip())
2039
2040 stats = wm.ingest_all(clear=clear)
2041
@@ -2096,11 +2096,13 @@
2096 from navegador.intelligence.search import SemanticSearch
2097 from navegador.llm import auto_provider, get_provider
2098
2099 store = _get_store(db)
2100 provider = (
2101 get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model)
 
 
2102 )
2103 ss = SemanticSearch(store, provider)
2104
2105 if do_index:
2106 n = ss.index()
@@ -2220,11 +2222,13 @@
2220 from navegador.intelligence.nlp import NLPEngine
2221 from navegador.llm import auto_provider, get_provider
2222
2223 store = _get_store(db)
2224 provider = (
2225 get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model)
 
 
2226 )
2227 engine = NLPEngine(store, provider)
2228
2229 with console.status("[bold]Thinking...[/bold]"):
2230 answer = engine.natural_query(question)
@@ -2244,13 +2248,11 @@
2244 default="",
2245 help="LLM provider (anthropic, openai, ollama). Auto-detected if omitted.",
2246 )
2247 @click.option("--model", "llm_model", default="", help="LLM model name.")
2248 @click.option("--file", "file_path", default="", help="Narrow to a specific file.")
2249 def generate_docs_cmd(
2250 name: str, db: str, llm_provider: str, llm_model: str, file_path: str
2251 ):
2252 """Generate LLM-powered documentation for a named symbol.
2253
2254 \b
2255 Examples:
2256 navegador generate-docs authenticate_user
@@ -2259,11 +2261,13 @@
2259 from navegador.intelligence.nlp import NLPEngine
2260 from navegador.llm import auto_provider, get_provider
2261
2262 store = _get_store(db)
2263 provider = (
2264 get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model)
 
 
2265 )
2266 engine = NLPEngine(store, provider)
2267
2268 with console.status("[bold]Generating docs...[/bold]"):
2269 docs = engine.generate_docs(name, file_path=file_path)
@@ -2284,13 +2288,11 @@
2284 default="",
2285 help="LLM provider (anthropic, openai, ollama). Template mode if omitted.",
2286 )
2287 @click.option("--model", "llm_model", default="", help="LLM model name.")
2288 @click.option("--json", "as_json", is_flag=True, help="Output as JSON (wraps markdown in a dict).")
2289 def docs(
2290 target: str, db: str, project: bool, llm_provider: str, llm_model: str, as_json: bool
2291 ):
2292 """Generate markdown documentation from the graph.
2293
2294 TARGET can be a file path or a module name (dotted or partial).
2295 Use --project to generate full project docs instead.
2296
2297
--- navegador/cli/commands.py
+++ navegador/cli/commands.py
@@ -142,12 +142,21 @@
142 @click.option(
143 "--monorepo",
144 is_flag=True,
145 help="Detect and ingest as a monorepo workspace (Turborepo, Nx, Yarn, pnpm, Cargo, Go).",
146 )
147 def ingest(
148 repo_path: str,
149 db: str,
150 clear: bool,
151 incremental: bool,
152 watch: bool,
153 interval: float,
154 as_json: bool,
155 redact: bool,
156 monorepo: bool,
157 ):
158 """Ingest a repository's code into the graph (AST + call graph)."""
159 if monorepo:
160 from navegador.monorepo import MonorepoIngester
161
162 store = _get_store(db)
@@ -177,13 +186,11 @@
186
187 def _on_cycle(stats):
188 changed = stats["files"]
189 skipped = stats["skipped"]
190 if changed:
191 console.print(f" [green]{changed} changed[/green], {skipped} unchanged")
 
 
192 return True # keep watching
193
194 try:
195 ingester.watch(repo_path, interval=interval, callback=_on_cycle)
196 except KeyboardInterrupt:
@@ -727,12 +734,14 @@
734 def migrate(db: str, check: bool):
735 """Apply pending schema migrations to the graph."""
736 from navegador.graph.migrations import (
737 CURRENT_SCHEMA_VERSION,
738 get_schema_version,
 
739 needs_migration,
740 )
741 from navegador.graph.migrations import (
742 migrate as do_migrate,
743 )
744
745 store = _get_store(db)
746
747 if check:
@@ -875,15 +884,11 @@
884 param_hint="--framework",
885 )
886 targets = {framework_name: available[framework_name]}
887 else:
888 # Auto-detect: only run enrichers whose detect() returns True.
889 targets = {name: cls for name, cls in available.items() if cls(store).detect()}
 
 
 
 
890 if not targets and not as_json:
891 console.print("[yellow]No frameworks detected in the graph.[/yellow]")
892 return
893
894 all_results: dict[str, dict] = {}
@@ -1099,13 +1104,11 @@
1104 try:
1105 store = _get_store(db)
1106 current = get_schema_version(store)
1107 data = {"schema_version": current, "current_schema_version": CURRENT_SCHEMA_VERSION}
1108 if needs_migration(store):
1109 reporter.add_warning(f"Schema migration needed: v{current} → v{CURRENT_SCHEMA_VERSION}")
 
 
1110 except Exception as exc: # noqa: BLE001
1111 reporter.add_error(str(exc))
1112
1113 reporter.emit(data=data or None)
1114 sys.exit(reporter.exit_code())
@@ -1151,13 +1154,11 @@
1154 else:
1155 line = get_eval_line(shell)
1156 rc = rc_path or get_rc_path(shell)
1157 console.print(f"Add the following line to [bold]{rc}[/bold]:\n")
1158 click.echo(f" {line}")
1159 console.print(f"\nOr run: [bold]navegador completions {shell} --install[/bold]")
 
 
1160
1161
1162 # ── Churn / behavioural coupling ─────────────────────────────────────────────
1163
1164
@@ -1293,12 +1294,11 @@
1294 "--read-only",
1295 "read_only",
1296 is_flag=True,
1297 default=False,
1298 help=(
1299 "Start in read-only mode: disables ingest_repo and blocks write operations in query_graph."
 
1300 ),
1301 )
1302 def mcp(db: str, read_only: bool):
1303 """Start the MCP server for AI agent integration (stdio)."""
1304 from mcp.server.stdio import stdio_server # type: ignore[import]
@@ -1337,13 +1337,11 @@
1337
1338 if as_json:
1339 click.echo(json.dumps(result.to_dict(), indent=2))
1340 return
1341
1342 console.print(f"[bold]Blast radius:[/bold] [cyan]{name}[/cyan] (depth={depth})")
 
 
1343 if not result.affected_nodes:
1344 console.print("[yellow]No affected nodes found.[/yellow]")
1345 return
1346
1347 table = Table(title=f"Affected nodes ({len(result.affected_nodes)})")
@@ -1350,13 +1348,11 @@
1348 table.add_column("Type", style="cyan")
1349 table.add_column("Name", style="bold")
1350 table.add_column("File")
1351 table.add_column("Line", justify="right")
1352 for node in result.affected_nodes:
1353 table.add_row(node["type"], node["name"], node["file_path"], str(node["line_start"] or ""))
 
 
1354 console.print(table)
1355
1356 if result.affected_files:
1357 console.print(f"\n[bold]Affected files ({len(result.affected_files)}):[/bold]")
1358 for fp in result.affected_files:
@@ -1393,18 +1389,16 @@
1389
1390 if not chains:
1391 console.print(f"[yellow]No call chains found from[/yellow] [cyan]{name}[/cyan].")
1392 return
1393
1394 console.print(f"[bold]Call chains from[/bold] [cyan]{name}[/cyan] — {len(chains)} path(s)")
 
 
1395 for i, chain in enumerate(chains, 1):
1396 steps = chain.to_list()
1397 path_str = (
1398 " → ".join([steps[0]["caller"]] + [s["callee"] for s in steps]) if steps else name
1399 )
1400 console.print(f" {i}. {path_str}")
1401
1402
1403 # ── ANALYSIS: dead code ───────────────────────────────────────────────────────
1404
@@ -1433,11 +1427,13 @@
1427 f"{summary['unreachable_classes']} dead classes, "
1428 f"{summary['orphan_files']} orphan files"
1429 )
1430
1431 if report.unreachable_functions:
1432 fn_table = Table(
1433 title=f"Unreachable functions/methods ({len(report.unreachable_functions)})"
1434 )
1435 fn_table.add_column("Type", style="cyan")
1436 fn_table.add_column("Name", style="bold")
1437 fn_table.add_column("File")
1438 fn_table.add_column("Line", justify="right")
1439 for fn in report.unreachable_functions:
@@ -1507,14 +1503,14 @@
1503 # ── ANALYSIS: cycles ──────────────────────────────────────────────────────────
1504
1505
1506 @main.command()
1507 @DB_OPTION
1508 @click.option(
1509 "--imports", "check_imports", is_flag=True, default=False, help="Check import cycles only."
1510 )
1511 @click.option("--calls", "check_calls", is_flag=True, default=False, help="Check call cycles only.")
1512 @click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
1513 def cycles(db: str, check_imports: bool, check_calls: bool, as_json: bool):
1514 """Detect circular dependencies in import and call graphs.
1515
1516 By default checks both import cycles and call cycles.
@@ -1529,13 +1525,11 @@
1525 import_cycles = detector.detect_import_cycles() if run_imports else []
1526 call_cycles = detector.detect_call_cycles() if run_calls else []
1527
1528 if as_json:
1529 click.echo(
1530 json.dumps({"import_cycles": import_cycles, "call_cycles": call_cycles}, indent=2)
 
 
1531 )
1532 return
1533
1534 if not import_cycles and not call_cycles:
1535 console.print("[green]No circular dependencies found.[/green]")
@@ -1816,20 +1810,26 @@
1810 def pm():
1811 """Ingest project management tickets (GitHub Issues, Linear, Jira)."""
1812
1813
1814 @pm.command("ingest")
1815 @click.option(
1816 "--github",
1817 "github_repo",
1818 default="",
1819 metavar="OWNER/REPO",
1820 help="GitHub repository in owner/repo format.",
1821 )
1822 @click.option("--token", default="", envvar="GITHUB_TOKEN", help="GitHub personal access token.")
1823 @click.option(
1824 "--state",
1825 default="open",
1826 type=click.Choice(["open", "closed", "all"]),
1827 show_default=True,
1828 help="GitHub issue state filter.",
1829 )
1830 @click.option("--limit", default=100, show_default=True, help="Maximum number of issues to fetch.")
1831 @DB_OPTION
1832 @click.option("--json", "as_json", is_flag=True)
1833 def pm_ingest(github_repo: str, token: str, state: str, limit: int, db: str, as_json: bool):
1834 """Ingest tickets from a PM tool into the knowledge graph.
1835
@@ -1838,11 +1838,13 @@
1838 navegador pm ingest --github owner/repo
1839 navegador pm ingest --github owner/repo --token ghp_...
1840 navegador pm ingest --github owner/repo --state all --limit 200
1841 """
1842 if not github_repo:
1843 raise click.UsageError(
1844 "Provide --github <owner/repo> (more backends coming in a future release)."
1845 )
1846
1847 from navegador.pm import TicketIngester
1848
1849 ing = TicketIngester(_get_store(db))
1850 stats = ing.ingest_github_issues(github_repo, token=token, state=state, limit=limit)
@@ -1867,11 +1869,12 @@
1869
1870
1871 @deps.command("ingest")
1872 @click.argument("path", type=click.Path(exists=True))
1873 @click.option(
1874 "--type",
1875 "dep_type",
1876 type=click.Choice(["auto", "npm", "pip", "cargo"]),
1877 default="auto",
1878 show_default=True,
1879 help="Manifest type. auto detects from filename.",
1880 )
@@ -1922,12 +1925,11 @@
1925
1926 if as_json:
1927 click.echo(json.dumps(stats, indent=2))
1928 else:
1929 console.print(
1930 f"[green]Dependencies ingested[/green] ({dep_type}): {stats['packages']} packages"
 
1931 )
1932
1933
1934 # ── Submodules: ingest parent + submodules (#61) ──────────────────────────────
1935
@@ -2029,13 +2031,11 @@
2031 raise click.UsageError("Provide at least one NAME=PATH repo.")
2032
2033 wm = WorkspaceManager(_get_store(db), mode=WorkspaceMode(mode))
2034 for repo_spec in repos:
2035 if "=" not in repo_spec:
2036 raise click.UsageError(f"Invalid repo spec {repo_spec!r}. Expected NAME=PATH format.")
 
 
2037 name, path = repo_spec.split("=", 1)
2038 wm.add_repo(name.strip(), path.strip())
2039
2040 stats = wm.ingest_all(clear=clear)
2041
@@ -2096,11 +2096,13 @@
2096 from navegador.intelligence.search import SemanticSearch
2097 from navegador.llm import auto_provider, get_provider
2098
2099 store = _get_store(db)
2100 provider = (
2101 get_provider(llm_provider, model=llm_model)
2102 if llm_provider
2103 else auto_provider(model=llm_model)
2104 )
2105 ss = SemanticSearch(store, provider)
2106
2107 if do_index:
2108 n = ss.index()
@@ -2220,11 +2222,13 @@
2222 from navegador.intelligence.nlp import NLPEngine
2223 from navegador.llm import auto_provider, get_provider
2224
2225 store = _get_store(db)
2226 provider = (
2227 get_provider(llm_provider, model=llm_model)
2228 if llm_provider
2229 else auto_provider(model=llm_model)
2230 )
2231 engine = NLPEngine(store, provider)
2232
2233 with console.status("[bold]Thinking...[/bold]"):
2234 answer = engine.natural_query(question)
@@ -2244,13 +2248,11 @@
2248 default="",
2249 help="LLM provider (anthropic, openai, ollama). Auto-detected if omitted.",
2250 )
2251 @click.option("--model", "llm_model", default="", help="LLM model name.")
2252 @click.option("--file", "file_path", default="", help="Narrow to a specific file.")
2253 def generate_docs_cmd(name: str, db: str, llm_provider: str, llm_model: str, file_path: str):
 
 
2254 """Generate LLM-powered documentation for a named symbol.
2255
2256 \b
2257 Examples:
2258 navegador generate-docs authenticate_user
@@ -2259,11 +2261,13 @@
2261 from navegador.intelligence.nlp import NLPEngine
2262 from navegador.llm import auto_provider, get_provider
2263
2264 store = _get_store(db)
2265 provider = (
2266 get_provider(llm_provider, model=llm_model)
2267 if llm_provider
2268 else auto_provider(model=llm_model)
2269 )
2270 engine = NLPEngine(store, provider)
2271
2272 with console.status("[bold]Generating docs...[/bold]"):
2273 docs = engine.generate_docs(name, file_path=file_path)
@@ -2284,13 +2288,11 @@
2288 default="",
2289 help="LLM provider (anthropic, openai, ollama). Template mode if omitted.",
2290 )
2291 @click.option("--model", "llm_model", default="", help="LLM model name.")
2292 @click.option("--json", "as_json", is_flag=True, help="Output as JSON (wraps markdown in a dict).")
2293 def docs(target: str, db: str, project: bool, llm_provider: str, llm_model: str, as_json: bool):
 
 
2294 """Generate markdown documentation from the graph.
2295
2296 TARGET can be a file path or a module name (dotted or partial).
2297 Use --project to generate full project docs instead.
2298
2299
--- navegador/cluster/core.py
+++ navegador/cluster/core.py
@@ -107,18 +107,20 @@
107107
108108
edges = []
109109
if edges_result.result_set:
110110
for row in edges_result.result_set:
111111
src, rel_type, rel, dst = row
112
- edges.append({
113
- "src_labels": list(src.labels),
114
- "src_props": dict(src.properties),
115
- "rel_type": rel_type,
116
- "rel_props": dict(rel.properties) if rel.properties else {},
117
- "dst_labels": list(dst.labels),
118
- "dst_props": dict(dst.properties),
119
- })
112
+ edges.append(
113
+ {
114
+ "src_labels": list(src.labels),
115
+ "src_props": dict(src.properties),
116
+ "rel_type": rel_type,
117
+ "rel_props": dict(rel.properties) if rel.properties else {},
118
+ "dst_labels": list(dst.labels),
119
+ "dst_props": dict(dst.properties),
120
+ }
121
+ )
120122
121123
return {"nodes": nodes, "edges": edges}
122124
123125
def _import_to_local_graph(self, data: dict[str, Any]) -> None:
124126
"""Write snapshot data into the local SQLite graph."""
@@ -167,15 +169,18 @@
167169
serialized = json.dumps(data)
168170
pipe = self._redis.pipeline()
169171
pipe.set(_SNAPSHOT_KEY, serialized)
170172
new_version = self._redis_version() + 1
171173
pipe.set(_VERSION_KEY, new_version)
172
- pipe.hset(_META_KEY, mapping={
173
- "last_push": time.time(),
174
- "node_count": len(data["nodes"]),
175
- "edge_count": len(data["edges"]),
176
- })
174
+ pipe.hset(
175
+ _META_KEY,
176
+ mapping={
177
+ "last_push": time.time(),
178
+ "node_count": len(data["nodes"]),
179
+ "edge_count": len(data["edges"]),
180
+ },
181
+ )
177182
pipe.execute()
178183
self._set_local_version(new_version)
179184
logger.info(
180185
"Pushed local graph to Redis (version %d): %d nodes, %d edges",
181186
new_version,
@@ -196,11 +201,13 @@
196201
197202
if shared_ver > local_ver:
198203
logger.info("Shared graph is newer (%d > %d); pulling.", shared_ver, local_ver)
199204
self.snapshot_to_local()
200205
else:
201
- logger.info("Local graph is current or ahead (%d >= %d); pushing.", local_ver, shared_ver)
206
+ logger.info(
207
+ "Local graph is current or ahead (%d >= %d); pushing.", local_ver, shared_ver
208
+ )
202209
self.push_to_shared()
203210
204211
def status(self) -> dict[str, Any]:
205212
"""
206213
Return a dict describing the sync state.
207214
--- navegador/cluster/core.py
+++ navegador/cluster/core.py
@@ -107,18 +107,20 @@
107
108 edges = []
109 if edges_result.result_set:
110 for row in edges_result.result_set:
111 src, rel_type, rel, dst = row
112 edges.append({
113 "src_labels": list(src.labels),
114 "src_props": dict(src.properties),
115 "rel_type": rel_type,
116 "rel_props": dict(rel.properties) if rel.properties else {},
117 "dst_labels": list(dst.labels),
118 "dst_props": dict(dst.properties),
119 })
 
 
120
121 return {"nodes": nodes, "edges": edges}
122
123 def _import_to_local_graph(self, data: dict[str, Any]) -> None:
124 """Write snapshot data into the local SQLite graph."""
@@ -167,15 +169,18 @@
167 serialized = json.dumps(data)
168 pipe = self._redis.pipeline()
169 pipe.set(_SNAPSHOT_KEY, serialized)
170 new_version = self._redis_version() + 1
171 pipe.set(_VERSION_KEY, new_version)
172 pipe.hset(_META_KEY, mapping={
173 "last_push": time.time(),
174 "node_count": len(data["nodes"]),
175 "edge_count": len(data["edges"]),
176 })
 
 
 
177 pipe.execute()
178 self._set_local_version(new_version)
179 logger.info(
180 "Pushed local graph to Redis (version %d): %d nodes, %d edges",
181 new_version,
@@ -196,11 +201,13 @@
196
197 if shared_ver > local_ver:
198 logger.info("Shared graph is newer (%d > %d); pulling.", shared_ver, local_ver)
199 self.snapshot_to_local()
200 else:
201 logger.info("Local graph is current or ahead (%d >= %d); pushing.", local_ver, shared_ver)
 
 
202 self.push_to_shared()
203
204 def status(self) -> dict[str, Any]:
205 """
206 Return a dict describing the sync state.
207
--- navegador/cluster/core.py
+++ navegador/cluster/core.py
@@ -107,18 +107,20 @@
107
108 edges = []
109 if edges_result.result_set:
110 for row in edges_result.result_set:
111 src, rel_type, rel, dst = row
112 edges.append(
113 {
114 "src_labels": list(src.labels),
115 "src_props": dict(src.properties),
116 "rel_type": rel_type,
117 "rel_props": dict(rel.properties) if rel.properties else {},
118 "dst_labels": list(dst.labels),
119 "dst_props": dict(dst.properties),
120 }
121 )
122
123 return {"nodes": nodes, "edges": edges}
124
125 def _import_to_local_graph(self, data: dict[str, Any]) -> None:
126 """Write snapshot data into the local SQLite graph."""
@@ -167,15 +169,18 @@
169 serialized = json.dumps(data)
170 pipe = self._redis.pipeline()
171 pipe.set(_SNAPSHOT_KEY, serialized)
172 new_version = self._redis_version() + 1
173 pipe.set(_VERSION_KEY, new_version)
174 pipe.hset(
175 _META_KEY,
176 mapping={
177 "last_push": time.time(),
178 "node_count": len(data["nodes"]),
179 "edge_count": len(data["edges"]),
180 },
181 )
182 pipe.execute()
183 self._set_local_version(new_version)
184 logger.info(
185 "Pushed local graph to Redis (version %d): %d nodes, %d edges",
186 new_version,
@@ -196,11 +201,13 @@
201
202 if shared_ver > local_ver:
203 logger.info("Shared graph is newer (%d > %d); pulling.", shared_ver, local_ver)
204 self.snapshot_to_local()
205 else:
206 logger.info(
207 "Local graph is current or ahead (%d >= %d); pushing.", local_ver, shared_ver
208 )
209 self.push_to_shared()
210
211 def status(self) -> dict[str, Any]:
212 """
213 Return a dict describing the sync state.
214
--- navegador/cluster/fossil_live.py
+++ navegador/cluster/fossil_live.py
@@ -7,13 +7,11 @@
77
SQLite-backed.
88
"""
99
1010
from __future__ import annotations
1111
12
-import json
1312
import logging
14
-import time
1513
from pathlib import Path
1614
from typing import TYPE_CHECKING, Any
1715
1816
if TYPE_CHECKING:
1917
from navegador.graph.store import GraphStore
@@ -71,10 +69,11 @@
7169
client = store._client # type: ignore[attr-defined]
7270
for attr in ("_db", "connection", "_connection", "db"):
7371
conn = getattr(client, attr, None)
7472
if conn is not None:
7573
import sqlite3
74
+
7675
if isinstance(conn, sqlite3.Connection):
7776
return conn
7877
except Exception:
7978
pass
8079
return None
@@ -94,13 +93,11 @@
9493
if self._attached:
9594
return
9695
9796
native_conn = self._extract_sqlite_conn(store)
9897
if native_conn is not None:
99
- native_conn.execute(
100
- f"ATTACH DATABASE ? AS fossil", (str(self._fossil_path),)
101
- )
98
+ native_conn.execute("ATTACH DATABASE ? AS fossil", (str(self._fossil_path),))
10299
self._conn = native_conn
103100
self._attached = True
104101
logger.info("Fossil DB attached to FalkorDB SQLite: %s", self._fossil_path)
105102
else:
106103
logger.warning(
@@ -139,20 +136,22 @@
139136
result = []
140137
for row in rows:
141138
if hasattr(row, "keys"):
142139
result.append(dict(row))
143140
else:
144
- result.append({
145
- "type": row[0],
146
- "mtime": row[1],
147
- "objid": row[2],
148
- "uid": row[3],
149
- "user": row[4],
150
- "euser": row[5],
151
- "comment": row[6],
152
- "ecomment": row[7],
153
- })
141
+ result.append(
142
+ {
143
+ "type": row[0],
144
+ "mtime": row[1],
145
+ "objid": row[2],
146
+ "uid": row[3],
147
+ "user": row[4],
148
+ "euser": row[5],
149
+ "comment": row[6],
150
+ "ecomment": row[7],
151
+ }
152
+ )
154153
return result
155154
156155
def query_tickets(self) -> list[dict]:
157156
"""
158157
Query Fossil tickets.
@@ -232,9 +231,7 @@
232231
f"SET {prop_str}",
233232
props,
234233
)
235234
ticket_count += 1
236235
237
- logger.info(
238
- "Fossil sync complete: %d commits, %d tickets", commit_count, ticket_count
239
- )
236
+ logger.info("Fossil sync complete: %d commits, %d tickets", commit_count, ticket_count)
240237
return {"commits": commit_count, "tickets": ticket_count}
241238
--- navegador/cluster/fossil_live.py
+++ navegador/cluster/fossil_live.py
@@ -7,13 +7,11 @@
7 SQLite-backed.
8 """
9
10 from __future__ import annotations
11
12 import json
13 import logging
14 import time
15 from pathlib import Path
16 from typing import TYPE_CHECKING, Any
17
18 if TYPE_CHECKING:
19 from navegador.graph.store import GraphStore
@@ -71,10 +69,11 @@
71 client = store._client # type: ignore[attr-defined]
72 for attr in ("_db", "connection", "_connection", "db"):
73 conn = getattr(client, attr, None)
74 if conn is not None:
75 import sqlite3
 
76 if isinstance(conn, sqlite3.Connection):
77 return conn
78 except Exception:
79 pass
80 return None
@@ -94,13 +93,11 @@
94 if self._attached:
95 return
96
97 native_conn = self._extract_sqlite_conn(store)
98 if native_conn is not None:
99 native_conn.execute(
100 f"ATTACH DATABASE ? AS fossil", (str(self._fossil_path),)
101 )
102 self._conn = native_conn
103 self._attached = True
104 logger.info("Fossil DB attached to FalkorDB SQLite: %s", self._fossil_path)
105 else:
106 logger.warning(
@@ -139,20 +136,22 @@
139 result = []
140 for row in rows:
141 if hasattr(row, "keys"):
142 result.append(dict(row))
143 else:
144 result.append({
145 "type": row[0],
146 "mtime": row[1],
147 "objid": row[2],
148 "uid": row[3],
149 "user": row[4],
150 "euser": row[5],
151 "comment": row[6],
152 "ecomment": row[7],
153 })
 
 
154 return result
155
156 def query_tickets(self) -> list[dict]:
157 """
158 Query Fossil tickets.
@@ -232,9 +231,7 @@
232 f"SET {prop_str}",
233 props,
234 )
235 ticket_count += 1
236
237 logger.info(
238 "Fossil sync complete: %d commits, %d tickets", commit_count, ticket_count
239 )
240 return {"commits": commit_count, "tickets": ticket_count}
241
--- navegador/cluster/fossil_live.py
+++ navegador/cluster/fossil_live.py
@@ -7,13 +7,11 @@
7 SQLite-backed.
8 """
9
10 from __future__ import annotations
11
 
12 import logging
 
13 from pathlib import Path
14 from typing import TYPE_CHECKING, Any
15
16 if TYPE_CHECKING:
17 from navegador.graph.store import GraphStore
@@ -71,10 +69,11 @@
69 client = store._client # type: ignore[attr-defined]
70 for attr in ("_db", "connection", "_connection", "db"):
71 conn = getattr(client, attr, None)
72 if conn is not None:
73 import sqlite3
74
75 if isinstance(conn, sqlite3.Connection):
76 return conn
77 except Exception:
78 pass
79 return None
@@ -94,13 +93,11 @@
93 if self._attached:
94 return
95
96 native_conn = self._extract_sqlite_conn(store)
97 if native_conn is not None:
98 native_conn.execute("ATTACH DATABASE ? AS fossil", (str(self._fossil_path),))
 
 
99 self._conn = native_conn
100 self._attached = True
101 logger.info("Fossil DB attached to FalkorDB SQLite: %s", self._fossil_path)
102 else:
103 logger.warning(
@@ -139,20 +136,22 @@
136 result = []
137 for row in rows:
138 if hasattr(row, "keys"):
139 result.append(dict(row))
140 else:
141 result.append(
142 {
143 "type": row[0],
144 "mtime": row[1],
145 "objid": row[2],
146 "uid": row[3],
147 "user": row[4],
148 "euser": row[5],
149 "comment": row[6],
150 "ecomment": row[7],
151 }
152 )
153 return result
154
155 def query_tickets(self) -> list[dict]:
156 """
157 Query Fossil tickets.
@@ -232,9 +231,7 @@
231 f"SET {prop_str}",
232 props,
233 )
234 ticket_count += 1
235
236 logger.info("Fossil sync complete: %d commits, %d tickets", commit_count, ticket_count)
 
 
237 return {"commits": commit_count, "tickets": ticket_count}
238
--- navegador/cluster/locking.py
+++ navegador/cluster/locking.py
@@ -8,11 +8,10 @@
88
from __future__ import annotations
99
1010
import logging
1111
import time
1212
import uuid
13
-from contextlib import contextmanager
1413
from typing import Any
1514
1615
logger = logging.getLogger(__name__)
1716
1817
_LOCK_PREFIX = "navegador:lock:"
@@ -127,12 +126,10 @@
127126
128127
def __enter__(self) -> "DistributedLock":
129128
deadline = time.monotonic() + self._timeout
130129
acquired = self.acquire(blocking=True, deadline=deadline)
131130
if not acquired:
132
- raise LockTimeout(
133
- f"Could not acquire lock '{self._name}' within {self._timeout}s"
134
- )
131
+ raise LockTimeout(f"Could not acquire lock '{self._name}' within {self._timeout}s")
135132
return self
136133
137134
def __exit__(self, *_: object) -> None:
138135
self.release()
139136
--- navegador/cluster/locking.py
+++ navegador/cluster/locking.py
@@ -8,11 +8,10 @@
8 from __future__ import annotations
9
10 import logging
11 import time
12 import uuid
13 from contextlib import contextmanager
14 from typing import Any
15
16 logger = logging.getLogger(__name__)
17
18 _LOCK_PREFIX = "navegador:lock:"
@@ -127,12 +126,10 @@
127
128 def __enter__(self) -> "DistributedLock":
129 deadline = time.monotonic() + self._timeout
130 acquired = self.acquire(blocking=True, deadline=deadline)
131 if not acquired:
132 raise LockTimeout(
133 f"Could not acquire lock '{self._name}' within {self._timeout}s"
134 )
135 return self
136
137 def __exit__(self, *_: object) -> None:
138 self.release()
139
--- navegador/cluster/locking.py
+++ navegador/cluster/locking.py
@@ -8,11 +8,10 @@
8 from __future__ import annotations
9
10 import logging
11 import time
12 import uuid
 
13 from typing import Any
14
15 logger = logging.getLogger(__name__)
16
17 _LOCK_PREFIX = "navegador:lock:"
@@ -127,12 +126,10 @@
126
127 def __enter__(self) -> "DistributedLock":
128 deadline = time.monotonic() + self._timeout
129 acquired = self.acquire(blocking=True, deadline=deadline)
130 if not acquired:
131 raise LockTimeout(f"Could not acquire lock '{self._name}' within {self._timeout}s")
 
 
132 return self
133
134 def __exit__(self, *_: object) -> None:
135 self.release()
136
--- navegador/cluster/messaging.py
+++ navegador/cluster/messaging.py
@@ -11,11 +11,11 @@
1111
1212
import json
1313
import logging
1414
import time
1515
import uuid
16
-from dataclasses import asdict, dataclass, field
16
+from dataclasses import asdict, dataclass
1717
from typing import Any
1818
1919
logger = logging.getLogger(__name__)
2020
2121
_QUEUE_PREFIX = "navegador:msg:queue:"
2222
--- navegador/cluster/messaging.py
+++ navegador/cluster/messaging.py
@@ -11,11 +11,11 @@
11
12 import json
13 import logging
14 import time
15 import uuid
16 from dataclasses import asdict, dataclass, field
17 from typing import Any
18
19 logger = logging.getLogger(__name__)
20
21 _QUEUE_PREFIX = "navegador:msg:queue:"
22
--- navegador/cluster/messaging.py
+++ navegador/cluster/messaging.py
@@ -11,11 +11,11 @@
11
12 import json
13 import logging
14 import time
15 import uuid
16 from dataclasses import asdict, dataclass
17 from typing import Any
18
19 logger = logging.getLogger(__name__)
20
21 _QUEUE_PREFIX = "navegador:msg:queue:"
22
--- navegador/cluster/partitioning.py
+++ navegador/cluster/partitioning.py
@@ -19,11 +19,11 @@
1919
2020
from __future__ import annotations
2121
2222
import logging
2323
import math
24
-from dataclasses import dataclass, field
24
+from dataclasses import dataclass
2525
from typing import Any
2626
2727
logger = logging.getLogger(__name__)
2828
2929
@@ -63,12 +63,11 @@
6363
# ── Internal ──────────────────────────────────────────────────────────────
6464
6565
def _get_all_file_paths(self) -> list[str]:
6666
"""Retrieve distinct file paths recorded in the graph."""
6767
result = self._store.query(
68
- "MATCH (n) WHERE n.file_path IS NOT NULL "
69
- "RETURN DISTINCT n.file_path AS fp ORDER BY fp"
68
+ "MATCH (n) WHERE n.file_path IS NOT NULL RETURN DISTINCT n.file_path AS fp ORDER BY fp"
7069
)
7170
if not result.result_set:
7271
return []
7372
paths: list[str] = []
7473
for row in result.result_set:
@@ -85,11 +84,11 @@
8584
if not items:
8685
return [[] for _ in range(n)]
8786
chunk_size = math.ceil(len(items) / n)
8887
buckets = []
8988
for i in range(0, len(items), chunk_size):
90
- buckets.append(items[i: i + chunk_size])
89
+ buckets.append(items[i : i + chunk_size])
9190
# Pad with empty lists if fewer chunks than agents
9291
while len(buckets) < n:
9392
buckets.append([])
9493
return buckets[:n]
9594
9695
--- navegador/cluster/partitioning.py
+++ navegador/cluster/partitioning.py
@@ -19,11 +19,11 @@
19
20 from __future__ import annotations
21
22 import logging
23 import math
24 from dataclasses import dataclass, field
25 from typing import Any
26
27 logger = logging.getLogger(__name__)
28
29
@@ -63,12 +63,11 @@
63 # ── Internal ──────────────────────────────────────────────────────────────
64
65 def _get_all_file_paths(self) -> list[str]:
66 """Retrieve distinct file paths recorded in the graph."""
67 result = self._store.query(
68 "MATCH (n) WHERE n.file_path IS NOT NULL "
69 "RETURN DISTINCT n.file_path AS fp ORDER BY fp"
70 )
71 if not result.result_set:
72 return []
73 paths: list[str] = []
74 for row in result.result_set:
@@ -85,11 +84,11 @@
85 if not items:
86 return [[] for _ in range(n)]
87 chunk_size = math.ceil(len(items) / n)
88 buckets = []
89 for i in range(0, len(items), chunk_size):
90 buckets.append(items[i: i + chunk_size])
91 # Pad with empty lists if fewer chunks than agents
92 while len(buckets) < n:
93 buckets.append([])
94 return buckets[:n]
95
96
--- navegador/cluster/partitioning.py
+++ navegador/cluster/partitioning.py
@@ -19,11 +19,11 @@
19
20 from __future__ import annotations
21
22 import logging
23 import math
24 from dataclasses import dataclass
25 from typing import Any
26
27 logger = logging.getLogger(__name__)
28
29
@@ -63,12 +63,11 @@
63 # ── Internal ──────────────────────────────────────────────────────────────
64
65 def _get_all_file_paths(self) -> list[str]:
66 """Retrieve distinct file paths recorded in the graph."""
67 result = self._store.query(
68 "MATCH (n) WHERE n.file_path IS NOT NULL RETURN DISTINCT n.file_path AS fp ORDER BY fp"
 
69 )
70 if not result.result_set:
71 return []
72 paths: list[str] = []
73 for row in result.result_set:
@@ -85,11 +84,11 @@
84 if not items:
85 return [[] for _ in range(n)]
86 chunk_size = math.ceil(len(items) / n)
87 buckets = []
88 for i in range(0, len(items), chunk_size):
89 buckets.append(items[i : i + chunk_size])
90 # Pad with empty lists if fewer chunks than agents
91 while len(buckets) < n:
92 buckets.append([])
93 return buckets[:n]
94
95
--- navegador/cluster/pubsub.py
+++ navegador/cluster/pubsub.py
@@ -92,14 +92,16 @@
9292
-------
9393
int
9494
Number of clients that received the message.
9595
"""
9696
channel = _channel_name(event_type)
97
- payload = json.dumps({
98
- "event_type": event_type.value if isinstance(event_type, EventType) else event_type,
99
- "data": data,
100
- })
97
+ payload = json.dumps(
98
+ {
99
+ "event_type": event_type.value if isinstance(event_type, EventType) else event_type,
100
+ "data": data,
101
+ }
102
+ )
101103
result = self._redis.publish(channel, payload)
102104
logger.debug("Published %s to channel %s (%d receivers)", event_type, channel, result)
103105
return result
104106
105107
def subscribe(
106108
--- navegador/cluster/pubsub.py
+++ navegador/cluster/pubsub.py
@@ -92,14 +92,16 @@
92 -------
93 int
94 Number of clients that received the message.
95 """
96 channel = _channel_name(event_type)
97 payload = json.dumps({
98 "event_type": event_type.value if isinstance(event_type, EventType) else event_type,
99 "data": data,
100 })
 
 
101 result = self._redis.publish(channel, payload)
102 logger.debug("Published %s to channel %s (%d receivers)", event_type, channel, result)
103 return result
104
105 def subscribe(
106
--- navegador/cluster/pubsub.py
+++ navegador/cluster/pubsub.py
@@ -92,14 +92,16 @@
92 -------
93 int
94 Number of clients that received the message.
95 """
96 channel = _channel_name(event_type)
97 payload = json.dumps(
98 {
99 "event_type": event_type.value if isinstance(event_type, EventType) else event_type,
100 "data": data,
101 }
102 )
103 result = self._redis.publish(channel, payload)
104 logger.debug("Published %s to channel %s (%d receivers)", event_type, channel, result)
105 return result
106
107 def subscribe(
108
--- navegador/cluster/sessions.py
+++ navegador/cluster/sessions.py
@@ -27,11 +27,11 @@
2727
import uuid
2828
from typing import Any
2929
3030
logger = logging.getLogger(__name__)
3131
32
-_SESSIONS_KEY = "navegador:sessions" # Redis hash: session_id -> JSON
32
+_SESSIONS_KEY = "navegador:sessions" # Redis hash: session_id -> JSON
3333
_SESSION_INDEX_KEY = "navegador:sessions:ids" # Redis set: all session IDs
3434
3535
3636
def _make_session_id() -> str:
3737
return str(uuid.uuid4())
3838
--- navegador/cluster/sessions.py
+++ navegador/cluster/sessions.py
@@ -27,11 +27,11 @@
27 import uuid
28 from typing import Any
29
30 logger = logging.getLogger(__name__)
31
32 _SESSIONS_KEY = "navegador:sessions" # Redis hash: session_id -> JSON
33 _SESSION_INDEX_KEY = "navegador:sessions:ids" # Redis set: all session IDs
34
35
36 def _make_session_id() -> str:
37 return str(uuid.uuid4())
38
--- navegador/cluster/sessions.py
+++ navegador/cluster/sessions.py
@@ -27,11 +27,11 @@
27 import uuid
28 from typing import Any
29
30 logger = logging.getLogger(__name__)
31
32 _SESSIONS_KEY = "navegador:sessions" # Redis hash: session_id -> JSON
33 _SESSION_INDEX_KEY = "navegador:sessions:ids" # Redis set: all session IDs
34
35
36 def _make_session_id() -> str:
37 return str(uuid.uuid4())
38
--- navegador/cluster/taskqueue.py
+++ navegador/cluster/taskqueue.py
@@ -25,18 +25,18 @@
2525
2626
import json
2727
import logging
2828
import time
2929
import uuid
30
-from dataclasses import asdict, dataclass, field
30
+from dataclasses import dataclass, field
3131
from enum import Enum
3232
from typing import Any
3333
3434
logger = logging.getLogger(__name__)
3535
36
-_QUEUE_KEY = "navegador:taskqueue:pending" # Redis list (RPUSH/BLPOP)
37
-_TASK_KEY_PREFIX = "navegador:task:" # Hash per task
36
+_QUEUE_KEY = "navegador:taskqueue:pending" # Redis list (RPUSH/BLPOP)
37
+_TASK_KEY_PREFIX = "navegador:task:" # Hash per task
3838
_INPROGRESS_KEY = "navegador:taskqueue:inprogress" # Set of in-progress task IDs
3939
4040
4141
class TaskStatus(str, Enum):
4242
PENDING = "pending"
@@ -162,15 +162,18 @@
162162
return None
163163
164164
task_id = task_id_raw.decode() if isinstance(task_id_raw, bytes) else task_id_raw
165165
now = time.time()
166166
pipe = self._redis.pipeline()
167
- pipe.hset(_task_key(task_id), mapping={
168
- "status": TaskStatus.IN_PROGRESS.value,
169
- "agent_id": agent_id,
170
- "updated_at": now,
171
- })
167
+ pipe.hset(
168
+ _task_key(task_id),
169
+ mapping={
170
+ "status": TaskStatus.IN_PROGRESS.value,
171
+ "agent_id": agent_id,
172
+ "updated_at": now,
173
+ },
174
+ )
172175
pipe.sadd(_INPROGRESS_KEY, task_id)
173176
pipe.execute()
174177
175178
raw = self._redis.hgetall(_task_key(task_id))
176179
task = Task.from_dict(raw)
@@ -179,27 +182,33 @@
179182
180183
def complete(self, task_id: str, result: Any = None) -> None:
181184
"""Mark a task as successfully completed."""
182185
result_encoded = json.dumps(result) if result is not None else ""
183186
pipe = self._redis.pipeline()
184
- pipe.hset(_task_key(task_id), mapping={
185
- "status": TaskStatus.DONE.value,
186
- "result": result_encoded,
187
- "updated_at": time.time(),
188
- })
187
+ pipe.hset(
188
+ _task_key(task_id),
189
+ mapping={
190
+ "status": TaskStatus.DONE.value,
191
+ "result": result_encoded,
192
+ "updated_at": time.time(),
193
+ },
194
+ )
189195
pipe.srem(_INPROGRESS_KEY, task_id)
190196
pipe.execute()
191197
logger.debug("Task %s completed", task_id)
192198
193199
def fail(self, task_id: str, error: str) -> None:
194200
"""Mark a task as failed with an error message."""
195201
pipe = self._redis.pipeline()
196
- pipe.hset(_task_key(task_id), mapping={
197
- "status": TaskStatus.FAILED.value,
198
- "error": error,
199
- "updated_at": time.time(),
200
- })
202
+ pipe.hset(
203
+ _task_key(task_id),
204
+ mapping={
205
+ "status": TaskStatus.FAILED.value,
206
+ "error": error,
207
+ "updated_at": time.time(),
208
+ },
209
+ )
201210
pipe.srem(_INPROGRESS_KEY, task_id)
202211
pipe.execute()
203212
logger.debug("Task %s failed: %s", task_id, error)
204213
205214
def status(self, task_id: str) -> dict[str, Any]:
206215
--- navegador/cluster/taskqueue.py
+++ navegador/cluster/taskqueue.py
@@ -25,18 +25,18 @@
25
26 import json
27 import logging
28 import time
29 import uuid
30 from dataclasses import asdict, dataclass, field
31 from enum import Enum
32 from typing import Any
33
34 logger = logging.getLogger(__name__)
35
36 _QUEUE_KEY = "navegador:taskqueue:pending" # Redis list (RPUSH/BLPOP)
37 _TASK_KEY_PREFIX = "navegador:task:" # Hash per task
38 _INPROGRESS_KEY = "navegador:taskqueue:inprogress" # Set of in-progress task IDs
39
40
41 class TaskStatus(str, Enum):
42 PENDING = "pending"
@@ -162,15 +162,18 @@
162 return None
163
164 task_id = task_id_raw.decode() if isinstance(task_id_raw, bytes) else task_id_raw
165 now = time.time()
166 pipe = self._redis.pipeline()
167 pipe.hset(_task_key(task_id), mapping={
168 "status": TaskStatus.IN_PROGRESS.value,
169 "agent_id": agent_id,
170 "updated_at": now,
171 })
 
 
 
172 pipe.sadd(_INPROGRESS_KEY, task_id)
173 pipe.execute()
174
175 raw = self._redis.hgetall(_task_key(task_id))
176 task = Task.from_dict(raw)
@@ -179,27 +182,33 @@
179
180 def complete(self, task_id: str, result: Any = None) -> None:
181 """Mark a task as successfully completed."""
182 result_encoded = json.dumps(result) if result is not None else ""
183 pipe = self._redis.pipeline()
184 pipe.hset(_task_key(task_id), mapping={
185 "status": TaskStatus.DONE.value,
186 "result": result_encoded,
187 "updated_at": time.time(),
188 })
 
 
 
189 pipe.srem(_INPROGRESS_KEY, task_id)
190 pipe.execute()
191 logger.debug("Task %s completed", task_id)
192
193 def fail(self, task_id: str, error: str) -> None:
194 """Mark a task as failed with an error message."""
195 pipe = self._redis.pipeline()
196 pipe.hset(_task_key(task_id), mapping={
197 "status": TaskStatus.FAILED.value,
198 "error": error,
199 "updated_at": time.time(),
200 })
 
 
 
201 pipe.srem(_INPROGRESS_KEY, task_id)
202 pipe.execute()
203 logger.debug("Task %s failed: %s", task_id, error)
204
205 def status(self, task_id: str) -> dict[str, Any]:
206
--- navegador/cluster/taskqueue.py
+++ navegador/cluster/taskqueue.py
@@ -25,18 +25,18 @@
25
26 import json
27 import logging
28 import time
29 import uuid
30 from dataclasses import dataclass, field
31 from enum import Enum
32 from typing import Any
33
34 logger = logging.getLogger(__name__)
35
36 _QUEUE_KEY = "navegador:taskqueue:pending" # Redis list (RPUSH/BLPOP)
37 _TASK_KEY_PREFIX = "navegador:task:" # Hash per task
38 _INPROGRESS_KEY = "navegador:taskqueue:inprogress" # Set of in-progress task IDs
39
40
41 class TaskStatus(str, Enum):
42 PENDING = "pending"
@@ -162,15 +162,18 @@
162 return None
163
164 task_id = task_id_raw.decode() if isinstance(task_id_raw, bytes) else task_id_raw
165 now = time.time()
166 pipe = self._redis.pipeline()
167 pipe.hset(
168 _task_key(task_id),
169 mapping={
170 "status": TaskStatus.IN_PROGRESS.value,
171 "agent_id": agent_id,
172 "updated_at": now,
173 },
174 )
175 pipe.sadd(_INPROGRESS_KEY, task_id)
176 pipe.execute()
177
178 raw = self._redis.hgetall(_task_key(task_id))
179 task = Task.from_dict(raw)
@@ -179,27 +182,33 @@
182
183 def complete(self, task_id: str, result: Any = None) -> None:
184 """Mark a task as successfully completed."""
185 result_encoded = json.dumps(result) if result is not None else ""
186 pipe = self._redis.pipeline()
187 pipe.hset(
188 _task_key(task_id),
189 mapping={
190 "status": TaskStatus.DONE.value,
191 "result": result_encoded,
192 "updated_at": time.time(),
193 },
194 )
195 pipe.srem(_INPROGRESS_KEY, task_id)
196 pipe.execute()
197 logger.debug("Task %s completed", task_id)
198
199 def fail(self, task_id: str, error: str) -> None:
200 """Mark a task as failed with an error message."""
201 pipe = self._redis.pipeline()
202 pipe.hset(
203 _task_key(task_id),
204 mapping={
205 "status": TaskStatus.FAILED.value,
206 "error": error,
207 "updated_at": time.time(),
208 },
209 )
210 pipe.srem(_INPROGRESS_KEY, task_id)
211 pipe.execute()
212 logger.debug("Task %s failed: %s", task_id, error)
213
214 def status(self, task_id: str) -> dict[str, Any]:
215
--- navegador/codeowners.py
+++ navegador/codeowners.py
@@ -12,11 +12,10 @@
1212
"""
1313
1414
from __future__ import annotations
1515
1616
import logging
17
-import re
1817
from pathlib import Path
1918
from typing import Any
2019
2120
from navegador.graph.schema import EdgeType, NodeLabel
2221
from navegador.graph.store import GraphStore
@@ -113,13 +112,11 @@
113112
logger.info("CodeownersIngester: %s", stats)
114113
return stats
115114
116115
# ── Parsing ───────────────────────────────────────────────────────────────
117116
118
- def _parse_codeowners(
119
- self, path: Path
120
- ) -> list[tuple[str, list[str]]]:
117
+ def _parse_codeowners(self, path: Path) -> list[tuple[str, list[str]]]:
121118
"""
122119
Parse a CODEOWNERS file at *path*.
123120
124121
Returns a list of (pattern, [owner, ...]) tuples. Comment lines and
125122
blank lines are ignored.
126123
--- navegador/codeowners.py
+++ navegador/codeowners.py
@@ -12,11 +12,10 @@
12 """
13
14 from __future__ import annotations
15
16 import logging
17 import re
18 from pathlib import Path
19 from typing import Any
20
21 from navegador.graph.schema import EdgeType, NodeLabel
22 from navegador.graph.store import GraphStore
@@ -113,13 +112,11 @@
113 logger.info("CodeownersIngester: %s", stats)
114 return stats
115
116 # ── Parsing ───────────────────────────────────────────────────────────────
117
118 def _parse_codeowners(
119 self, path: Path
120 ) -> list[tuple[str, list[str]]]:
121 """
122 Parse a CODEOWNERS file at *path*.
123
124 Returns a list of (pattern, [owner, ...]) tuples. Comment lines and
125 blank lines are ignored.
126
--- navegador/codeowners.py
+++ navegador/codeowners.py
@@ -12,11 +12,10 @@
12 """
13
14 from __future__ import annotations
15
16 import logging
 
17 from pathlib import Path
18 from typing import Any
19
20 from navegador.graph.schema import EdgeType, NodeLabel
21 from navegador.graph.store import GraphStore
@@ -113,13 +112,11 @@
112 logger.info("CodeownersIngester: %s", stats)
113 return stats
114
115 # ── Parsing ───────────────────────────────────────────────────────────────
116
117 def _parse_codeowners(self, path: Path) -> list[tuple[str, list[str]]]:
 
 
118 """
119 Parse a CODEOWNERS file at *path*.
120
121 Returns a list of (pattern, [owner, ...]) tuples. Comment lines and
122 blank lines are ignored.
123
--- navegador/completions.py
+++ navegador/completions.py
@@ -45,28 +45,34 @@
4545
"""Return the eval/source line to add to the shell rc file.
4646
4747
Raises ValueError for unsupported shells.
4848
"""
4949
if shell not in SUPPORTED_SHELLS:
50
- raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}")
50
+ raise ValueError(
51
+ f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}"
52
+ )
5153
return _EVAL_LINES[shell]
5254
5355
5456
def get_rc_path(shell: str) -> str:
5557
"""Return the default rc file path (unexpanded) for *shell*.
5658
5759
Raises ValueError for unsupported shells.
5860
"""
5961
if shell not in SUPPORTED_SHELLS:
60
- raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}")
62
+ raise ValueError(
63
+ f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}"
64
+ )
6165
return _RC_PATHS[shell]
6266
6367
6468
def get_install_instruction(shell: str) -> str:
6569
"""Return a human-readable instruction for adding completions to *shell*."""
6670
if shell not in SUPPORTED_SHELLS:
67
- raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}")
71
+ raise ValueError(
72
+ f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}"
73
+ )
6874
rc = _RC_PATHS[shell]
6975
line = _EVAL_LINES[shell]
7076
return f"Add the following line to {rc}:\n\n {line}"
7177
7278
@@ -82,11 +88,13 @@
8288
8389
Raises:
8490
ValueError: For unsupported shells.
8591
"""
8692
if shell not in SUPPORTED_SHELLS:
87
- raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}")
93
+ raise ValueError(
94
+ f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}"
95
+ )
8896
8997
target = Path(rc_path or _RC_PATHS[shell]).expanduser()
9098
line = _EVAL_LINES[shell]
9199
92100
# Idempotent: don't append if the line is already present
93101
--- navegador/completions.py
+++ navegador/completions.py
@@ -45,28 +45,34 @@
45 """Return the eval/source line to add to the shell rc file.
46
47 Raises ValueError for unsupported shells.
48 """
49 if shell not in SUPPORTED_SHELLS:
50 raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}")
 
 
51 return _EVAL_LINES[shell]
52
53
54 def get_rc_path(shell: str) -> str:
55 """Return the default rc file path (unexpanded) for *shell*.
56
57 Raises ValueError for unsupported shells.
58 """
59 if shell not in SUPPORTED_SHELLS:
60 raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}")
 
 
61 return _RC_PATHS[shell]
62
63
64 def get_install_instruction(shell: str) -> str:
65 """Return a human-readable instruction for adding completions to *shell*."""
66 if shell not in SUPPORTED_SHELLS:
67 raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}")
 
 
68 rc = _RC_PATHS[shell]
69 line = _EVAL_LINES[shell]
70 return f"Add the following line to {rc}:\n\n {line}"
71
72
@@ -82,11 +88,13 @@
82
83 Raises:
84 ValueError: For unsupported shells.
85 """
86 if shell not in SUPPORTED_SHELLS:
87 raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}")
 
 
88
89 target = Path(rc_path or _RC_PATHS[shell]).expanduser()
90 line = _EVAL_LINES[shell]
91
92 # Idempotent: don't append if the line is already present
93
--- navegador/completions.py
+++ navegador/completions.py
@@ -45,28 +45,34 @@
45 """Return the eval/source line to add to the shell rc file.
46
47 Raises ValueError for unsupported shells.
48 """
49 if shell not in SUPPORTED_SHELLS:
50 raise ValueError(
51 f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}"
52 )
53 return _EVAL_LINES[shell]
54
55
56 def get_rc_path(shell: str) -> str:
57 """Return the default rc file path (unexpanded) for *shell*.
58
59 Raises ValueError for unsupported shells.
60 """
61 if shell not in SUPPORTED_SHELLS:
62 raise ValueError(
63 f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}"
64 )
65 return _RC_PATHS[shell]
66
67
68 def get_install_instruction(shell: str) -> str:
69 """Return a human-readable instruction for adding completions to *shell*."""
70 if shell not in SUPPORTED_SHELLS:
71 raise ValueError(
72 f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}"
73 )
74 rc = _RC_PATHS[shell]
75 line = _EVAL_LINES[shell]
76 return f"Add the following line to {rc}:\n\n {line}"
77
78
@@ -82,11 +88,13 @@
88
89 Raises:
90 ValueError: For unsupported shells.
91 """
92 if shell not in SUPPORTED_SHELLS:
93 raise ValueError(
94 f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}"
95 )
96
97 target = Path(rc_path or _RC_PATHS[shell]).expanduser()
98 line = _EVAL_LINES[shell]
99
100 # Idempotent: don't append if the line is already present
101
--- navegador/context/loader.py
+++ navegador/context/loader.py
@@ -339,13 +339,11 @@
339339
340340
# ── Knowledge: find owners ────────────────────────────────────────────────
341341
342342
def find_owners(self, name: str, file_path: str = "") -> list[ContextNode]:
343343
"""Find people assigned to a named node."""
344
- result = self.store.query(
345
- queries.FIND_OWNERS, {"name": name, "file_path": file_path}
346
- )
344
+ result = self.store.query(queries.FIND_OWNERS, {"name": name, "file_path": file_path})
347345
return [
348346
ContextNode(
349347
type="Person",
350348
name=row[2],
351349
description=f"role={row[4]}, team={row[5]}",
352350
--- navegador/context/loader.py
+++ navegador/context/loader.py
@@ -339,13 +339,11 @@
339
340 # ── Knowledge: find owners ────────────────────────────────────────────────
341
342 def find_owners(self, name: str, file_path: str = "") -> list[ContextNode]:
343 """Find people assigned to a named node."""
344 result = self.store.query(
345 queries.FIND_OWNERS, {"name": name, "file_path": file_path}
346 )
347 return [
348 ContextNode(
349 type="Person",
350 name=row[2],
351 description=f"role={row[4]}, team={row[5]}",
352
--- navegador/context/loader.py
+++ navegador/context/loader.py
@@ -339,13 +339,11 @@
339
340 # ── Knowledge: find owners ────────────────────────────────────────────────
341
342 def find_owners(self, name: str, file_path: str = "") -> list[ContextNode]:
343 """Find people assigned to a named node."""
344 result = self.store.query(queries.FIND_OWNERS, {"name": name, "file_path": file_path})
 
 
345 return [
346 ContextNode(
347 type="Person",
348 name=row[2],
349 description=f"role={row[4]}, team={row[5]}",
350
--- navegador/diff.py
+++ navegador/diff.py
@@ -27,11 +27,10 @@
2727
from typing import Any
2828
2929
from navegador.graph.store import GraphStore
3030
from navegador.vcs import GitAdapter
3131
32
-
3332
# ── Cypher helpers ────────────────────────────────────────────────────────────
3433
3534
# All symbols (Function / Class / Method) in a given file with their line ranges
3635
_SYMBOLS_IN_FILE = """
3736
MATCH (n)
@@ -101,13 +100,11 @@
101100
else:
102101
current_new_start = int(new_info)
103102
current_new_count = 1
104103
if current_file and current_new_count > 0:
105104
end = current_new_start + max(current_new_count - 1, 0)
106
- result.setdefault(current_file, []).append(
107
- (current_new_start, end)
108
- )
105
+ result.setdefault(current_file, []).append((current_new_start, end))
109106
except (ValueError, IndexError):
110107
pass
111108
112109
return result
113110
114111
--- navegador/diff.py
+++ navegador/diff.py
@@ -27,11 +27,10 @@
27 from typing import Any
28
29 from navegador.graph.store import GraphStore
30 from navegador.vcs import GitAdapter
31
32
33 # ── Cypher helpers ────────────────────────────────────────────────────────────
34
35 # All symbols (Function / Class / Method) in a given file with their line ranges
36 _SYMBOLS_IN_FILE = """
37 MATCH (n)
@@ -101,13 +100,11 @@
101 else:
102 current_new_start = int(new_info)
103 current_new_count = 1
104 if current_file and current_new_count > 0:
105 end = current_new_start + max(current_new_count - 1, 0)
106 result.setdefault(current_file, []).append(
107 (current_new_start, end)
108 )
109 except (ValueError, IndexError):
110 pass
111
112 return result
113
114
--- navegador/diff.py
+++ navegador/diff.py
@@ -27,11 +27,10 @@
27 from typing import Any
28
29 from navegador.graph.store import GraphStore
30 from navegador.vcs import GitAdapter
31
 
32 # ── Cypher helpers ────────────────────────────────────────────────────────────
33
34 # All symbols (Function / Class / Method) in a given file with their line ranges
35 _SYMBOLS_IN_FILE = """
36 MATCH (n)
@@ -101,13 +100,11 @@
100 else:
101 current_new_start = int(new_info)
102 current_new_count = 1
103 if current_file and current_new_count > 0:
104 end = current_new_start + max(current_new_count - 1, 0)
105 result.setdefault(current_file, []).append((current_new_start, end))
 
 
106 except (ValueError, IndexError):
107 pass
108
109 return result
110
111
--- navegador/editor.py
+++ navegador/editor.py
@@ -49,12 +49,11 @@
4949
5050
Raises ValueError for unsupported editors.
5151
"""
5252
if editor not in SUPPORTED_EDITORS:
5353
raise ValueError(
54
- f"Unsupported editor {editor!r}. "
55
- f"Choose from: {', '.join(SUPPORTED_EDITORS)}"
54
+ f"Unsupported editor {editor!r}. Choose from: {', '.join(SUPPORTED_EDITORS)}"
5655
)
5756
return _mcp_block(self.db)
5857
5958
def config_json(self, editor: str) -> str:
6059
"""Return the JSON string for *editor*'s config file."""
@@ -62,12 +61,11 @@
6261
6362
def config_path(self, editor: str) -> str:
6463
"""Return the relative config file path for *editor*."""
6564
if editor not in SUPPORTED_EDITORS:
6665
raise ValueError(
67
- f"Unsupported editor {editor!r}. "
68
- f"Choose from: {', '.join(SUPPORTED_EDITORS)}"
66
+ f"Unsupported editor {editor!r}. Choose from: {', '.join(SUPPORTED_EDITORS)}"
6967
)
7068
return _CONFIG_PATHS[editor]
7169
7270
def write_config(self, editor: str, base_dir: str = ".") -> Path:
7371
"""Write the config file to the expected path under *base_dir*.
7472
--- navegador/editor.py
+++ navegador/editor.py
@@ -49,12 +49,11 @@
49
50 Raises ValueError for unsupported editors.
51 """
52 if editor not in SUPPORTED_EDITORS:
53 raise ValueError(
54 f"Unsupported editor {editor!r}. "
55 f"Choose from: {', '.join(SUPPORTED_EDITORS)}"
56 )
57 return _mcp_block(self.db)
58
59 def config_json(self, editor: str) -> str:
60 """Return the JSON string for *editor*'s config file."""
@@ -62,12 +61,11 @@
62
63 def config_path(self, editor: str) -> str:
64 """Return the relative config file path for *editor*."""
65 if editor not in SUPPORTED_EDITORS:
66 raise ValueError(
67 f"Unsupported editor {editor!r}. "
68 f"Choose from: {', '.join(SUPPORTED_EDITORS)}"
69 )
70 return _CONFIG_PATHS[editor]
71
72 def write_config(self, editor: str, base_dir: str = ".") -> Path:
73 """Write the config file to the expected path under *base_dir*.
74
--- navegador/editor.py
+++ navegador/editor.py
@@ -49,12 +49,11 @@
49
50 Raises ValueError for unsupported editors.
51 """
52 if editor not in SUPPORTED_EDITORS:
53 raise ValueError(
54 f"Unsupported editor {editor!r}. Choose from: {', '.join(SUPPORTED_EDITORS)}"
 
55 )
56 return _mcp_block(self.db)
57
58 def config_json(self, editor: str) -> str:
59 """Return the JSON string for *editor*'s config file."""
@@ -62,12 +61,11 @@
61
62 def config_path(self, editor: str) -> str:
63 """Return the relative config file path for *editor*."""
64 if editor not in SUPPORTED_EDITORS:
65 raise ValueError(
66 f"Unsupported editor {editor!r}. Choose from: {', '.join(SUPPORTED_EDITORS)}"
 
67 )
68 return _CONFIG_PATHS[editor]
69
70 def write_config(self, editor: str, base_dir: str = ".") -> Path:
71 """Write the config file to the expected path under *base_dir*.
72
--- navegador/enrichment/express.py
+++ navegador/enrichment/express.py
@@ -9,12 +9,22 @@
99
"""
1010
1111
from navegador.enrichment.base import EnrichmentResult, FrameworkEnricher
1212
1313
# HTTP method prefixes that indicate a route definition
14
-_ROUTE_PREFIXES = ("app.get", "app.post", "app.put", "app.delete", "app.patch", "router.get",
15
- "router.post", "router.put", "router.delete", "router.patch")
14
+_ROUTE_PREFIXES = (
15
+ "app.get",
16
+ "app.post",
17
+ "app.put",
18
+ "app.delete",
19
+ "app.patch",
20
+ "router.get",
21
+ "router.post",
22
+ "router.put",
23
+ "router.delete",
24
+ "router.patch",
25
+)
1626
1727
1828
class ExpressEnricher(FrameworkEnricher):
1929
"""Enricher for Express.js codebases."""
2030
@@ -28,60 +38,72 @@
2838
2939
def enrich(self) -> EnrichmentResult:
3040
result = EnrichmentResult()
3141
3242
# ── Routes: app.<method> or router.<method> patterns ─────────────────
33
- route_rows = self.store.query(
34
- "MATCH (n) WHERE (n.name STARTS WITH 'app.get' "
35
- "OR n.name STARTS WITH 'app.post' "
36
- "OR n.name STARTS WITH 'app.put' "
37
- "OR n.name STARTS WITH 'app.delete' "
38
- "OR n.name STARTS WITH 'app.patch' "
39
- "OR n.name STARTS WITH 'router.get' "
40
- "OR n.name STARTS WITH 'router.post' "
41
- "OR n.name STARTS WITH 'router.put' "
42
- "OR n.name STARTS WITH 'router.delete' "
43
- "OR n.name STARTS WITH 'router.patch') "
44
- "AND n.file_path IS NOT NULL "
45
- "RETURN n.name, n.file_path",
46
- ).result_set or []
43
+ route_rows = (
44
+ self.store.query(
45
+ "MATCH (n) WHERE (n.name STARTS WITH 'app.get' "
46
+ "OR n.name STARTS WITH 'app.post' "
47
+ "OR n.name STARTS WITH 'app.put' "
48
+ "OR n.name STARTS WITH 'app.delete' "
49
+ "OR n.name STARTS WITH 'app.patch' "
50
+ "OR n.name STARTS WITH 'router.get' "
51
+ "OR n.name STARTS WITH 'router.post' "
52
+ "OR n.name STARTS WITH 'router.put' "
53
+ "OR n.name STARTS WITH 'router.delete' "
54
+ "OR n.name STARTS WITH 'router.patch') "
55
+ "AND n.file_path IS NOT NULL "
56
+ "RETURN n.name, n.file_path",
57
+ ).result_set
58
+ or []
59
+ )
4760
for name, file_path in route_rows:
4861
self._promote_node(name, file_path, "ExpressRoute")
4962
result.promoted += 1
5063
result.patterns_found["routes"] = len(route_rows)
5164
5265
# ── Middleware: app.use calls ─────────────────────────────────────────
53
- middleware_rows = self.store.query(
54
- "MATCH (n) WHERE (n.name STARTS WITH 'app.use' "
55
- "OR n.name STARTS WITH 'router.use') "
56
- "AND n.file_path IS NOT NULL "
57
- "RETURN n.name, n.file_path",
58
- ).result_set or []
66
+ middleware_rows = (
67
+ self.store.query(
68
+ "MATCH (n) WHERE (n.name STARTS WITH 'app.use' "
69
+ "OR n.name STARTS WITH 'router.use') "
70
+ "AND n.file_path IS NOT NULL "
71
+ "RETURN n.name, n.file_path",
72
+ ).result_set
73
+ or []
74
+ )
5975
for name, file_path in middleware_rows:
6076
self._promote_node(name, file_path, "ExpressMiddleware")
6177
result.promoted += 1
6278
result.patterns_found["middleware"] = len(middleware_rows)
6379
6480
# ── Controllers: nodes whose file_path contains /controllers/ ─────────
65
- controller_rows = self.store.query(
66
- "MATCH (n) WHERE n.file_path CONTAINS '/controllers/' "
67
- "AND n.name IS NOT NULL "
68
- "RETURN n.name, n.file_path",
69
- ).result_set or []
81
+ controller_rows = (
82
+ self.store.query(
83
+ "MATCH (n) WHERE n.file_path CONTAINS '/controllers/' "
84
+ "AND n.name IS NOT NULL "
85
+ "RETURN n.name, n.file_path",
86
+ ).result_set
87
+ or []
88
+ )
7089
for name, file_path in controller_rows:
7190
self._promote_node(name, file_path, "ExpressController")
7291
result.promoted += 1
7392
result.patterns_found["controllers"] = len(controller_rows)
7493
7594
# ── Routers: Router() / express.Router() instantiations ──────────────
76
- router_rows = self.store.query(
77
- "MATCH (n) WHERE (n.name = 'Router' OR n.name CONTAINS 'Router()' "
78
- "OR n.name CONTAINS 'express.Router') "
79
- "AND n.file_path IS NOT NULL "
80
- "RETURN n.name, n.file_path",
81
- ).result_set or []
95
+ router_rows = (
96
+ self.store.query(
97
+ "MATCH (n) WHERE (n.name = 'Router' OR n.name CONTAINS 'Router()' "
98
+ "OR n.name CONTAINS 'express.Router') "
99
+ "AND n.file_path IS NOT NULL "
100
+ "RETURN n.name, n.file_path",
101
+ ).result_set
102
+ or []
103
+ )
82104
for name, file_path in router_rows:
83105
self._promote_node(name, file_path, "ExpressRouter")
84106
result.promoted += 1
85107
result.patterns_found["routers"] = len(router_rows)
86108
87109
return result
88110
--- navegador/enrichment/express.py
+++ navegador/enrichment/express.py
@@ -9,12 +9,22 @@
9 """
10
11 from navegador.enrichment.base import EnrichmentResult, FrameworkEnricher
12
13 # HTTP method prefixes that indicate a route definition
14 _ROUTE_PREFIXES = ("app.get", "app.post", "app.put", "app.delete", "app.patch", "router.get",
15 "router.post", "router.put", "router.delete", "router.patch")
 
 
 
 
 
 
 
 
 
 
16
17
18 class ExpressEnricher(FrameworkEnricher):
19 """Enricher for Express.js codebases."""
20
@@ -28,60 +38,72 @@
28
29 def enrich(self) -> EnrichmentResult:
30 result = EnrichmentResult()
31
32 # ── Routes: app.<method> or router.<method> patterns ─────────────────
33 route_rows = self.store.query(
34 "MATCH (n) WHERE (n.name STARTS WITH 'app.get' "
35 "OR n.name STARTS WITH 'app.post' "
36 "OR n.name STARTS WITH 'app.put' "
37 "OR n.name STARTS WITH 'app.delete' "
38 "OR n.name STARTS WITH 'app.patch' "
39 "OR n.name STARTS WITH 'router.get' "
40 "OR n.name STARTS WITH 'router.post' "
41 "OR n.name STARTS WITH 'router.put' "
42 "OR n.name STARTS WITH 'router.delete' "
43 "OR n.name STARTS WITH 'router.patch') "
44 "AND n.file_path IS NOT NULL "
45 "RETURN n.name, n.file_path",
46 ).result_set or []
 
 
 
47 for name, file_path in route_rows:
48 self._promote_node(name, file_path, "ExpressRoute")
49 result.promoted += 1
50 result.patterns_found["routes"] = len(route_rows)
51
52 # ── Middleware: app.use calls ─────────────────────────────────────────
53 middleware_rows = self.store.query(
54 "MATCH (n) WHERE (n.name STARTS WITH 'app.use' "
55 "OR n.name STARTS WITH 'router.use') "
56 "AND n.file_path IS NOT NULL "
57 "RETURN n.name, n.file_path",
58 ).result_set or []
 
 
 
59 for name, file_path in middleware_rows:
60 self._promote_node(name, file_path, "ExpressMiddleware")
61 result.promoted += 1
62 result.patterns_found["middleware"] = len(middleware_rows)
63
64 # ── Controllers: nodes whose file_path contains /controllers/ ─────────
65 controller_rows = self.store.query(
66 "MATCH (n) WHERE n.file_path CONTAINS '/controllers/' "
67 "AND n.name IS NOT NULL "
68 "RETURN n.name, n.file_path",
69 ).result_set or []
 
 
 
70 for name, file_path in controller_rows:
71 self._promote_node(name, file_path, "ExpressController")
72 result.promoted += 1
73 result.patterns_found["controllers"] = len(controller_rows)
74
75 # ── Routers: Router() / express.Router() instantiations ──────────────
76 router_rows = self.store.query(
77 "MATCH (n) WHERE (n.name = 'Router' OR n.name CONTAINS 'Router()' "
78 "OR n.name CONTAINS 'express.Router') "
79 "AND n.file_path IS NOT NULL "
80 "RETURN n.name, n.file_path",
81 ).result_set or []
 
 
 
82 for name, file_path in router_rows:
83 self._promote_node(name, file_path, "ExpressRouter")
84 result.promoted += 1
85 result.patterns_found["routers"] = len(router_rows)
86
87 return result
88
--- navegador/enrichment/express.py
+++ navegador/enrichment/express.py
@@ -9,12 +9,22 @@
9 """
10
11 from navegador.enrichment.base import EnrichmentResult, FrameworkEnricher
12
13 # HTTP method prefixes that indicate a route definition
14 _ROUTE_PREFIXES = (
15 "app.get",
16 "app.post",
17 "app.put",
18 "app.delete",
19 "app.patch",
20 "router.get",
21 "router.post",
22 "router.put",
23 "router.delete",
24 "router.patch",
25 )
26
27
28 class ExpressEnricher(FrameworkEnricher):
29 """Enricher for Express.js codebases."""
30
@@ -28,60 +38,72 @@
38
39 def enrich(self) -> EnrichmentResult:
40 result = EnrichmentResult()
41
42 # ── Routes: app.<method> or router.<method> patterns ─────────────────
43 route_rows = (
44 self.store.query(
45 "MATCH (n) WHERE (n.name STARTS WITH 'app.get' "
46 "OR n.name STARTS WITH 'app.post' "
47 "OR n.name STARTS WITH 'app.put' "
48 "OR n.name STARTS WITH 'app.delete' "
49 "OR n.name STARTS WITH 'app.patch' "
50 "OR n.name STARTS WITH 'router.get' "
51 "OR n.name STARTS WITH 'router.post' "
52 "OR n.name STARTS WITH 'router.put' "
53 "OR n.name STARTS WITH 'router.delete' "
54 "OR n.name STARTS WITH 'router.patch') "
55 "AND n.file_path IS NOT NULL "
56 "RETURN n.name, n.file_path",
57 ).result_set
58 or []
59 )
60 for name, file_path in route_rows:
61 self._promote_node(name, file_path, "ExpressRoute")
62 result.promoted += 1
63 result.patterns_found["routes"] = len(route_rows)
64
65 # ── Middleware: app.use calls ─────────────────────────────────────────
66 middleware_rows = (
67 self.store.query(
68 "MATCH (n) WHERE (n.name STARTS WITH 'app.use' "
69 "OR n.name STARTS WITH 'router.use') "
70 "AND n.file_path IS NOT NULL "
71 "RETURN n.name, n.file_path",
72 ).result_set
73 or []
74 )
75 for name, file_path in middleware_rows:
76 self._promote_node(name, file_path, "ExpressMiddleware")
77 result.promoted += 1
78 result.patterns_found["middleware"] = len(middleware_rows)
79
80 # ── Controllers: nodes whose file_path contains /controllers/ ─────────
81 controller_rows = (
82 self.store.query(
83 "MATCH (n) WHERE n.file_path CONTAINS '/controllers/' "
84 "AND n.name IS NOT NULL "
85 "RETURN n.name, n.file_path",
86 ).result_set
87 or []
88 )
89 for name, file_path in controller_rows:
90 self._promote_node(name, file_path, "ExpressController")
91 result.promoted += 1
92 result.patterns_found["controllers"] = len(controller_rows)
93
94 # ── Routers: Router() / express.Router() instantiations ──────────────
95 router_rows = (
96 self.store.query(
97 "MATCH (n) WHERE (n.name = 'Router' OR n.name CONTAINS 'Router()' "
98 "OR n.name CONTAINS 'express.Router') "
99 "AND n.file_path IS NOT NULL "
100 "RETURN n.name, n.file_path",
101 ).result_set
102 or []
103 )
104 for name, file_path in router_rows:
105 self._promote_node(name, file_path, "ExpressRouter")
106 result.promoted += 1
107 result.patterns_found["routers"] = len(router_rows)
108
109 return result
110
--- navegador/enrichment/fastapi.py
+++ navegador/enrichment/fastapi.py
@@ -76,12 +76,11 @@
7676
)
7777
rows = result.result_set or []
7878
for row in rows:
7979
name, file_path = row[0], row[1]
8080
if name and file_path:
81
- self._promote_node(name, file_path, "Route",
82
- {"http_method": http_method})
81
+ self._promote_node(name, file_path, "Route", {"http_method": http_method})
8382
promoted += 1
8483
8584
# Strategy 2: signature / docstring heuristics (no Decorator nodes)
8685
for http_method in _HTTP_METHODS:
8786
for prop in ("signature", "docstring"):
@@ -94,12 +93,11 @@
9493
)
9594
rows = result.result_set or []
9695
for row in rows:
9796
name, file_path = row[0], row[1]
9897
if name and file_path:
99
- self._promote_node(name, file_path, "Route",
100
- {"http_method": http_method})
98
+ self._promote_node(name, file_path, "Route", {"http_method": http_method})
10199
promoted += 1
102100
103101
return promoted
104102
105103
def _enrich_dependencies(self) -> int:
106104
--- navegador/enrichment/fastapi.py
+++ navegador/enrichment/fastapi.py
@@ -76,12 +76,11 @@
76 )
77 rows = result.result_set or []
78 for row in rows:
79 name, file_path = row[0], row[1]
80 if name and file_path:
81 self._promote_node(name, file_path, "Route",
82 {"http_method": http_method})
83 promoted += 1
84
85 # Strategy 2: signature / docstring heuristics (no Decorator nodes)
86 for http_method in _HTTP_METHODS:
87 for prop in ("signature", "docstring"):
@@ -94,12 +93,11 @@
94 )
95 rows = result.result_set or []
96 for row in rows:
97 name, file_path = row[0], row[1]
98 if name and file_path:
99 self._promote_node(name, file_path, "Route",
100 {"http_method": http_method})
101 promoted += 1
102
103 return promoted
104
105 def _enrich_dependencies(self) -> int:
106
--- navegador/enrichment/fastapi.py
+++ navegador/enrichment/fastapi.py
@@ -76,12 +76,11 @@
76 )
77 rows = result.result_set or []
78 for row in rows:
79 name, file_path = row[0], row[1]
80 if name and file_path:
81 self._promote_node(name, file_path, "Route", {"http_method": http_method})
 
82 promoted += 1
83
84 # Strategy 2: signature / docstring heuristics (no Decorator nodes)
85 for http_method in _HTTP_METHODS:
86 for prop in ("signature", "docstring"):
@@ -94,12 +93,11 @@
93 )
94 rows = result.result_set or []
95 for row in rows:
96 name, file_path = row[0], row[1]
97 if name and file_path:
98 self._promote_node(name, file_path, "Route", {"http_method": http_method})
 
99 promoted += 1
100
101 return promoted
102
103 def _enrich_dependencies(self) -> int:
104
--- navegador/enrichment/react.py
+++ navegador/enrichment/react.py
@@ -25,62 +25,77 @@
2525
2626
def enrich(self) -> EnrichmentResult:
2727
result = EnrichmentResult()
2828
2929
# ── Components: functions/classes defined in .jsx or .tsx files ──────
30
- component_rows = self.store.query(
31
- "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') "
32
- "AND n.name IS NOT NULL "
33
- "RETURN n.name, n.file_path",
34
- ).result_set or []
30
+ component_rows = (
31
+ self.store.query(
32
+ "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') "
33
+ "AND n.name IS NOT NULL "
34
+ "RETURN n.name, n.file_path",
35
+ ).result_set
36
+ or []
37
+ )
3538
for name, file_path in component_rows:
3639
self._promote_node(name, file_path, "ReactComponent")
3740
result.promoted += 1
3841
result.patterns_found["components"] = len(component_rows)
3942
4043
# ── Pages: nodes whose file_path contains /pages/ ────────────────────
41
- page_rows = self.store.query(
42
- "MATCH (n) WHERE n.file_path CONTAINS '/pages/' "
43
- "AND NOT n.file_path CONTAINS '/pages/api/' "
44
- "AND n.name IS NOT NULL "
45
- "RETURN n.name, n.file_path",
46
- ).result_set or []
44
+ page_rows = (
45
+ self.store.query(
46
+ "MATCH (n) WHERE n.file_path CONTAINS '/pages/' "
47
+ "AND NOT n.file_path CONTAINS '/pages/api/' "
48
+ "AND n.name IS NOT NULL "
49
+ "RETURN n.name, n.file_path",
50
+ ).result_set
51
+ or []
52
+ )
4753
for name, file_path in page_rows:
4854
self._promote_node(name, file_path, "NextPage")
4955
result.promoted += 1
5056
result.patterns_found["pages"] = len(page_rows)
5157
5258
# ── API Routes: nodes under pages/api/ or app/api/ ───────────────────
53
- api_rows = self.store.query(
54
- "MATCH (n) WHERE (n.file_path CONTAINS '/pages/api/' "
55
- "OR n.file_path CONTAINS '/app/api/') "
56
- "AND n.name IS NOT NULL "
57
- "RETURN n.name, n.file_path",
58
- ).result_set or []
59
+ api_rows = (
60
+ self.store.query(
61
+ "MATCH (n) WHERE (n.file_path CONTAINS '/pages/api/' "
62
+ "OR n.file_path CONTAINS '/app/api/') "
63
+ "AND n.name IS NOT NULL "
64
+ "RETURN n.name, n.file_path",
65
+ ).result_set
66
+ or []
67
+ )
5968
for name, file_path in api_rows:
6069
self._promote_node(name, file_path, "NextApiRoute")
6170
result.promoted += 1
6271
result.patterns_found["api_routes"] = len(api_rows)
6372
6473
# ── Hooks: functions whose name starts with "use" ────────────────────
65
- hook_rows = self.store.query(
66
- "MATCH (n) WHERE n.name STARTS WITH 'use' "
67
- "AND n.name <> 'use' "
68
- "AND n.file_path IS NOT NULL "
69
- "RETURN n.name, n.file_path",
70
- ).result_set or []
74
+ hook_rows = (
75
+ self.store.query(
76
+ "MATCH (n) WHERE n.name STARTS WITH 'use' "
77
+ "AND n.name <> 'use' "
78
+ "AND n.file_path IS NOT NULL "
79
+ "RETURN n.name, n.file_path",
80
+ ).result_set
81
+ or []
82
+ )
7183
for name, file_path in hook_rows:
7284
self._promote_node(name, file_path, "ReactHook")
7385
result.promoted += 1
7486
result.patterns_found["hooks"] = len(hook_rows)
7587
7688
# ── Stores: createStore / useStore patterns ───────────────────────────
77
- store_rows = self.store.query(
78
- "MATCH (n) WHERE (n.name CONTAINS 'createStore' OR n.name CONTAINS 'useStore') "
79
- "AND n.file_path IS NOT NULL "
80
- "RETURN n.name, n.file_path",
81
- ).result_set or []
89
+ store_rows = (
90
+ self.store.query(
91
+ "MATCH (n) WHERE (n.name CONTAINS 'createStore' OR n.name CONTAINS 'useStore') "
92
+ "AND n.file_path IS NOT NULL "
93
+ "RETURN n.name, n.file_path",
94
+ ).result_set
95
+ or []
96
+ )
8297
for name, file_path in store_rows:
8398
self._promote_node(name, file_path, "ReactStore")
8499
result.promoted += 1
85100
result.patterns_found["stores"] = len(store_rows)
86101
87102
--- navegador/enrichment/react.py
+++ navegador/enrichment/react.py
@@ -25,62 +25,77 @@
25
26 def enrich(self) -> EnrichmentResult:
27 result = EnrichmentResult()
28
29 # ── Components: functions/classes defined in .jsx or .tsx files ──────
30 component_rows = self.store.query(
31 "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') "
32 "AND n.name IS NOT NULL "
33 "RETURN n.name, n.file_path",
34 ).result_set or []
 
 
 
35 for name, file_path in component_rows:
36 self._promote_node(name, file_path, "ReactComponent")
37 result.promoted += 1
38 result.patterns_found["components"] = len(component_rows)
39
40 # ── Pages: nodes whose file_path contains /pages/ ────────────────────
41 page_rows = self.store.query(
42 "MATCH (n) WHERE n.file_path CONTAINS '/pages/' "
43 "AND NOT n.file_path CONTAINS '/pages/api/' "
44 "AND n.name IS NOT NULL "
45 "RETURN n.name, n.file_path",
46 ).result_set or []
 
 
 
47 for name, file_path in page_rows:
48 self._promote_node(name, file_path, "NextPage")
49 result.promoted += 1
50 result.patterns_found["pages"] = len(page_rows)
51
52 # ── API Routes: nodes under pages/api/ or app/api/ ───────────────────
53 api_rows = self.store.query(
54 "MATCH (n) WHERE (n.file_path CONTAINS '/pages/api/' "
55 "OR n.file_path CONTAINS '/app/api/') "
56 "AND n.name IS NOT NULL "
57 "RETURN n.name, n.file_path",
58 ).result_set or []
 
 
 
59 for name, file_path in api_rows:
60 self._promote_node(name, file_path, "NextApiRoute")
61 result.promoted += 1
62 result.patterns_found["api_routes"] = len(api_rows)
63
64 # ── Hooks: functions whose name starts with "use" ────────────────────
65 hook_rows = self.store.query(
66 "MATCH (n) WHERE n.name STARTS WITH 'use' "
67 "AND n.name <> 'use' "
68 "AND n.file_path IS NOT NULL "
69 "RETURN n.name, n.file_path",
70 ).result_set or []
 
 
 
71 for name, file_path in hook_rows:
72 self._promote_node(name, file_path, "ReactHook")
73 result.promoted += 1
74 result.patterns_found["hooks"] = len(hook_rows)
75
76 # ── Stores: createStore / useStore patterns ───────────────────────────
77 store_rows = self.store.query(
78 "MATCH (n) WHERE (n.name CONTAINS 'createStore' OR n.name CONTAINS 'useStore') "
79 "AND n.file_path IS NOT NULL "
80 "RETURN n.name, n.file_path",
81 ).result_set or []
 
 
 
82 for name, file_path in store_rows:
83 self._promote_node(name, file_path, "ReactStore")
84 result.promoted += 1
85 result.patterns_found["stores"] = len(store_rows)
86
87
--- navegador/enrichment/react.py
+++ navegador/enrichment/react.py
@@ -25,62 +25,77 @@
25
26 def enrich(self) -> EnrichmentResult:
27 result = EnrichmentResult()
28
29 # ── Components: functions/classes defined in .jsx or .tsx files ──────
30 component_rows = (
31 self.store.query(
32 "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') "
33 "AND n.name IS NOT NULL "
34 "RETURN n.name, n.file_path",
35 ).result_set
36 or []
37 )
38 for name, file_path in component_rows:
39 self._promote_node(name, file_path, "ReactComponent")
40 result.promoted += 1
41 result.patterns_found["components"] = len(component_rows)
42
43 # ── Pages: nodes whose file_path contains /pages/ ────────────────────
44 page_rows = (
45 self.store.query(
46 "MATCH (n) WHERE n.file_path CONTAINS '/pages/' "
47 "AND NOT n.file_path CONTAINS '/pages/api/' "
48 "AND n.name IS NOT NULL "
49 "RETURN n.name, n.file_path",
50 ).result_set
51 or []
52 )
53 for name, file_path in page_rows:
54 self._promote_node(name, file_path, "NextPage")
55 result.promoted += 1
56 result.patterns_found["pages"] = len(page_rows)
57
58 # ── API Routes: nodes under pages/api/ or app/api/ ───────────────────
59 api_rows = (
60 self.store.query(
61 "MATCH (n) WHERE (n.file_path CONTAINS '/pages/api/' "
62 "OR n.file_path CONTAINS '/app/api/') "
63 "AND n.name IS NOT NULL "
64 "RETURN n.name, n.file_path",
65 ).result_set
66 or []
67 )
68 for name, file_path in api_rows:
69 self._promote_node(name, file_path, "NextApiRoute")
70 result.promoted += 1
71 result.patterns_found["api_routes"] = len(api_rows)
72
73 # ── Hooks: functions whose name starts with "use" ────────────────────
74 hook_rows = (
75 self.store.query(
76 "MATCH (n) WHERE n.name STARTS WITH 'use' "
77 "AND n.name <> 'use' "
78 "AND n.file_path IS NOT NULL "
79 "RETURN n.name, n.file_path",
80 ).result_set
81 or []
82 )
83 for name, file_path in hook_rows:
84 self._promote_node(name, file_path, "ReactHook")
85 result.promoted += 1
86 result.patterns_found["hooks"] = len(hook_rows)
87
88 # ── Stores: createStore / useStore patterns ───────────────────────────
89 store_rows = (
90 self.store.query(
91 "MATCH (n) WHERE (n.name CONTAINS 'createStore' OR n.name CONTAINS 'useStore') "
92 "AND n.file_path IS NOT NULL "
93 "RETURN n.name, n.file_path",
94 ).result_set
95 or []
96 )
97 for name, file_path in store_rows:
98 self._promote_node(name, file_path, "ReactStore")
99 result.promoted += 1
100 result.patterns_found["stores"] = len(store_rows)
101
102
--- navegador/enrichment/react_native.py
+++ navegador/enrichment/react_native.py
@@ -34,53 +34,63 @@
3434
3535
def enrich(self) -> EnrichmentResult:
3636
result = EnrichmentResult()
3737
3838
# ── Components: functions/classes in .jsx or .tsx files ──────────────
39
- component_rows = self.store.query(
40
- "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') "
41
- "AND n.name IS NOT NULL "
42
- "RETURN n.name, n.file_path",
43
- ).result_set or []
39
+ component_rows = (
40
+ self.store.query(
41
+ "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') "
42
+ "AND n.name IS NOT NULL "
43
+ "RETURN n.name, n.file_path",
44
+ ).result_set
45
+ or []
46
+ )
4447
for name, file_path in component_rows:
4548
self._promote_node(name, file_path, "RNComponent")
4649
result.promoted += 1
4750
result.patterns_found["components"] = len(component_rows)
4851
4952
# ── Screens: nodes under screens/ or whose names end with "Screen" ───
50
- screen_rows = self.store.query(
51
- "MATCH (n) WHERE (n.file_path CONTAINS '/screens/' "
52
- "OR (n.name IS NOT NULL AND n.name ENDS WITH 'Screen')) "
53
- "AND n.name IS NOT NULL "
54
- "RETURN n.name, n.file_path",
55
- ).result_set or []
53
+ screen_rows = (
54
+ self.store.query(
55
+ "MATCH (n) WHERE (n.file_path CONTAINS '/screens/' "
56
+ "OR (n.name IS NOT NULL AND n.name ENDS WITH 'Screen')) "
57
+ "AND n.name IS NOT NULL "
58
+ "RETURN n.name, n.file_path",
59
+ ).result_set
60
+ or []
61
+ )
5662
for name, file_path in screen_rows:
5763
self._promote_node(name, file_path, "RNScreen")
5864
result.promoted += 1
5965
result.patterns_found["screens"] = len(screen_rows)
6066
6167
# ── Hooks: functions whose name starts with "use" ─────────────────────
62
- hook_rows = self.store.query(
63
- "MATCH (n) WHERE n.name STARTS WITH 'use' "
64
- "AND n.name <> 'use' "
65
- "AND n.file_path IS NOT NULL "
66
- "RETURN n.name, n.file_path",
67
- ).result_set or []
68
+ hook_rows = (
69
+ self.store.query(
70
+ "MATCH (n) WHERE n.name STARTS WITH 'use' "
71
+ "AND n.name <> 'use' "
72
+ "AND n.file_path IS NOT NULL "
73
+ "RETURN n.name, n.file_path",
74
+ ).result_set
75
+ or []
76
+ )
6877
for name, file_path in hook_rows:
6978
self._promote_node(name, file_path, "RNHook")
7079
result.promoted += 1
7180
result.patterns_found["hooks"] = len(hook_rows)
7281
7382
# ── Navigation: navigator factory / container patterns ────────────────
74
- nav_conditions = " OR ".join(
75
- f"n.name CONTAINS '{pat}'" for pat in _NAVIGATION_PATTERNS
76
- )
77
- nav_rows = self.store.query(
78
- f"MATCH (n) WHERE ({nav_conditions}) "
79
- "AND n.file_path IS NOT NULL "
80
- "RETURN n.name, n.file_path",
81
- ).result_set or []
83
+ nav_conditions = " OR ".join(f"n.name CONTAINS '{pat}'" for pat in _NAVIGATION_PATTERNS)
84
+ nav_rows = (
85
+ self.store.query(
86
+ f"MATCH (n) WHERE ({nav_conditions}) "
87
+ "AND n.file_path IS NOT NULL "
88
+ "RETURN n.name, n.file_path",
89
+ ).result_set
90
+ or []
91
+ )
8292
for name, file_path in nav_rows:
8393
self._promote_node(name, file_path, "RNNavigation")
8494
result.promoted += 1
8595
result.patterns_found["navigation"] = len(nav_rows)
8696
8797
--- navegador/enrichment/react_native.py
+++ navegador/enrichment/react_native.py
@@ -34,53 +34,63 @@
34
35 def enrich(self) -> EnrichmentResult:
36 result = EnrichmentResult()
37
38 # ── Components: functions/classes in .jsx or .tsx files ──────────────
39 component_rows = self.store.query(
40 "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') "
41 "AND n.name IS NOT NULL "
42 "RETURN n.name, n.file_path",
43 ).result_set or []
 
 
 
44 for name, file_path in component_rows:
45 self._promote_node(name, file_path, "RNComponent")
46 result.promoted += 1
47 result.patterns_found["components"] = len(component_rows)
48
49 # ── Screens: nodes under screens/ or whose names end with "Screen" ───
50 screen_rows = self.store.query(
51 "MATCH (n) WHERE (n.file_path CONTAINS '/screens/' "
52 "OR (n.name IS NOT NULL AND n.name ENDS WITH 'Screen')) "
53 "AND n.name IS NOT NULL "
54 "RETURN n.name, n.file_path",
55 ).result_set or []
 
 
 
56 for name, file_path in screen_rows:
57 self._promote_node(name, file_path, "RNScreen")
58 result.promoted += 1
59 result.patterns_found["screens"] = len(screen_rows)
60
61 # ── Hooks: functions whose name starts with "use" ─────────────────────
62 hook_rows = self.store.query(
63 "MATCH (n) WHERE n.name STARTS WITH 'use' "
64 "AND n.name <> 'use' "
65 "AND n.file_path IS NOT NULL "
66 "RETURN n.name, n.file_path",
67 ).result_set or []
 
 
 
68 for name, file_path in hook_rows:
69 self._promote_node(name, file_path, "RNHook")
70 result.promoted += 1
71 result.patterns_found["hooks"] = len(hook_rows)
72
73 # ── Navigation: navigator factory / container patterns ────────────────
74 nav_conditions = " OR ".join(
75 f"n.name CONTAINS '{pat}'" for pat in _NAVIGATION_PATTERNS
76 )
77 nav_rows = self.store.query(
78 f"MATCH (n) WHERE ({nav_conditions}) "
79 "AND n.file_path IS NOT NULL "
80 "RETURN n.name, n.file_path",
81 ).result_set or []
 
82 for name, file_path in nav_rows:
83 self._promote_node(name, file_path, "RNNavigation")
84 result.promoted += 1
85 result.patterns_found["navigation"] = len(nav_rows)
86
87
--- navegador/enrichment/react_native.py
+++ navegador/enrichment/react_native.py
@@ -34,53 +34,63 @@
34
35 def enrich(self) -> EnrichmentResult:
36 result = EnrichmentResult()
37
38 # ── Components: functions/classes in .jsx or .tsx files ──────────────
39 component_rows = (
40 self.store.query(
41 "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') "
42 "AND n.name IS NOT NULL "
43 "RETURN n.name, n.file_path",
44 ).result_set
45 or []
46 )
47 for name, file_path in component_rows:
48 self._promote_node(name, file_path, "RNComponent")
49 result.promoted += 1
50 result.patterns_found["components"] = len(component_rows)
51
52 # ── Screens: nodes under screens/ or whose names end with "Screen" ───
53 screen_rows = (
54 self.store.query(
55 "MATCH (n) WHERE (n.file_path CONTAINS '/screens/' "
56 "OR (n.name IS NOT NULL AND n.name ENDS WITH 'Screen')) "
57 "AND n.name IS NOT NULL "
58 "RETURN n.name, n.file_path",
59 ).result_set
60 or []
61 )
62 for name, file_path in screen_rows:
63 self._promote_node(name, file_path, "RNScreen")
64 result.promoted += 1
65 result.patterns_found["screens"] = len(screen_rows)
66
67 # ── Hooks: functions whose name starts with "use" ─────────────────────
68 hook_rows = (
69 self.store.query(
70 "MATCH (n) WHERE n.name STARTS WITH 'use' "
71 "AND n.name <> 'use' "
72 "AND n.file_path IS NOT NULL "
73 "RETURN n.name, n.file_path",
74 ).result_set
75 or []
76 )
77 for name, file_path in hook_rows:
78 self._promote_node(name, file_path, "RNHook")
79 result.promoted += 1
80 result.patterns_found["hooks"] = len(hook_rows)
81
82 # ── Navigation: navigator factory / container patterns ────────────────
83 nav_conditions = " OR ".join(f"n.name CONTAINS '{pat}'" for pat in _NAVIGATION_PATTERNS)
84 nav_rows = (
85 self.store.query(
86 f"MATCH (n) WHERE ({nav_conditions}) "
87 "AND n.file_path IS NOT NULL "
88 "RETURN n.name, n.file_path",
89 ).result_set
90 or []
91 )
92 for name, file_path in nav_rows:
93 self._promote_node(name, file_path, "RNNavigation")
94 result.promoted += 1
95 result.patterns_found["navigation"] = len(nav_rows)
96
97
--- navegador/explorer/server.py
+++ navegador/explorer/server.py
@@ -51,16 +51,18 @@
5151
)
5252
result = []
5353
for row in rows:
5454
nid, label, name, props = row[0], row[1], row[2], row[3]
5555
node_props = dict(props) if isinstance(props, dict) else {}
56
- result.append({
57
- "id": str(nid),
58
- "label": label or "default",
59
- "name": name or str(nid),
60
- "props": node_props,
61
- })
56
+ result.append(
57
+ {
58
+ "id": str(nid),
59
+ "label": label or "default",
60
+ "name": name or str(nid),
61
+ "props": node_props,
62
+ }
63
+ )
6264
return result
6365
6466
6567
def _get_all_edges(store: "GraphStore") -> list[dict]:
6668
rows = _query(
@@ -85,16 +87,18 @@
8587
"LIMIT $limit",
8688
{"q": q, "limit": limit},
8789
)
8890
result = []
8991
for row in rows:
90
- result.append({
91
- "label": row[0] or "",
92
- "name": row[1] or "",
93
- "file_path": row[2] or "",
94
- "domain": row[3] or "",
95
- })
92
+ result.append(
93
+ {
94
+ "label": row[0] or "",
95
+ "name": row[1] or "",
96
+ "file_path": row[2] or "",
97
+ "domain": row[3] or "",
98
+ }
99
+ )
96100
return result
97101
98102
99103
def _get_node_detail(store: "GraphStore", name: str) -> dict:
100104
# Node properties
@@ -210,11 +214,11 @@
210214
results = _search_nodes(self._store, q) if q else []
211215
self._send_json({"nodes": results})
212216
213217
# ── Node detail — /api/node/<name>
214218
elif path.startswith("/api/node/"):
215
- raw_name = path[len("/api/node/"):]
219
+ raw_name = path[len("/api/node/") :]
216220
name = unquote(raw_name)
217221
detail = _get_node_detail(self._store, name)
218222
self._send_json(detail)
219223
220224
# ── Stats
221225
--- navegador/explorer/server.py
+++ navegador/explorer/server.py
@@ -51,16 +51,18 @@
51 )
52 result = []
53 for row in rows:
54 nid, label, name, props = row[0], row[1], row[2], row[3]
55 node_props = dict(props) if isinstance(props, dict) else {}
56 result.append({
57 "id": str(nid),
58 "label": label or "default",
59 "name": name or str(nid),
60 "props": node_props,
61 })
 
 
62 return result
63
64
65 def _get_all_edges(store: "GraphStore") -> list[dict]:
66 rows = _query(
@@ -85,16 +87,18 @@
85 "LIMIT $limit",
86 {"q": q, "limit": limit},
87 )
88 result = []
89 for row in rows:
90 result.append({
91 "label": row[0] or "",
92 "name": row[1] or "",
93 "file_path": row[2] or "",
94 "domain": row[3] or "",
95 })
 
 
96 return result
97
98
99 def _get_node_detail(store: "GraphStore", name: str) -> dict:
100 # Node properties
@@ -210,11 +214,11 @@
210 results = _search_nodes(self._store, q) if q else []
211 self._send_json({"nodes": results})
212
213 # ── Node detail — /api/node/<name>
214 elif path.startswith("/api/node/"):
215 raw_name = path[len("/api/node/"):]
216 name = unquote(raw_name)
217 detail = _get_node_detail(self._store, name)
218 self._send_json(detail)
219
220 # ── Stats
221
--- navegador/explorer/server.py
+++ navegador/explorer/server.py
@@ -51,16 +51,18 @@
51 )
52 result = []
53 for row in rows:
54 nid, label, name, props = row[0], row[1], row[2], row[3]
55 node_props = dict(props) if isinstance(props, dict) else {}
56 result.append(
57 {
58 "id": str(nid),
59 "label": label or "default",
60 "name": name or str(nid),
61 "props": node_props,
62 }
63 )
64 return result
65
66
67 def _get_all_edges(store: "GraphStore") -> list[dict]:
68 rows = _query(
@@ -85,16 +87,18 @@
87 "LIMIT $limit",
88 {"q": q, "limit": limit},
89 )
90 result = []
91 for row in rows:
92 result.append(
93 {
94 "label": row[0] or "",
95 "name": row[1] or "",
96 "file_path": row[2] or "",
97 "domain": row[3] or "",
98 }
99 )
100 return result
101
102
103 def _get_node_detail(store: "GraphStore", name: str) -> dict:
104 # Node properties
@@ -210,11 +214,11 @@
214 results = _search_nodes(self._store, q) if q else []
215 self._send_json({"nodes": results})
216
217 # ── Node detail — /api/node/<name>
218 elif path.startswith("/api/node/"):
219 raw_name = path[len("/api/node/") :]
220 name = unquote(raw_name)
221 detail = _get_node_detail(self._store, name)
222 self._send_json(detail)
223
224 # ── Stats
225
--- navegador/explorer/templates.py
+++ navegador/explorer/templates.py
@@ -28,13 +28,11 @@
2828
"WikiPage": "#d2b4de",
2929
"Person": "#fadbd8",
3030
"default": "#aaaaaa",
3131
}
3232
33
-_COLORS_JS = "\n".join(
34
- f" '{label}': '{color}'," for label, color in NODE_COLORS.items()
35
-)
33
+_COLORS_JS = "\n".join(f" '{label}': '{color}'," for label, color in NODE_COLORS.items())
3634
3735
HTML_TEMPLATE = """<!DOCTYPE html>
3836
<html lang="en">
3937
<head>
4038
<meta charset="UTF-8">
4139
--- navegador/explorer/templates.py
+++ navegador/explorer/templates.py
@@ -28,13 +28,11 @@
28 "WikiPage": "#d2b4de",
29 "Person": "#fadbd8",
30 "default": "#aaaaaa",
31 }
32
33 _COLORS_JS = "\n".join(
34 f" '{label}': '{color}'," for label, color in NODE_COLORS.items()
35 )
36
37 HTML_TEMPLATE = """<!DOCTYPE html>
38 <html lang="en">
39 <head>
40 <meta charset="UTF-8">
41
--- navegador/explorer/templates.py
+++ navegador/explorer/templates.py
@@ -28,13 +28,11 @@
28 "WikiPage": "#d2b4de",
29 "Person": "#fadbd8",
30 "default": "#aaaaaa",
31 }
32
33 _COLORS_JS = "\n".join(f" '{label}': '{color}'," for label, color in NODE_COLORS.items())
 
 
34
35 HTML_TEMPLATE = """<!DOCTYPE html>
36 <html lang="en">
37 <head>
38 <meta charset="UTF-8">
39
--- navegador/graph/export.py
+++ navegador/graph/export.py
@@ -32,12 +32,17 @@
3232
nodes = _export_nodes(store)
3333
edges = _export_edges(store)
3434
3535
# Sort for deterministic output
3636
nodes.sort(key=lambda n: (n["label"], json.dumps(n["props"], sort_keys=True)))
37
- edges.sort(key=lambda e: (e["type"], json.dumps(e["from"], sort_keys=True),
38
- json.dumps(e["to"], sort_keys=True)))
37
+ edges.sort(
38
+ key=lambda e: (
39
+ e["type"],
40
+ json.dumps(e["from"], sort_keys=True),
41
+ json.dumps(e["to"], sort_keys=True),
42
+ )
43
+ )
3944
4045
with output_path.open("w", encoding="utf-8") as f:
4146
for node in nodes:
4247
f.write(json.dumps(node, sort_keys=True) + "\n")
4348
for edge in edges:
@@ -87,13 +92,11 @@
8792
return {"nodes": node_count, "edges": edge_count}
8893
8994
9095
def _export_nodes(store: GraphStore) -> list[dict]:
9196
"""Export all nodes with their labels and properties."""
92
- result = store.query(
93
- "MATCH (n) RETURN labels(n)[0] AS label, properties(n) AS props"
94
- )
97
+ result = store.query("MATCH (n) RETURN labels(n)[0] AS label, properties(n) AS props")
9598
nodes = []
9699
for row in result.result_set or []:
97100
label = row[0]
98101
props = row[1] if isinstance(row[1], dict) else {}
99102
nodes.append({"kind": "node", "label": label, "props": props})
@@ -109,16 +112,18 @@
109112
"labels(b)[0] AS to_label, b.name AS to_name, "
110113
"coalesce(b.file_path, b.path, '') AS to_path"
111114
)
112115
edges = []
113116
for row in result.result_set or []:
114
- edges.append({
115
- "kind": "edge",
116
- "type": row[0],
117
- "from": {"label": row[1], "name": row[2], "path": row[3]},
118
- "to": {"label": row[4], "name": row[5], "path": row[6]},
119
- })
117
+ edges.append(
118
+ {
119
+ "kind": "edge",
120
+ "type": row[0],
121
+ "from": {"label": row[1], "name": row[2], "path": row[3]},
122
+ "to": {"label": row[4], "name": row[5], "path": row[6]},
123
+ }
124
+ )
120125
return edges
121126
122127
123128
def _import_node(store: GraphStore, record: dict) -> None:
124129
"""Create a node from an export record."""
@@ -143,12 +148,12 @@
143148
"""Create an edge from an export record."""
144149
edge_type = record["type"]
145150
from_info = record["from"]
146151
to_info = record["to"]
147152
148
- from_key = f"name: $from_name"
149
- to_key = f"name: $to_name"
153
+ from_key = "name: $from_name"
154
+ to_key = "name: $to_name"
150155
151156
params = {
152157
"from_name": from_info["name"],
153158
"to_name": to_info["name"],
154159
}
155160
--- navegador/graph/export.py
+++ navegador/graph/export.py
@@ -32,12 +32,17 @@
32 nodes = _export_nodes(store)
33 edges = _export_edges(store)
34
35 # Sort for deterministic output
36 nodes.sort(key=lambda n: (n["label"], json.dumps(n["props"], sort_keys=True)))
37 edges.sort(key=lambda e: (e["type"], json.dumps(e["from"], sort_keys=True),
38 json.dumps(e["to"], sort_keys=True)))
 
 
 
 
 
39
40 with output_path.open("w", encoding="utf-8") as f:
41 for node in nodes:
42 f.write(json.dumps(node, sort_keys=True) + "\n")
43 for edge in edges:
@@ -87,13 +92,11 @@
87 return {"nodes": node_count, "edges": edge_count}
88
89
90 def _export_nodes(store: GraphStore) -> list[dict]:
91 """Export all nodes with their labels and properties."""
92 result = store.query(
93 "MATCH (n) RETURN labels(n)[0] AS label, properties(n) AS props"
94 )
95 nodes = []
96 for row in result.result_set or []:
97 label = row[0]
98 props = row[1] if isinstance(row[1], dict) else {}
99 nodes.append({"kind": "node", "label": label, "props": props})
@@ -109,16 +112,18 @@
109 "labels(b)[0] AS to_label, b.name AS to_name, "
110 "coalesce(b.file_path, b.path, '') AS to_path"
111 )
112 edges = []
113 for row in result.result_set or []:
114 edges.append({
115 "kind": "edge",
116 "type": row[0],
117 "from": {"label": row[1], "name": row[2], "path": row[3]},
118 "to": {"label": row[4], "name": row[5], "path": row[6]},
119 })
 
 
120 return edges
121
122
123 def _import_node(store: GraphStore, record: dict) -> None:
124 """Create a node from an export record."""
@@ -143,12 +148,12 @@
143 """Create an edge from an export record."""
144 edge_type = record["type"]
145 from_info = record["from"]
146 to_info = record["to"]
147
148 from_key = f"name: $from_name"
149 to_key = f"name: $to_name"
150
151 params = {
152 "from_name": from_info["name"],
153 "to_name": to_info["name"],
154 }
155
--- navegador/graph/export.py
+++ navegador/graph/export.py
@@ -32,12 +32,17 @@
32 nodes = _export_nodes(store)
33 edges = _export_edges(store)
34
35 # Sort for deterministic output
36 nodes.sort(key=lambda n: (n["label"], json.dumps(n["props"], sort_keys=True)))
37 edges.sort(
38 key=lambda e: (
39 e["type"],
40 json.dumps(e["from"], sort_keys=True),
41 json.dumps(e["to"], sort_keys=True),
42 )
43 )
44
45 with output_path.open("w", encoding="utf-8") as f:
46 for node in nodes:
47 f.write(json.dumps(node, sort_keys=True) + "\n")
48 for edge in edges:
@@ -87,13 +92,11 @@
92 return {"nodes": node_count, "edges": edge_count}
93
94
95 def _export_nodes(store: GraphStore) -> list[dict]:
96 """Export all nodes with their labels and properties."""
97 result = store.query("MATCH (n) RETURN labels(n)[0] AS label, properties(n) AS props")
 
 
98 nodes = []
99 for row in result.result_set or []:
100 label = row[0]
101 props = row[1] if isinstance(row[1], dict) else {}
102 nodes.append({"kind": "node", "label": label, "props": props})
@@ -109,16 +112,18 @@
112 "labels(b)[0] AS to_label, b.name AS to_name, "
113 "coalesce(b.file_path, b.path, '') AS to_path"
114 )
115 edges = []
116 for row in result.result_set or []:
117 edges.append(
118 {
119 "kind": "edge",
120 "type": row[0],
121 "from": {"label": row[1], "name": row[2], "path": row[3]},
122 "to": {"label": row[4], "name": row[5], "path": row[6]},
123 }
124 )
125 return edges
126
127
128 def _import_node(store: GraphStore, record: dict) -> None:
129 """Create a node from an export record."""
@@ -143,12 +148,12 @@
148 """Create an edge from an export record."""
149 edge_type = record["type"]
150 from_info = record["from"]
151 to_info = record["to"]
152
153 from_key = "name: $from_name"
154 to_key = "name: $to_name"
155
156 params = {
157 "from_name": from_info["name"],
158 "to_name": to_info["name"],
159 }
160
--- navegador/graph/migrations.py
+++ navegador/graph/migrations.py
@@ -62,13 +62,11 @@
6262
applied: list[int] = []
6363
6464
while current < CURRENT_SCHEMA_VERSION:
6565
fn = _migrations.get(current)
6666
if fn is None:
67
- raise RuntimeError(
68
- f"No migration registered for version {current} -> {current + 1}"
69
- )
67
+ raise RuntimeError(f"No migration registered for version {current} -> {current + 1}")
7068
logger.info("Applying migration %d -> %d", current, current + 1)
7169
fn(store)
7270
current += 1
7371
set_schema_version(store, current)
7472
applied.append(current)
@@ -93,9 +91,7 @@
9391
9492
@migration(1)
9593
def _migrate_1_to_2(store: GraphStore) -> None:
9694
"""Add content_hash property to File nodes for incremental ingestion."""
9795
# Set content_hash to empty string on existing File nodes that lack it.
98
- store.query(
99
- "MATCH (f:File) WHERE f.content_hash IS NULL SET f.content_hash = ''"
100
- )
96
+ store.query("MATCH (f:File) WHERE f.content_hash IS NULL SET f.content_hash = ''")
10197
logger.info("Added content_hash to File nodes")
10298
--- navegador/graph/migrations.py
+++ navegador/graph/migrations.py
@@ -62,13 +62,11 @@
62 applied: list[int] = []
63
64 while current < CURRENT_SCHEMA_VERSION:
65 fn = _migrations.get(current)
66 if fn is None:
67 raise RuntimeError(
68 f"No migration registered for version {current} -> {current + 1}"
69 )
70 logger.info("Applying migration %d -> %d", current, current + 1)
71 fn(store)
72 current += 1
73 set_schema_version(store, current)
74 applied.append(current)
@@ -93,9 +91,7 @@
93
94 @migration(1)
95 def _migrate_1_to_2(store: GraphStore) -> None:
96 """Add content_hash property to File nodes for incremental ingestion."""
97 # Set content_hash to empty string on existing File nodes that lack it.
98 store.query(
99 "MATCH (f:File) WHERE f.content_hash IS NULL SET f.content_hash = ''"
100 )
101 logger.info("Added content_hash to File nodes")
102
--- navegador/graph/migrations.py
+++ navegador/graph/migrations.py
@@ -62,13 +62,11 @@
62 applied: list[int] = []
63
64 while current < CURRENT_SCHEMA_VERSION:
65 fn = _migrations.get(current)
66 if fn is None:
67 raise RuntimeError(f"No migration registered for version {current} -> {current + 1}")
 
 
68 logger.info("Applying migration %d -> %d", current, current + 1)
69 fn(store)
70 current += 1
71 set_schema_version(store, current)
72 applied.append(current)
@@ -93,9 +91,7 @@
91
92 @migration(1)
93 def _migrate_1_to_2(store: GraphStore) -> None:
94 """Add content_hash property to File nodes for incremental ingestion."""
95 # Set content_hash to empty string on existing File nodes that lack it.
96 store.query("MATCH (f:File) WHERE f.content_hash IS NULL SET f.content_hash = ''")
 
 
97 logger.info("Added content_hash to File nodes")
98
--- navegador/ingestion/c.py
+++ navegador/ingestion/c.py
@@ -18,13 +18,11 @@
1818
import tree_sitter_c as tsc # type: ignore[import]
1919
from tree_sitter import Language
2020
2121
return Language(tsc.language())
2222
except ImportError as e:
23
- raise ImportError(
24
- "Install tree-sitter-c: pip install tree-sitter-c"
25
- ) from e
23
+ raise ImportError("Install tree-sitter-c: pip install tree-sitter-c") from e
2624
2725
2826
def _node_text(node, source: bytes) -> str:
2927
return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
3028
@@ -139,12 +137,14 @@
139137
name_node = next((c for c in node.children if c.type == "type_identifier"), None)
140138
if not name_node:
141139
return
142140
name = _node_text(name_node, source)
143141
144
- kind = "struct" if node.type == "struct_specifier" else (
145
- "union" if node.type == "union_specifier" else "enum"
142
+ kind = (
143
+ "struct"
144
+ if node.type == "struct_specifier"
145
+ else ("union" if node.type == "union_specifier" else "enum")
146146
)
147147
store.create_node(
148148
NodeLabel.Class,
149149
{
150150
"name": name,
@@ -205,13 +205,11 @@
205205
) -> None:
206206
def walk(node):
207207
if node.type == "call_expression":
208208
func = node.child_by_field_name("function")
209209
if not func:
210
- func = next(
211
- (c for c in node.children if c.type == "identifier"), None
212
- )
210
+ func = next((c for c in node.children if c.type == "identifier"), None)
213211
if func:
214212
callee = _node_text(func, source)
215213
store.create_edge(
216214
NodeLabel.Function,
217215
{"name": fn_name, "file_path": file_path},
218216
--- navegador/ingestion/c.py
+++ navegador/ingestion/c.py
@@ -18,13 +18,11 @@
18 import tree_sitter_c as tsc # type: ignore[import]
19 from tree_sitter import Language
20
21 return Language(tsc.language())
22 except ImportError as e:
23 raise ImportError(
24 "Install tree-sitter-c: pip install tree-sitter-c"
25 ) from e
26
27
28 def _node_text(node, source: bytes) -> str:
29 return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
30
@@ -139,12 +137,14 @@
139 name_node = next((c for c in node.children if c.type == "type_identifier"), None)
140 if not name_node:
141 return
142 name = _node_text(name_node, source)
143
144 kind = "struct" if node.type == "struct_specifier" else (
145 "union" if node.type == "union_specifier" else "enum"
 
 
146 )
147 store.create_node(
148 NodeLabel.Class,
149 {
150 "name": name,
@@ -205,13 +205,11 @@
205 ) -> None:
206 def walk(node):
207 if node.type == "call_expression":
208 func = node.child_by_field_name("function")
209 if not func:
210 func = next(
211 (c for c in node.children if c.type == "identifier"), None
212 )
213 if func:
214 callee = _node_text(func, source)
215 store.create_edge(
216 NodeLabel.Function,
217 {"name": fn_name, "file_path": file_path},
218
--- navegador/ingestion/c.py
+++ navegador/ingestion/c.py
@@ -18,13 +18,11 @@
18 import tree_sitter_c as tsc # type: ignore[import]
19 from tree_sitter import Language
20
21 return Language(tsc.language())
22 except ImportError as e:
23 raise ImportError("Install tree-sitter-c: pip install tree-sitter-c") from e
 
 
24
25
26 def _node_text(node, source: bytes) -> str:
27 return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
28
@@ -139,12 +137,14 @@
137 name_node = next((c for c in node.children if c.type == "type_identifier"), None)
138 if not name_node:
139 return
140 name = _node_text(name_node, source)
141
142 kind = (
143 "struct"
144 if node.type == "struct_specifier"
145 else ("union" if node.type == "union_specifier" else "enum")
146 )
147 store.create_node(
148 NodeLabel.Class,
149 {
150 "name": name,
@@ -205,13 +205,11 @@
205 ) -> None:
206 def walk(node):
207 if node.type == "call_expression":
208 func = node.child_by_field_name("function")
209 if not func:
210 func = next((c for c in node.children if c.type == "identifier"), None)
 
 
211 if func:
212 callee = _node_text(func, source)
213 store.create_edge(
214 NodeLabel.Function,
215 {"name": fn_name, "file_path": file_path},
216
--- navegador/ingestion/cpp.py
+++ navegador/ingestion/cpp.py
@@ -18,13 +18,11 @@
1818
import tree_sitter_cpp as tscpp # type: ignore[import]
1919
from tree_sitter import Language
2020
2121
return Language(tscpp.language())
2222
except ImportError as e:
23
- raise ImportError(
24
- "Install tree-sitter-cpp: pip install tree-sitter-cpp"
25
- ) from e
23
+ raise ImportError("Install tree-sitter-cpp: pip install tree-sitter-cpp") from e
2624
2725
2826
def _node_text(node, source: bytes) -> str:
2927
return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
3028
@@ -78,13 +76,11 @@
7876
return
7977
if node.type == "namespace_definition":
8078
# Recurse into namespace body
8179
body = node.child_by_field_name("body")
8280
if not body:
83
- body = next(
84
- (c for c in node.children if c.type == "declaration_list"), None
85
- )
81
+ body = next((c for c in node.children if c.type == "declaration_list"), None)
8682
if body:
8783
for child in body.children:
8884
self._walk(child, source, file_path, store, stats, class_name)
8985
return
9086
for child in node.children:
@@ -95,13 +91,11 @@
9591
def _handle_class(
9692
self, node, source: bytes, file_path: str, store: GraphStore, stats: dict
9793
) -> None:
9894
name_node = node.child_by_field_name("name")
9995
if not name_node:
100
- name_node = next(
101
- (c for c in node.children if c.type == "type_identifier"), None
102
- )
96
+ name_node = next((c for c in node.children if c.type == "type_identifier"), None)
10397
if not name_node:
10498
return
10599
name = _node_text(name_node, source)
106100
107101
store.create_node(
@@ -125,13 +119,11 @@
125119
stats["edges"] += 1
126120
127121
# Base classes
128122
base_clause = node.child_by_field_name("base_clause")
129123
if not base_clause:
130
- base_clause = next(
131
- (c for c in node.children if c.type == "base_class_clause"), None
132
- )
124
+ base_clause = next((c for c in node.children if c.type == "base_class_clause"), None)
133125
if base_clause:
134126
for child in base_clause.children:
135127
if child.type == "type_identifier":
136128
parent_name = _node_text(child, source)
137129
store.create_edge(
@@ -262,11 +254,15 @@
262254
def walk(node):
263255
if node.type == "call_expression":
264256
func = node.child_by_field_name("function")
265257
if not func:
266258
func = next(
267
- (c for c in node.children if c.type in ("identifier", "qualified_identifier", "field_expression")),
259
+ (
260
+ c
261
+ for c in node.children
262
+ if c.type in ("identifier", "qualified_identifier", "field_expression")
263
+ ),
268264
None,
269265
)
270266
if func:
271267
callee = _node_text(func, source).split("::")[-1].split(".")[-1].split("->")[-1]
272268
store.create_edge(
273269
--- navegador/ingestion/cpp.py
+++ navegador/ingestion/cpp.py
@@ -18,13 +18,11 @@
18 import tree_sitter_cpp as tscpp # type: ignore[import]
19 from tree_sitter import Language
20
21 return Language(tscpp.language())
22 except ImportError as e:
23 raise ImportError(
24 "Install tree-sitter-cpp: pip install tree-sitter-cpp"
25 ) from e
26
27
28 def _node_text(node, source: bytes) -> str:
29 return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
30
@@ -78,13 +76,11 @@
78 return
79 if node.type == "namespace_definition":
80 # Recurse into namespace body
81 body = node.child_by_field_name("body")
82 if not body:
83 body = next(
84 (c for c in node.children if c.type == "declaration_list"), None
85 )
86 if body:
87 for child in body.children:
88 self._walk(child, source, file_path, store, stats, class_name)
89 return
90 for child in node.children:
@@ -95,13 +91,11 @@
95 def _handle_class(
96 self, node, source: bytes, file_path: str, store: GraphStore, stats: dict
97 ) -> None:
98 name_node = node.child_by_field_name("name")
99 if not name_node:
100 name_node = next(
101 (c for c in node.children if c.type == "type_identifier"), None
102 )
103 if not name_node:
104 return
105 name = _node_text(name_node, source)
106
107 store.create_node(
@@ -125,13 +119,11 @@
125 stats["edges"] += 1
126
127 # Base classes
128 base_clause = node.child_by_field_name("base_clause")
129 if not base_clause:
130 base_clause = next(
131 (c for c in node.children if c.type == "base_class_clause"), None
132 )
133 if base_clause:
134 for child in base_clause.children:
135 if child.type == "type_identifier":
136 parent_name = _node_text(child, source)
137 store.create_edge(
@@ -262,11 +254,15 @@
262 def walk(node):
263 if node.type == "call_expression":
264 func = node.child_by_field_name("function")
265 if not func:
266 func = next(
267 (c for c in node.children if c.type in ("identifier", "qualified_identifier", "field_expression")),
 
 
 
 
268 None,
269 )
270 if func:
271 callee = _node_text(func, source).split("::")[-1].split(".")[-1].split("->")[-1]
272 store.create_edge(
273
--- navegador/ingestion/cpp.py
+++ navegador/ingestion/cpp.py
@@ -18,13 +18,11 @@
18 import tree_sitter_cpp as tscpp # type: ignore[import]
19 from tree_sitter import Language
20
21 return Language(tscpp.language())
22 except ImportError as e:
23 raise ImportError("Install tree-sitter-cpp: pip install tree-sitter-cpp") from e
 
 
24
25
26 def _node_text(node, source: bytes) -> str:
27 return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
28
@@ -78,13 +76,11 @@
76 return
77 if node.type == "namespace_definition":
78 # Recurse into namespace body
79 body = node.child_by_field_name("body")
80 if not body:
81 body = next((c for c in node.children if c.type == "declaration_list"), None)
 
 
82 if body:
83 for child in body.children:
84 self._walk(child, source, file_path, store, stats, class_name)
85 return
86 for child in node.children:
@@ -95,13 +91,11 @@
91 def _handle_class(
92 self, node, source: bytes, file_path: str, store: GraphStore, stats: dict
93 ) -> None:
94 name_node = node.child_by_field_name("name")
95 if not name_node:
96 name_node = next((c for c in node.children if c.type == "type_identifier"), None)
 
 
97 if not name_node:
98 return
99 name = _node_text(name_node, source)
100
101 store.create_node(
@@ -125,13 +119,11 @@
119 stats["edges"] += 1
120
121 # Base classes
122 base_clause = node.child_by_field_name("base_clause")
123 if not base_clause:
124 base_clause = next((c for c in node.children if c.type == "base_class_clause"), None)
 
 
125 if base_clause:
126 for child in base_clause.children:
127 if child.type == "type_identifier":
128 parent_name = _node_text(child, source)
129 store.create_edge(
@@ -262,11 +254,15 @@
254 def walk(node):
255 if node.type == "call_expression":
256 func = node.child_by_field_name("function")
257 if not func:
258 func = next(
259 (
260 c
261 for c in node.children
262 if c.type in ("identifier", "qualified_identifier", "field_expression")
263 ),
264 None,
265 )
266 if func:
267 callee = _node_text(func, source).split("::")[-1].split(".")[-1].split("->")[-1]
268 store.create_edge(
269
--- navegador/ingestion/csharp.py
+++ navegador/ingestion/csharp.py
@@ -18,13 +18,11 @@
1818
import tree_sitter_c_sharp as tscsharp # type: ignore[import]
1919
from tree_sitter import Language
2020
2121
return Language(tscsharp.language())
2222
except ImportError as e:
23
- raise ImportError(
24
- "Install tree-sitter-c-sharp: pip install tree-sitter-c-sharp"
25
- ) from e
23
+ raise ImportError("Install tree-sitter-c-sharp: pip install tree-sitter-c-sharp") from e
2624
2725
2826
def _node_text(node, source: bytes) -> str:
2927
return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
3028
@@ -132,13 +130,11 @@
132130
stats["edges"] += 1
133131
134132
# Walk class body for methods
135133
body = node.child_by_field_name("body")
136134
if not body:
137
- body = next(
138
- (c for c in node.children if c.type == "declaration_list"), None
139
- )
135
+ body = next((c for c in node.children if c.type == "declaration_list"), None)
140136
if body:
141137
for child in body.children:
142138
if child.type in ("method_declaration", "constructor_declaration"):
143139
self._handle_method(child, source, file_path, store, stats, class_name=name)
144140
@@ -190,16 +186,11 @@
190186
def _handle_using(
191187
self, node, source: bytes, file_path: str, store: GraphStore, stats: dict
192188
) -> None:
193189
raw = _node_text(node, source).strip()
194190
# "using System.Collections.Generic;" or "using static ..."
195
- module = (
196
- raw.removeprefix("using")
197
- .removeprefix(" static")
198
- .removesuffix(";")
199
- .strip()
200
- )
191
+ module = raw.removeprefix("using").removeprefix(" static").removesuffix(";").strip()
201192
if not module:
202193
return
203194
store.create_node(
204195
NodeLabel.Import,
205196
{
@@ -231,11 +222,15 @@
231222
def walk(node):
232223
if node.type == "invocation_expression":
233224
func = node.child_by_field_name("function")
234225
if not func:
235226
func = next(
236
- (c for c in node.children if c.type in ("identifier", "member_access_expression")),
227
+ (
228
+ c
229
+ for c in node.children
230
+ if c.type in ("identifier", "member_access_expression")
231
+ ),
237232
None,
238233
)
239234
if func:
240235
callee = _node_text(func, source).split(".")[-1]
241236
store.create_edge(
242237
--- navegador/ingestion/csharp.py
+++ navegador/ingestion/csharp.py
@@ -18,13 +18,11 @@
18 import tree_sitter_c_sharp as tscsharp # type: ignore[import]
19 from tree_sitter import Language
20
21 return Language(tscsharp.language())
22 except ImportError as e:
23 raise ImportError(
24 "Install tree-sitter-c-sharp: pip install tree-sitter-c-sharp"
25 ) from e
26
27
28 def _node_text(node, source: bytes) -> str:
29 return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
30
@@ -132,13 +130,11 @@
132 stats["edges"] += 1
133
134 # Walk class body for methods
135 body = node.child_by_field_name("body")
136 if not body:
137 body = next(
138 (c for c in node.children if c.type == "declaration_list"), None
139 )
140 if body:
141 for child in body.children:
142 if child.type in ("method_declaration", "constructor_declaration"):
143 self._handle_method(child, source, file_path, store, stats, class_name=name)
144
@@ -190,16 +186,11 @@
190 def _handle_using(
191 self, node, source: bytes, file_path: str, store: GraphStore, stats: dict
192 ) -> None:
193 raw = _node_text(node, source).strip()
194 # "using System.Collections.Generic;" or "using static ..."
195 module = (
196 raw.removeprefix("using")
197 .removeprefix(" static")
198 .removesuffix(";")
199 .strip()
200 )
201 if not module:
202 return
203 store.create_node(
204 NodeLabel.Import,
205 {
@@ -231,11 +222,15 @@
231 def walk(node):
232 if node.type == "invocation_expression":
233 func = node.child_by_field_name("function")
234 if not func:
235 func = next(
236 (c for c in node.children if c.type in ("identifier", "member_access_expression")),
 
 
 
 
237 None,
238 )
239 if func:
240 callee = _node_text(func, source).split(".")[-1]
241 store.create_edge(
242
--- navegador/ingestion/csharp.py
+++ navegador/ingestion/csharp.py
@@ -18,13 +18,11 @@
18 import tree_sitter_c_sharp as tscsharp # type: ignore[import]
19 from tree_sitter import Language
20
21 return Language(tscsharp.language())
22 except ImportError as e:
23 raise ImportError("Install tree-sitter-c-sharp: pip install tree-sitter-c-sharp") from e
 
 
24
25
26 def _node_text(node, source: bytes) -> str:
27 return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
28
@@ -132,13 +130,11 @@
130 stats["edges"] += 1
131
132 # Walk class body for methods
133 body = node.child_by_field_name("body")
134 if not body:
135 body = next((c for c in node.children if c.type == "declaration_list"), None)
 
 
136 if body:
137 for child in body.children:
138 if child.type in ("method_declaration", "constructor_declaration"):
139 self._handle_method(child, source, file_path, store, stats, class_name=name)
140
@@ -190,16 +186,11 @@
186 def _handle_using(
187 self, node, source: bytes, file_path: str, store: GraphStore, stats: dict
188 ) -> None:
189 raw = _node_text(node, source).strip()
190 # "using System.Collections.Generic;" or "using static ..."
191 module = raw.removeprefix("using").removeprefix(" static").removesuffix(";").strip()
 
 
 
 
 
192 if not module:
193 return
194 store.create_node(
195 NodeLabel.Import,
196 {
@@ -231,11 +222,15 @@
222 def walk(node):
223 if node.type == "invocation_expression":
224 func = node.child_by_field_name("function")
225 if not func:
226 func = next(
227 (
228 c
229 for c in node.children
230 if c.type in ("identifier", "member_access_expression")
231 ),
232 None,
233 )
234 if func:
235 callee = _node_text(func, source).split(".")[-1]
236 store.create_edge(
237
--- navegador/ingestion/kotlin.py
+++ navegador/ingestion/kotlin.py
@@ -18,13 +18,11 @@
1818
import tree_sitter_kotlin as tskotlin # type: ignore[import]
1919
from tree_sitter import Language
2020
2121
return Language(tskotlin.language())
2222
except ImportError as e:
23
- raise ImportError(
24
- "Install tree-sitter-kotlin: pip install tree-sitter-kotlin"
25
- ) from e
23
+ raise ImportError("Install tree-sitter-kotlin: pip install tree-sitter-kotlin") from e
2624
2725
2826
def _node_text(node, source: bytes) -> str:
2927
return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
3028
@@ -85,13 +83,11 @@
8583
self, node, source: bytes, file_path: str, store: GraphStore, stats: dict
8684
) -> None:
8785
name_node = node.child_by_field_name("name")
8886
if not name_node:
8987
# fallback: first simple_identifier child
90
- name_node = next(
91
- (c for c in node.children if c.type == "simple_identifier"), None
92
- )
88
+ name_node = next((c for c in node.children if c.type == "simple_identifier"), None)
9389
if not name_node:
9490
return
9591
name = _node_text(name_node, source)
9692
9793
store.create_node(
@@ -115,13 +111,11 @@
115111
stats["edges"] += 1
116112
117113
# Walk class body for member functions
118114
body = node.child_by_field_name("body")
119115
if not body:
120
- body = next(
121
- (c for c in node.children if c.type in ("class_body", "object_body")), None
122
- )
116
+ body = next((c for c in node.children if c.type in ("class_body", "object_body")), None)
123117
if body:
124118
for child in body.children:
125119
if child.type == "function_declaration":
126120
self._handle_function(child, source, file_path, store, stats, class_name=name)
127121
@@ -134,13 +128,11 @@
134128
stats: dict,
135129
class_name: str | None,
136130
) -> None:
137131
name_node = node.child_by_field_name("name")
138132
if not name_node:
139
- name_node = next(
140
- (c for c in node.children if c.type == "simple_identifier"), None
141
- )
133
+ name_node = next((c for c in node.children if c.type == "simple_identifier"), None)
142134
if not name_node:
143135
return
144136
name = _node_text(name_node, source)
145137
146138
label = NodeLabel.Method if class_name else NodeLabel.Function
@@ -211,11 +203,15 @@
211203
def walk(node):
212204
if node.type == "call_expression":
213205
func = node.child_by_field_name("calleeExpression")
214206
if not func:
215207
func = next(
216
- (c for c in node.children if c.type in ("simple_identifier", "navigation_expression")),
208
+ (
209
+ c
210
+ for c in node.children
211
+ if c.type in ("simple_identifier", "navigation_expression")
212
+ ),
217213
None,
218214
)
219215
if func:
220216
callee = _node_text(func, source).split(".")[-1]
221217
store.create_edge(
@@ -229,10 +225,8 @@
229225
for child in node.children:
230226
walk(child)
231227
232228
body = fn_node.child_by_field_name("body")
233229
if not body:
234
- body = next(
235
- (c for c in fn_node.children if c.type in ("function_body", "block")), None
236
- )
230
+ body = next((c for c in fn_node.children if c.type in ("function_body", "block")), None)
237231
if body:
238232
walk(body)
239233
--- navegador/ingestion/kotlin.py
+++ navegador/ingestion/kotlin.py
@@ -18,13 +18,11 @@
18 import tree_sitter_kotlin as tskotlin # type: ignore[import]
19 from tree_sitter import Language
20
21 return Language(tskotlin.language())
22 except ImportError as e:
23 raise ImportError(
24 "Install tree-sitter-kotlin: pip install tree-sitter-kotlin"
25 ) from e
26
27
28 def _node_text(node, source: bytes) -> str:
29 return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
30
@@ -85,13 +83,11 @@
85 self, node, source: bytes, file_path: str, store: GraphStore, stats: dict
86 ) -> None:
87 name_node = node.child_by_field_name("name")
88 if not name_node:
89 # fallback: first simple_identifier child
90 name_node = next(
91 (c for c in node.children if c.type == "simple_identifier"), None
92 )
93 if not name_node:
94 return
95 name = _node_text(name_node, source)
96
97 store.create_node(
@@ -115,13 +111,11 @@
115 stats["edges"] += 1
116
117 # Walk class body for member functions
118 body = node.child_by_field_name("body")
119 if not body:
120 body = next(
121 (c for c in node.children if c.type in ("class_body", "object_body")), None
122 )
123 if body:
124 for child in body.children:
125 if child.type == "function_declaration":
126 self._handle_function(child, source, file_path, store, stats, class_name=name)
127
@@ -134,13 +128,11 @@
134 stats: dict,
135 class_name: str | None,
136 ) -> None:
137 name_node = node.child_by_field_name("name")
138 if not name_node:
139 name_node = next(
140 (c for c in node.children if c.type == "simple_identifier"), None
141 )
142 if not name_node:
143 return
144 name = _node_text(name_node, source)
145
146 label = NodeLabel.Method if class_name else NodeLabel.Function
@@ -211,11 +203,15 @@
211 def walk(node):
212 if node.type == "call_expression":
213 func = node.child_by_field_name("calleeExpression")
214 if not func:
215 func = next(
216 (c for c in node.children if c.type in ("simple_identifier", "navigation_expression")),
 
 
 
 
217 None,
218 )
219 if func:
220 callee = _node_text(func, source).split(".")[-1]
221 store.create_edge(
@@ -229,10 +225,8 @@
229 for child in node.children:
230 walk(child)
231
232 body = fn_node.child_by_field_name("body")
233 if not body:
234 body = next(
235 (c for c in fn_node.children if c.type in ("function_body", "block")), None
236 )
237 if body:
238 walk(body)
239
--- navegador/ingestion/kotlin.py
+++ navegador/ingestion/kotlin.py
@@ -18,13 +18,11 @@
18 import tree_sitter_kotlin as tskotlin # type: ignore[import]
19 from tree_sitter import Language
20
21 return Language(tskotlin.language())
22 except ImportError as e:
23 raise ImportError("Install tree-sitter-kotlin: pip install tree-sitter-kotlin") from e
 
 
24
25
26 def _node_text(node, source: bytes) -> str:
27 return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
28
@@ -85,13 +83,11 @@
83 self, node, source: bytes, file_path: str, store: GraphStore, stats: dict
84 ) -> None:
85 name_node = node.child_by_field_name("name")
86 if not name_node:
87 # fallback: first simple_identifier child
88 name_node = next((c for c in node.children if c.type == "simple_identifier"), None)
 
 
89 if not name_node:
90 return
91 name = _node_text(name_node, source)
92
93 store.create_node(
@@ -115,13 +111,11 @@
111 stats["edges"] += 1
112
113 # Walk class body for member functions
114 body = node.child_by_field_name("body")
115 if not body:
116 body = next((c for c in node.children if c.type in ("class_body", "object_body")), None)
 
 
117 if body:
118 for child in body.children:
119 if child.type == "function_declaration":
120 self._handle_function(child, source, file_path, store, stats, class_name=name)
121
@@ -134,13 +128,11 @@
128 stats: dict,
129 class_name: str | None,
130 ) -> None:
131 name_node = node.child_by_field_name("name")
132 if not name_node:
133 name_node = next((c for c in node.children if c.type == "simple_identifier"), None)
 
 
134 if not name_node:
135 return
136 name = _node_text(name_node, source)
137
138 label = NodeLabel.Method if class_name else NodeLabel.Function
@@ -211,11 +203,15 @@
203 def walk(node):
204 if node.type == "call_expression":
205 func = node.child_by_field_name("calleeExpression")
206 if not func:
207 func = next(
208 (
209 c
210 for c in node.children
211 if c.type in ("simple_identifier", "navigation_expression")
212 ),
213 None,
214 )
215 if func:
216 callee = _node_text(func, source).split(".")[-1]
217 store.create_edge(
@@ -229,10 +225,8 @@
225 for child in node.children:
226 walk(child)
227
228 body = fn_node.child_by_field_name("body")
229 if not body:
230 body = next((c for c in fn_node.children if c.type in ("function_body", "block")), None)
 
 
231 if body:
232 walk(body)
233
--- navegador/ingestion/optimization.py
+++ navegador/ingestion/optimization.py
@@ -296,11 +296,13 @@
296296
nodes: list[NodeDescriptor] = []
297297
for row in rows:
298298
label, name, line_start = row[0], row[1], row[2]
299299
if name is None or line_start is None:
300300
continue
301
- nodes.append(NodeDescriptor(label=str(label), name=str(name), line_start=int(line_start)))
301
+ nodes.append(
302
+ NodeDescriptor(label=str(label), name=str(name), line_start=int(line_start))
303
+ )
302304
return nodes
303305
304306
305307
# ── #45 — Parallel ingestion with worker pool ─────────────────────────────────
306308
@@ -368,13 +370,11 @@
368370
},
369371
)
370372
371373
# Collect all candidate files up-front (fast, single-threaded).
372374
candidate_files = [
373
- f
374
- for f in self._ingester._iter_source_files(repo_path)
375
- if LANGUAGE_MAP.get(f.suffix)
375
+ f for f in self._ingester._iter_source_files(repo_path) if LANGUAGE_MAP.get(f.suffix)
376376
]
377377
378378
aggregated: dict[str, int] = {
379379
"files": 0,
380380
"functions": 0,
@@ -396,13 +396,11 @@
396396
return
397397
398398
if incremental:
399399
self._ingester._clear_file_subgraph(rel_path)
400400
401
- parse_path, effective_root = self._ingester._maybe_redact_to_tmp(
402
- source_file, repo_path
403
- )
401
+ parse_path, effective_root = self._ingester._maybe_redact_to_tmp(source_file, repo_path)
404402
try:
405403
parser = self._ingester._get_parser(language)
406404
file_stats = parser.parse_file(parse_path, effective_root, self._store)
407405
self._ingester._store_file_hash(rel_path, content_hash)
408406
with lock:
409407
--- navegador/ingestion/optimization.py
+++ navegador/ingestion/optimization.py
@@ -296,11 +296,13 @@
296 nodes: list[NodeDescriptor] = []
297 for row in rows:
298 label, name, line_start = row[0], row[1], row[2]
299 if name is None or line_start is None:
300 continue
301 nodes.append(NodeDescriptor(label=str(label), name=str(name), line_start=int(line_start)))
 
 
302 return nodes
303
304
305 # ── #45 — Parallel ingestion with worker pool ─────────────────────────────────
306
@@ -368,13 +370,11 @@
368 },
369 )
370
371 # Collect all candidate files up-front (fast, single-threaded).
372 candidate_files = [
373 f
374 for f in self._ingester._iter_source_files(repo_path)
375 if LANGUAGE_MAP.get(f.suffix)
376 ]
377
378 aggregated: dict[str, int] = {
379 "files": 0,
380 "functions": 0,
@@ -396,13 +396,11 @@
396 return
397
398 if incremental:
399 self._ingester._clear_file_subgraph(rel_path)
400
401 parse_path, effective_root = self._ingester._maybe_redact_to_tmp(
402 source_file, repo_path
403 )
404 try:
405 parser = self._ingester._get_parser(language)
406 file_stats = parser.parse_file(parse_path, effective_root, self._store)
407 self._ingester._store_file_hash(rel_path, content_hash)
408 with lock:
409
--- navegador/ingestion/optimization.py
+++ navegador/ingestion/optimization.py
@@ -296,11 +296,13 @@
296 nodes: list[NodeDescriptor] = []
297 for row in rows:
298 label, name, line_start = row[0], row[1], row[2]
299 if name is None or line_start is None:
300 continue
301 nodes.append(
302 NodeDescriptor(label=str(label), name=str(name), line_start=int(line_start))
303 )
304 return nodes
305
306
307 # ── #45 — Parallel ingestion with worker pool ─────────────────────────────────
308
@@ -368,13 +370,11 @@
370 },
371 )
372
373 # Collect all candidate files up-front (fast, single-threaded).
374 candidate_files = [
375 f for f in self._ingester._iter_source_files(repo_path) if LANGUAGE_MAP.get(f.suffix)
 
 
376 ]
377
378 aggregated: dict[str, int] = {
379 "files": 0,
380 "functions": 0,
@@ -396,13 +396,11 @@
396 return
397
398 if incremental:
399 self._ingester._clear_file_subgraph(rel_path)
400
401 parse_path, effective_root = self._ingester._maybe_redact_to_tmp(source_file, repo_path)
 
 
402 try:
403 parser = self._ingester._get_parser(language)
404 file_stats = parser.parse_file(parse_path, effective_root, self._store)
405 self._ingester._store_file_hash(rel_path, content_hash)
406 with lock:
407
--- navegador/ingestion/parser.py
+++ navegador/ingestion/parser.py
@@ -151,10 +151,11 @@
151151
logger.exception("Failed to parse %s", source_file)
152152
finally:
153153
# Remove the temporary redacted directory if one was created
154154
if effective_root is not repo_path:
155155
import shutil
156
+
156157
shutil.rmtree(effective_root, ignore_errors=True)
157158
158159
logger.info(
159160
"Ingested %s: %d files, %d functions, %d classes, %d skipped",
160161
repo_path.name,
161162
--- navegador/ingestion/parser.py
+++ navegador/ingestion/parser.py
@@ -151,10 +151,11 @@
151 logger.exception("Failed to parse %s", source_file)
152 finally:
153 # Remove the temporary redacted directory if one was created
154 if effective_root is not repo_path:
155 import shutil
 
156 shutil.rmtree(effective_root, ignore_errors=True)
157
158 logger.info(
159 "Ingested %s: %d files, %d functions, %d classes, %d skipped",
160 repo_path.name,
161
--- navegador/ingestion/parser.py
+++ navegador/ingestion/parser.py
@@ -151,10 +151,11 @@
151 logger.exception("Failed to parse %s", source_file)
152 finally:
153 # Remove the temporary redacted directory if one was created
154 if effective_root is not repo_path:
155 import shutil
156
157 shutil.rmtree(effective_root, ignore_errors=True)
158
159 logger.info(
160 "Ingested %s: %d files, %d functions, %d classes, %d skipped",
161 repo_path.name,
162
--- navegador/ingestion/php.py
+++ navegador/ingestion/php.py
@@ -22,13 +22,11 @@
2222
lang_fn = getattr(tsphp, "language_php", None) or getattr(tsphp, "language", None)
2323
if lang_fn is None:
2424
raise ImportError("tree_sitter_php has no language() or language_php() callable")
2525
return Language(lang_fn())
2626
except ImportError as e:
27
- raise ImportError(
28
- "Install tree-sitter-php: pip install tree-sitter-php"
29
- ) from e
27
+ raise ImportError("Install tree-sitter-php: pip install tree-sitter-php") from e
3028
3129
3230
def _node_text(node, source: bytes) -> str:
3331
return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
3432
3533
--- navegador/ingestion/php.py
+++ navegador/ingestion/php.py
@@ -22,13 +22,11 @@
22 lang_fn = getattr(tsphp, "language_php", None) or getattr(tsphp, "language", None)
23 if lang_fn is None:
24 raise ImportError("tree_sitter_php has no language() or language_php() callable")
25 return Language(lang_fn())
26 except ImportError as e:
27 raise ImportError(
28 "Install tree-sitter-php: pip install tree-sitter-php"
29 ) from e
30
31
32 def _node_text(node, source: bytes) -> str:
33 return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
34
35
--- navegador/ingestion/php.py
+++ navegador/ingestion/php.py
@@ -22,13 +22,11 @@
22 lang_fn = getattr(tsphp, "language_php", None) or getattr(tsphp, "language", None)
23 if lang_fn is None:
24 raise ImportError("tree_sitter_php has no language() or language_php() callable")
25 return Language(lang_fn())
26 except ImportError as e:
27 raise ImportError("Install tree-sitter-php: pip install tree-sitter-php") from e
 
 
28
29
30 def _node_text(node, source: bytes) -> str:
31 return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
32
33
--- navegador/ingestion/ruby.py
+++ navegador/ingestion/ruby.py
@@ -18,13 +18,11 @@
1818
import tree_sitter_ruby as tsruby # type: ignore[import]
1919
from tree_sitter import Language
2020
2121
return Language(tsruby.language())
2222
except ImportError as e:
23
- raise ImportError(
24
- "Install tree-sitter-ruby: pip install tree-sitter-ruby"
25
- ) from e
23
+ raise ImportError("Install tree-sitter-ruby: pip install tree-sitter-ruby") from e
2624
2725
2826
def _node_text(node, source: bytes) -> str:
2927
return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
3028
@@ -275,13 +273,11 @@
275273
) -> None:
276274
def walk(node):
277275
if node.type == "call":
278276
method_node = node.child_by_field_name("method")
279277
if not method_node:
280
- method_node = next(
281
- (c for c in node.children if c.type == "identifier"), None
282
- )
278
+ method_node = next((c for c in node.children if c.type == "identifier"), None)
283279
if method_node:
284280
callee = _node_text(method_node, source)
285281
if callee not in ("require", "require_relative", "load"):
286282
store.create_edge(
287283
fn_label,
288284
--- navegador/ingestion/ruby.py
+++ navegador/ingestion/ruby.py
@@ -18,13 +18,11 @@
18 import tree_sitter_ruby as tsruby # type: ignore[import]
19 from tree_sitter import Language
20
21 return Language(tsruby.language())
22 except ImportError as e:
23 raise ImportError(
24 "Install tree-sitter-ruby: pip install tree-sitter-ruby"
25 ) from e
26
27
28 def _node_text(node, source: bytes) -> str:
29 return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
30
@@ -275,13 +273,11 @@
275 ) -> None:
276 def walk(node):
277 if node.type == "call":
278 method_node = node.child_by_field_name("method")
279 if not method_node:
280 method_node = next(
281 (c for c in node.children if c.type == "identifier"), None
282 )
283 if method_node:
284 callee = _node_text(method_node, source)
285 if callee not in ("require", "require_relative", "load"):
286 store.create_edge(
287 fn_label,
288
--- navegador/ingestion/ruby.py
+++ navegador/ingestion/ruby.py
@@ -18,13 +18,11 @@
18 import tree_sitter_ruby as tsruby # type: ignore[import]
19 from tree_sitter import Language
20
21 return Language(tsruby.language())
22 except ImportError as e:
23 raise ImportError("Install tree-sitter-ruby: pip install tree-sitter-ruby") from e
 
 
24
25
26 def _node_text(node, source: bytes) -> str:
27 return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
28
@@ -275,13 +273,11 @@
273 ) -> None:
274 def walk(node):
275 if node.type == "call":
276 method_node = node.child_by_field_name("method")
277 if not method_node:
278 method_node = next((c for c in node.children if c.type == "identifier"), None)
 
 
279 if method_node:
280 callee = _node_text(method_node, source)
281 if callee not in ("require", "require_relative", "load"):
282 store.create_edge(
283 fn_label,
284
--- navegador/ingestion/swift.py
+++ navegador/ingestion/swift.py
@@ -18,13 +18,11 @@
1818
import tree_sitter_swift as tsswift # type: ignore[import]
1919
from tree_sitter import Language
2020
2121
return Language(tsswift.language())
2222
except ImportError as e:
23
- raise ImportError(
24
- "Install tree-sitter-swift: pip install tree-sitter-swift"
25
- ) from e
23
+ raise ImportError("Install tree-sitter-swift: pip install tree-sitter-swift") from e
2624
2725
2826
def _node_text(node, source: bytes) -> str:
2927
return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
3028
@@ -137,11 +135,16 @@
137135
138136
# Walk body for member functions
139137
body = node.child_by_field_name("body")
140138
if not body:
141139
body = next(
142
- (c for c in node.children if c.type in ("class_body", "struct_body", "enum_body", "protocol_body", "extension_body")),
140
+ (
141
+ c
142
+ for c in node.children
143
+ if c.type
144
+ in ("class_body", "struct_body", "enum_body", "protocol_body", "extension_body")
145
+ ),
143146
None,
144147
)
145148
if body:
146149
for child in body.children:
147150
if child.type == "function_declaration":
@@ -232,11 +235,15 @@
232235
def walk(node):
233236
if node.type == "call_expression":
234237
func = node.child_by_field_name("function")
235238
if not func:
236239
func = next(
237
- (c for c in node.children if c.type in ("simple_identifier", "navigation_expression")),
240
+ (
241
+ c
242
+ for c in node.children
243
+ if c.type in ("simple_identifier", "navigation_expression")
244
+ ),
238245
None,
239246
)
240247
if func:
241248
callee = _node_text(func, source).split(".")[-1]
242249
store.create_edge(
243250
--- navegador/ingestion/swift.py
+++ navegador/ingestion/swift.py
@@ -18,13 +18,11 @@
18 import tree_sitter_swift as tsswift # type: ignore[import]
19 from tree_sitter import Language
20
21 return Language(tsswift.language())
22 except ImportError as e:
23 raise ImportError(
24 "Install tree-sitter-swift: pip install tree-sitter-swift"
25 ) from e
26
27
28 def _node_text(node, source: bytes) -> str:
29 return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
30
@@ -137,11 +135,16 @@
137
138 # Walk body for member functions
139 body = node.child_by_field_name("body")
140 if not body:
141 body = next(
142 (c for c in node.children if c.type in ("class_body", "struct_body", "enum_body", "protocol_body", "extension_body")),
 
 
 
 
 
143 None,
144 )
145 if body:
146 for child in body.children:
147 if child.type == "function_declaration":
@@ -232,11 +235,15 @@
232 def walk(node):
233 if node.type == "call_expression":
234 func = node.child_by_field_name("function")
235 if not func:
236 func = next(
237 (c for c in node.children if c.type in ("simple_identifier", "navigation_expression")),
 
 
 
 
238 None,
239 )
240 if func:
241 callee = _node_text(func, source).split(".")[-1]
242 store.create_edge(
243
--- navegador/ingestion/swift.py
+++ navegador/ingestion/swift.py
@@ -18,13 +18,11 @@
18 import tree_sitter_swift as tsswift # type: ignore[import]
19 from tree_sitter import Language
20
21 return Language(tsswift.language())
22 except ImportError as e:
23 raise ImportError("Install tree-sitter-swift: pip install tree-sitter-swift") from e
 
 
24
25
26 def _node_text(node, source: bytes) -> str:
27 return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace")
28
@@ -137,11 +135,16 @@
135
136 # Walk body for member functions
137 body = node.child_by_field_name("body")
138 if not body:
139 body = next(
140 (
141 c
142 for c in node.children
143 if c.type
144 in ("class_body", "struct_body", "enum_body", "protocol_body", "extension_body")
145 ),
146 None,
147 )
148 if body:
149 for child in body.children:
150 if child.type == "function_declaration":
@@ -232,11 +235,15 @@
235 def walk(node):
236 if node.type == "call_expression":
237 func = node.child_by_field_name("function")
238 if not func:
239 func = next(
240 (
241 c
242 for c in node.children
243 if c.type in ("simple_identifier", "navigation_expression")
244 ),
245 None,
246 )
247 if func:
248 callee = _node_text(func, source).split(".")[-1]
249 store.create_edge(
250
--- navegador/intelligence/community.py
+++ navegador/intelligence/community.py
@@ -213,14 +213,11 @@
213213
member_set = set(members_ids)
214214
member_names = [nodes[nid]["name"] for nid in members_ids if nid in nodes]
215215
216216
# Density = actual internal edges / possible internal edges
217217
internal_edges = sum(
218
- 1
219
- for nid in members_ids
220
- for nb in adj.get(nid, [])
221
- if nb in member_set
218
+ 1 for nid in members_ids for nb in adj.get(nid, []) if nb in member_set
222219
)
223220
# Each undirected edge counted twice in the adjacency list
224221
internal_edges //= 2
225222
n = len(members_ids)
226223
possible = n * (n - 1) / 2
227224
--- navegador/intelligence/community.py
+++ navegador/intelligence/community.py
@@ -213,14 +213,11 @@
213 member_set = set(members_ids)
214 member_names = [nodes[nid]["name"] for nid in members_ids if nid in nodes]
215
216 # Density = actual internal edges / possible internal edges
217 internal_edges = sum(
218 1
219 for nid in members_ids
220 for nb in adj.get(nid, [])
221 if nb in member_set
222 )
223 # Each undirected edge counted twice in the adjacency list
224 internal_edges //= 2
225 n = len(members_ids)
226 possible = n * (n - 1) / 2
227
--- navegador/intelligence/community.py
+++ navegador/intelligence/community.py
@@ -213,14 +213,11 @@
213 member_set = set(members_ids)
214 member_names = [nodes[nid]["name"] for nid in members_ids if nid in nodes]
215
216 # Density = actual internal edges / possible internal edges
217 internal_edges = sum(
218 1 for nid in members_ids for nb in adj.get(nid, []) if nb in member_set
 
 
 
219 )
220 # Each undirected edge counted twice in the adjacency list
221 internal_edges //= 2
222 n = len(members_ids)
223 possible = n * (n - 1) / 2
224
--- navegador/intelligence/docgen.py
+++ navegador/intelligence/docgen.py
@@ -103,13 +103,11 @@
103103
store: A :class:`~navegador.graph.GraphStore` instance.
104104
provider: Optional :class:`~navegador.llm.LLMProvider`. When
105105
``None`` (default) template-based generation is used.
106106
"""
107107
108
- def __init__(
109
- self, store: "GraphStore", provider: "LLMProvider | None" = None
110
- ) -> None:
108
+ def __init__(self, store: "GraphStore", provider: "LLMProvider | None" = None) -> None:
111109
self._store = store
112110
self._provider = provider
113111
114112
# ── Public API ────────────────────────────────────────────────────────────
115113
@@ -167,13 +165,11 @@
167165
if not rows:
168166
lines.append("_No symbols found in the graph for this file._")
169167
return "\n".join(lines)
170168
171169
for row in rows:
172
- sym_type, name, docstring, signature, line = (
173
- row[0], row[1], row[2], row[3], row[4]
174
- )
170
+ sym_type, name, docstring, signature, line = (row[0], row[1], row[2], row[3], row[4])
175171
lines.append(f"## {sym_type}: `{name}`")
176172
if line is not None:
177173
lines.append(f"_Line {line}_")
178174
if signature:
179175
lines += ["", f"```python\n{signature}\n```"]
@@ -200,13 +196,11 @@
200196
201197
for fp, file_rows in sorted(files.items()):
202198
lines.append(f"## `{fp}`")
203199
lines.append("")
204200
for row in file_rows:
205
- sym_type, name, _, docstring, signature = (
206
- row[0], row[1], row[2], row[3], row[4]
207
- )
201
+ sym_type, name, _, docstring, signature = (row[0], row[1], row[2], row[3], row[4])
208202
lines.append(f"### {sym_type}: `{name}`")
209203
if signature:
210204
lines += ["", f"```python\n{signature}\n```"]
211205
if docstring:
212206
lines += ["", docstring]
213207
--- navegador/intelligence/docgen.py
+++ navegador/intelligence/docgen.py
@@ -103,13 +103,11 @@
103 store: A :class:`~navegador.graph.GraphStore` instance.
104 provider: Optional :class:`~navegador.llm.LLMProvider`. When
105 ``None`` (default) template-based generation is used.
106 """
107
108 def __init__(
109 self, store: "GraphStore", provider: "LLMProvider | None" = None
110 ) -> None:
111 self._store = store
112 self._provider = provider
113
114 # ── Public API ────────────────────────────────────────────────────────────
115
@@ -167,13 +165,11 @@
167 if not rows:
168 lines.append("_No symbols found in the graph for this file._")
169 return "\n".join(lines)
170
171 for row in rows:
172 sym_type, name, docstring, signature, line = (
173 row[0], row[1], row[2], row[3], row[4]
174 )
175 lines.append(f"## {sym_type}: `{name}`")
176 if line is not None:
177 lines.append(f"_Line {line}_")
178 if signature:
179 lines += ["", f"```python\n{signature}\n```"]
@@ -200,13 +196,11 @@
200
201 for fp, file_rows in sorted(files.items()):
202 lines.append(f"## `{fp}`")
203 lines.append("")
204 for row in file_rows:
205 sym_type, name, _, docstring, signature = (
206 row[0], row[1], row[2], row[3], row[4]
207 )
208 lines.append(f"### {sym_type}: `{name}`")
209 if signature:
210 lines += ["", f"```python\n{signature}\n```"]
211 if docstring:
212 lines += ["", docstring]
213
--- navegador/intelligence/docgen.py
+++ navegador/intelligence/docgen.py
@@ -103,13 +103,11 @@
103 store: A :class:`~navegador.graph.GraphStore` instance.
104 provider: Optional :class:`~navegador.llm.LLMProvider`. When
105 ``None`` (default) template-based generation is used.
106 """
107
108 def __init__(self, store: "GraphStore", provider: "LLMProvider | None" = None) -> None:
 
 
109 self._store = store
110 self._provider = provider
111
112 # ── Public API ────────────────────────────────────────────────────────────
113
@@ -167,13 +165,11 @@
165 if not rows:
166 lines.append("_No symbols found in the graph for this file._")
167 return "\n".join(lines)
168
169 for row in rows:
170 sym_type, name, docstring, signature, line = (row[0], row[1], row[2], row[3], row[4])
 
 
171 lines.append(f"## {sym_type}: `{name}`")
172 if line is not None:
173 lines.append(f"_Line {line}_")
174 if signature:
175 lines += ["", f"```python\n{signature}\n```"]
@@ -200,13 +196,11 @@
196
197 for fp, file_rows in sorted(files.items()):
198 lines.append(f"## `{fp}`")
199 lines.append("")
200 for row in file_rows:
201 sym_type, name, _, docstring, signature = (row[0], row[1], row[2], row[3], row[4])
 
 
202 lines.append(f"### {sym_type}: `{name}`")
203 if signature:
204 lines += ["", f"```python\n{signature}\n```"]
205 if docstring:
206 lines += ["", docstring]
207
--- navegador/intelligence/nlp.py
+++ navegador/intelligence/nlp.py
@@ -128,13 +128,11 @@
128128
129129
Returns:
130130
A human-readable answer string.
131131
"""
132132
# Step 1: translate question → Cypher
133
- cypher_prompt = _NL_TO_CYPHER_PROMPT.format(
134
- schema=_SCHEMA_SUMMARY, question=question
135
- )
133
+ cypher_prompt = _NL_TO_CYPHER_PROMPT.format(schema=_SCHEMA_SUMMARY, question=question)
136134
cypher = self._provider.complete(cypher_prompt).strip()
137135
138136
# Strip any accidental markdown fences the model may still produce
139137
cypher = _strip_fences(cypher)
140138
@@ -142,19 +140,16 @@
142140
try:
143141
result = self._store.query(cypher, {})
144142
rows = result.result_set or []
145143
except Exception as exc: # noqa: BLE001
146144
return (
147
- f"Failed to execute the generated Cypher query.\n\n"
148
- f"Query: {cypher}\n\nError: {exc}"
145
+ f"Failed to execute the generated Cypher query.\n\nQuery: {cypher}\n\nError: {exc}"
149146
)
150147
151148
# Step 3: format result
152149
rows_text = json.dumps(rows[:50], indent=2, default=str)
153
- fmt_prompt = _FORMAT_RESULT_PROMPT.format(
154
- question=question, cypher=cypher, rows=rows_text
155
- )
150
+ fmt_prompt = _FORMAT_RESULT_PROMPT.format(question=question, cypher=cypher, rows=rows_text)
156151
return self._provider.complete(fmt_prompt)
157152
158153
# ── Community naming ──────────────────────────────────────────────────
159154
160155
def name_communities(self, communities: list["Community"]) -> list[dict[str, Any]]:
@@ -221,26 +216,22 @@
221216
signature = ""
222217
fp = file_path
223218
224219
if rows:
225220
row = rows[0]
226
- node_type, _, fp, docstring, signature = (
227
- row[0], row[1], row[2], row[3], row[4]
228
- )
221
+ node_type, _, fp, docstring, signature = (row[0], row[1], row[2], row[3], row[4])
229222
230223
# Fetch callers
231224
callers_result = self._store.query(
232
- "MATCH (caller)-[:CALLS]->(n {name: $name}) "
233
- "RETURN caller.name LIMIT 10",
225
+ "MATCH (caller)-[:CALLS]->(n {name: $name}) RETURN caller.name LIMIT 10",
234226
{"name": name},
235227
)
236228
callers = [r[0] for r in (callers_result.result_set or []) if r[0]]
237229
238230
# Fetch callees
239231
callees_result = self._store.query(
240
- "MATCH (n {name: $name})-[:CALLS]->(callee) "
241
- "RETURN callee.name LIMIT 10",
232
+ "MATCH (n {name: $name})-[:CALLS]->(callee) RETURN callee.name LIMIT 10",
242233
{"name": name},
243234
)
244235
callees = [r[0] for r in (callees_result.result_set or []) if r[0]]
245236
246237
prompt = _GENERATE_DOCS_PROMPT.format(
247238
--- navegador/intelligence/nlp.py
+++ navegador/intelligence/nlp.py
@@ -128,13 +128,11 @@
128
129 Returns:
130 A human-readable answer string.
131 """
132 # Step 1: translate question → Cypher
133 cypher_prompt = _NL_TO_CYPHER_PROMPT.format(
134 schema=_SCHEMA_SUMMARY, question=question
135 )
136 cypher = self._provider.complete(cypher_prompt).strip()
137
138 # Strip any accidental markdown fences the model may still produce
139 cypher = _strip_fences(cypher)
140
@@ -142,19 +140,16 @@
142 try:
143 result = self._store.query(cypher, {})
144 rows = result.result_set or []
145 except Exception as exc: # noqa: BLE001
146 return (
147 f"Failed to execute the generated Cypher query.\n\n"
148 f"Query: {cypher}\n\nError: {exc}"
149 )
150
151 # Step 3: format result
152 rows_text = json.dumps(rows[:50], indent=2, default=str)
153 fmt_prompt = _FORMAT_RESULT_PROMPT.format(
154 question=question, cypher=cypher, rows=rows_text
155 )
156 return self._provider.complete(fmt_prompt)
157
158 # ── Community naming ──────────────────────────────────────────────────
159
160 def name_communities(self, communities: list["Community"]) -> list[dict[str, Any]]:
@@ -221,26 +216,22 @@
221 signature = ""
222 fp = file_path
223
224 if rows:
225 row = rows[0]
226 node_type, _, fp, docstring, signature = (
227 row[0], row[1], row[2], row[3], row[4]
228 )
229
230 # Fetch callers
231 callers_result = self._store.query(
232 "MATCH (caller)-[:CALLS]->(n {name: $name}) "
233 "RETURN caller.name LIMIT 10",
234 {"name": name},
235 )
236 callers = [r[0] for r in (callers_result.result_set or []) if r[0]]
237
238 # Fetch callees
239 callees_result = self._store.query(
240 "MATCH (n {name: $name})-[:CALLS]->(callee) "
241 "RETURN callee.name LIMIT 10",
242 {"name": name},
243 )
244 callees = [r[0] for r in (callees_result.result_set or []) if r[0]]
245
246 prompt = _GENERATE_DOCS_PROMPT.format(
247
--- navegador/intelligence/nlp.py
+++ navegador/intelligence/nlp.py
@@ -128,13 +128,11 @@
128
129 Returns:
130 A human-readable answer string.
131 """
132 # Step 1: translate question → Cypher
133 cypher_prompt = _NL_TO_CYPHER_PROMPT.format(schema=_SCHEMA_SUMMARY, question=question)
 
 
134 cypher = self._provider.complete(cypher_prompt).strip()
135
136 # Strip any accidental markdown fences the model may still produce
137 cypher = _strip_fences(cypher)
138
@@ -142,19 +140,16 @@
140 try:
141 result = self._store.query(cypher, {})
142 rows = result.result_set or []
143 except Exception as exc: # noqa: BLE001
144 return (
145 f"Failed to execute the generated Cypher query.\n\nQuery: {cypher}\n\nError: {exc}"
 
146 )
147
148 # Step 3: format result
149 rows_text = json.dumps(rows[:50], indent=2, default=str)
150 fmt_prompt = _FORMAT_RESULT_PROMPT.format(question=question, cypher=cypher, rows=rows_text)
 
 
151 return self._provider.complete(fmt_prompt)
152
153 # ── Community naming ──────────────────────────────────────────────────
154
155 def name_communities(self, communities: list["Community"]) -> list[dict[str, Any]]:
@@ -221,26 +216,22 @@
216 signature = ""
217 fp = file_path
218
219 if rows:
220 row = rows[0]
221 node_type, _, fp, docstring, signature = (row[0], row[1], row[2], row[3], row[4])
 
 
222
223 # Fetch callers
224 callers_result = self._store.query(
225 "MATCH (caller)-[:CALLS]->(n {name: $name}) RETURN caller.name LIMIT 10",
 
226 {"name": name},
227 )
228 callers = [r[0] for r in (callers_result.result_set or []) if r[0]]
229
230 # Fetch callees
231 callees_result = self._store.query(
232 "MATCH (n {name: $name})-[:CALLS]->(callee) RETURN callee.name LIMIT 10",
 
233 {"name": name},
234 )
235 callees = [r[0] for r in (callees_result.result_set or []) if r[0]]
236
237 prompt = _GENERATE_DOCS_PROMPT.format(
238
--- navegador/intelligence/search.py
+++ navegador/intelligence/search.py
@@ -139,13 +139,11 @@
139139
result = self._store.query(_NODES_WITH_EMBEDDINGS, {})
140140
rows = result.result_set or []
141141
142142
scored: list[dict[str, Any]] = []
143143
for row in rows:
144
- node_type, name, file_path, text, emb_json = (
145
- row[0], row[1], row[2], row[3], row[4]
146
- )
144
+ node_type, name, file_path, text, emb_json = (row[0], row[1], row[2], row[3], row[4])
147145
if not emb_json:
148146
continue
149147
try:
150148
node_vec: list[float] = json.loads(emb_json)
151149
except (json.JSONDecodeError, TypeError):
152150
--- navegador/intelligence/search.py
+++ navegador/intelligence/search.py
@@ -139,13 +139,11 @@
139 result = self._store.query(_NODES_WITH_EMBEDDINGS, {})
140 rows = result.result_set or []
141
142 scored: list[dict[str, Any]] = []
143 for row in rows:
144 node_type, name, file_path, text, emb_json = (
145 row[0], row[1], row[2], row[3], row[4]
146 )
147 if not emb_json:
148 continue
149 try:
150 node_vec: list[float] = json.loads(emb_json)
151 except (json.JSONDecodeError, TypeError):
152
--- navegador/intelligence/search.py
+++ navegador/intelligence/search.py
@@ -139,13 +139,11 @@
139 result = self._store.query(_NODES_WITH_EMBEDDINGS, {})
140 rows = result.result_set or []
141
142 scored: list[dict[str, Any]] = []
143 for row in rows:
144 node_type, name, file_path, text, emb_json = (row[0], row[1], row[2], row[3], row[4])
 
 
145 if not emb_json:
146 continue
147 try:
148 node_vec: list[float] = json.loads(emb_json)
149 except (json.JSONDecodeError, TypeError):
150
--- navegador/llm.py
+++ navegador/llm.py
@@ -22,11 +22,10 @@
2222
2323
from __future__ import annotations
2424
2525
from abc import ABC, abstractmethod
2626
27
-
2827
# ── Abstract base ─────────────────────────────────────────────────────────────
2928
3029
3130
class LLMProvider(ABC):
3231
"""Abstract interface that every concrete LLM provider must satisfy."""
@@ -293,12 +292,11 @@
293292
ValueError: If *name* does not correspond to a known provider.
294293
ImportError: If the underlying SDK is not installed.
295294
"""
296295
if name not in _PROVIDER_CLASS_MAP:
297296
raise ValueError(
298
- f"Unknown LLM provider: {name!r}. "
299
- f"Valid options are: {sorted(_PROVIDER_CLASS_MAP)}"
297
+ f"Unknown LLM provider: {name!r}. Valid options are: {sorted(_PROVIDER_CLASS_MAP)}"
300298
)
301299
cls = _PROVIDER_CLASS_MAP[name]
302300
return cls(model=model)
303301
304302
305303
--- navegador/llm.py
+++ navegador/llm.py
@@ -22,11 +22,10 @@
22
23 from __future__ import annotations
24
25 from abc import ABC, abstractmethod
26
27
28 # ── Abstract base ─────────────────────────────────────────────────────────────
29
30
31 class LLMProvider(ABC):
32 """Abstract interface that every concrete LLM provider must satisfy."""
@@ -293,12 +292,11 @@
293 ValueError: If *name* does not correspond to a known provider.
294 ImportError: If the underlying SDK is not installed.
295 """
296 if name not in _PROVIDER_CLASS_MAP:
297 raise ValueError(
298 f"Unknown LLM provider: {name!r}. "
299 f"Valid options are: {sorted(_PROVIDER_CLASS_MAP)}"
300 )
301 cls = _PROVIDER_CLASS_MAP[name]
302 return cls(model=model)
303
304
305
--- navegador/llm.py
+++ navegador/llm.py
@@ -22,11 +22,10 @@
22
23 from __future__ import annotations
24
25 from abc import ABC, abstractmethod
26
 
27 # ── Abstract base ─────────────────────────────────────────────────────────────
28
29
30 class LLMProvider(ABC):
31 """Abstract interface that every concrete LLM provider must satisfy."""
@@ -293,12 +292,11 @@
292 ValueError: If *name* does not correspond to a known provider.
293 ImportError: If the underlying SDK is not installed.
294 """
295 if name not in _PROVIDER_CLASS_MAP:
296 raise ValueError(
297 f"Unknown LLM provider: {name!r}. Valid options are: {sorted(_PROVIDER_CLASS_MAP)}"
 
298 )
299 cls = _PROVIDER_CLASS_MAP[name]
300 return cls(model=model)
301
302
303
--- navegador/mcp/security.py
+++ navegador/mcp/security.py
@@ -65,25 +65,19 @@
6565
6666
# Check for write-operation keywords as whole words
6767
for kw in _WRITE_KEYWORDS:
6868
pattern = re.compile(rf"\b{kw}\b")
6969
if pattern.search(upper):
70
- raise QueryValidationError(
71
- f"Write operation '{kw}' is not allowed in read-only mode."
72
- )
70
+ raise QueryValidationError(f"Write operation '{kw}' is not allowed in read-only mode.")
7371
7472
# Check for CALL procedure injection
7573
if _CALL_RE.search(stripped):
76
- raise QueryValidationError(
77
- "CALL procedures are not allowed in read-only mode."
78
- )
74
+ raise QueryValidationError("CALL procedures are not allowed in read-only mode.")
7975
8076
# Check for nested / sub-query patterns
8177
if _SUBQUERY_RE.search(stripped):
82
- raise QueryValidationError(
83
- "Nested sub-queries are not allowed in read-only mode."
84
- )
78
+ raise QueryValidationError("Nested sub-queries are not allowed in read-only mode.")
8579
8680
8781
def check_complexity(
8882
query: str,
8983
max_depth: int = 5,
9084
--- navegador/mcp/security.py
+++ navegador/mcp/security.py
@@ -65,25 +65,19 @@
65
66 # Check for write-operation keywords as whole words
67 for kw in _WRITE_KEYWORDS:
68 pattern = re.compile(rf"\b{kw}\b")
69 if pattern.search(upper):
70 raise QueryValidationError(
71 f"Write operation '{kw}' is not allowed in read-only mode."
72 )
73
74 # Check for CALL procedure injection
75 if _CALL_RE.search(stripped):
76 raise QueryValidationError(
77 "CALL procedures are not allowed in read-only mode."
78 )
79
80 # Check for nested / sub-query patterns
81 if _SUBQUERY_RE.search(stripped):
82 raise QueryValidationError(
83 "Nested sub-queries are not allowed in read-only mode."
84 )
85
86
87 def check_complexity(
88 query: str,
89 max_depth: int = 5,
90
--- navegador/mcp/security.py
+++ navegador/mcp/security.py
@@ -65,25 +65,19 @@
65
66 # Check for write-operation keywords as whole words
67 for kw in _WRITE_KEYWORDS:
68 pattern = re.compile(rf"\b{kw}\b")
69 if pattern.search(upper):
70 raise QueryValidationError(f"Write operation '{kw}' is not allowed in read-only mode.")
 
 
71
72 # Check for CALL procedure injection
73 if _CALL_RE.search(stripped):
74 raise QueryValidationError("CALL procedures are not allowed in read-only mode.")
 
 
75
76 # Check for nested / sub-query patterns
77 if _SUBQUERY_RE.search(stripped):
78 raise QueryValidationError("Nested sub-queries are not allowed in read-only mode.")
 
 
79
80
81 def check_complexity(
82 query: str,
83 max_depth: int = 5,
84
--- navegador/mcp/server.py
+++ navegador/mcp/server.py
@@ -143,11 +143,11 @@
143143
description="Return node and edge counts for the current graph.",
144144
inputSchema={"type": "object", "properties": {}},
145145
),
146146
Tool(
147147
name="get_rationale",
148
- description="Return the rationale, alternatives, and status of an architectural decision.",
148
+ description="Return rationale, alternatives, and status of a decision.",
149149
inputSchema={
150150
"type": "object",
151151
"properties": {
152152
"name": {"type": "string", "description": "Decision name."},
153153
"format": {
@@ -175,11 +175,11 @@
175175
"required": ["name"],
176176
},
177177
),
178178
Tool(
179179
name="search_knowledge",
180
- description="Search concepts, rules, decisions, and wiki pages by name or description.",
180
+ description="Search concepts, rules, decisions, and wiki pages.",
181181
inputSchema={
182182
"type": "object",
183183
"properties": {
184184
"query": {"type": "string", "description": "Search query."},
185185
"limit": {"type": "integer", "default": 20},
@@ -217,14 +217,16 @@
217217
async def call_tool(name: str, arguments: dict) -> list[TextContent]:
218218
loader = _get_loader()
219219
220220
if name == "ingest_repo":
221221
if read_only:
222
- return [TextContent(
223
- type="text",
224
- text="Error: ingest_repo is disabled in read-only mode.",
225
- )]
222
+ return [
223
+ TextContent(
224
+ type="text",
225
+ text="Error: ingest_repo is disabled in read-only mode.",
226
+ )
227
+ ]
226228
from navegador.ingestion import RepoIngester
227229
228230
ingester = RepoIngester(loader.store)
229231
stats = ingester.ingest(arguments["path"], clear=arguments.get("clear", False))
230232
return [TextContent(type="text", text=json.dumps(stats, indent=2))]
@@ -289,25 +291,18 @@
289291
results = loader.find_owners(
290292
arguments["name"], file_path=arguments.get("file_path", "")
291293
)
292294
if not results:
293295
return [TextContent(type="text", text="No owners found.")]
294
- lines = [
295
- f"- **{r.name}** ({r.description})" for r in results
296
- ]
296
+ lines = [f"- **{r.name}** ({r.description})" for r in results]
297297
return [TextContent(type="text", text="\n".join(lines))]
298298
299299
elif name == "search_knowledge":
300
- results = loader.search_knowledge(
301
- arguments["query"], limit=arguments.get("limit", 20)
302
- )
300
+ results = loader.search_knowledge(arguments["query"], limit=arguments.get("limit", 20))
303301
if not results:
304302
return [TextContent(type="text", text="No results.")]
305
- lines = [
306
- f"- **{r.type}** `{r.name}` — {r.description or ''}"
307
- for r in results
308
- ]
303
+ lines = [f"- **{r.type}** `{r.name}` — {r.description or ''}" for r in results]
309304
return [TextContent(type="text", text="\n".join(lines))]
310305
311306
elif name == "blast_radius":
312307
from navegador.analysis.impact import ImpactAnalyzer
313308
314309
--- navegador/mcp/server.py
+++ navegador/mcp/server.py
@@ -143,11 +143,11 @@
143 description="Return node and edge counts for the current graph.",
144 inputSchema={"type": "object", "properties": {}},
145 ),
146 Tool(
147 name="get_rationale",
148 description="Return the rationale, alternatives, and status of an architectural decision.",
149 inputSchema={
150 "type": "object",
151 "properties": {
152 "name": {"type": "string", "description": "Decision name."},
153 "format": {
@@ -175,11 +175,11 @@
175 "required": ["name"],
176 },
177 ),
178 Tool(
179 name="search_knowledge",
180 description="Search concepts, rules, decisions, and wiki pages by name or description.",
181 inputSchema={
182 "type": "object",
183 "properties": {
184 "query": {"type": "string", "description": "Search query."},
185 "limit": {"type": "integer", "default": 20},
@@ -217,14 +217,16 @@
217 async def call_tool(name: str, arguments: dict) -> list[TextContent]:
218 loader = _get_loader()
219
220 if name == "ingest_repo":
221 if read_only:
222 return [TextContent(
223 type="text",
224 text="Error: ingest_repo is disabled in read-only mode.",
225 )]
 
 
226 from navegador.ingestion import RepoIngester
227
228 ingester = RepoIngester(loader.store)
229 stats = ingester.ingest(arguments["path"], clear=arguments.get("clear", False))
230 return [TextContent(type="text", text=json.dumps(stats, indent=2))]
@@ -289,25 +291,18 @@
289 results = loader.find_owners(
290 arguments["name"], file_path=arguments.get("file_path", "")
291 )
292 if not results:
293 return [TextContent(type="text", text="No owners found.")]
294 lines = [
295 f"- **{r.name}** ({r.description})" for r in results
296 ]
297 return [TextContent(type="text", text="\n".join(lines))]
298
299 elif name == "search_knowledge":
300 results = loader.search_knowledge(
301 arguments["query"], limit=arguments.get("limit", 20)
302 )
303 if not results:
304 return [TextContent(type="text", text="No results.")]
305 lines = [
306 f"- **{r.type}** `{r.name}` — {r.description or ''}"
307 for r in results
308 ]
309 return [TextContent(type="text", text="\n".join(lines))]
310
311 elif name == "blast_radius":
312 from navegador.analysis.impact import ImpactAnalyzer
313
314
--- navegador/mcp/server.py
+++ navegador/mcp/server.py
@@ -143,11 +143,11 @@
143 description="Return node and edge counts for the current graph.",
144 inputSchema={"type": "object", "properties": {}},
145 ),
146 Tool(
147 name="get_rationale",
148 description="Return rationale, alternatives, and status of a decision.",
149 inputSchema={
150 "type": "object",
151 "properties": {
152 "name": {"type": "string", "description": "Decision name."},
153 "format": {
@@ -175,11 +175,11 @@
175 "required": ["name"],
176 },
177 ),
178 Tool(
179 name="search_knowledge",
180 description="Search concepts, rules, decisions, and wiki pages.",
181 inputSchema={
182 "type": "object",
183 "properties": {
184 "query": {"type": "string", "description": "Search query."},
185 "limit": {"type": "integer", "default": 20},
@@ -217,14 +217,16 @@
217 async def call_tool(name: str, arguments: dict) -> list[TextContent]:
218 loader = _get_loader()
219
220 if name == "ingest_repo":
221 if read_only:
222 return [
223 TextContent(
224 type="text",
225 text="Error: ingest_repo is disabled in read-only mode.",
226 )
227 ]
228 from navegador.ingestion import RepoIngester
229
230 ingester = RepoIngester(loader.store)
231 stats = ingester.ingest(arguments["path"], clear=arguments.get("clear", False))
232 return [TextContent(type="text", text=json.dumps(stats, indent=2))]
@@ -289,25 +291,18 @@
291 results = loader.find_owners(
292 arguments["name"], file_path=arguments.get("file_path", "")
293 )
294 if not results:
295 return [TextContent(type="text", text="No owners found.")]
296 lines = [f"- **{r.name}** ({r.description})" for r in results]
 
 
297 return [TextContent(type="text", text="\n".join(lines))]
298
299 elif name == "search_knowledge":
300 results = loader.search_knowledge(arguments["query"], limit=arguments.get("limit", 20))
 
 
301 if not results:
302 return [TextContent(type="text", text="No results.")]
303 lines = [f"- **{r.type}** `{r.name}` — {r.description or ''}" for r in results]
 
 
 
304 return [TextContent(type="text", text="\n".join(lines))]
305
306 elif name == "blast_radius":
307 from navegador.analysis.impact import ImpactAnalyzer
308
309
--- navegador/monorepo.py
+++ navegador/monorepo.py
@@ -14,11 +14,11 @@
1414
from __future__ import annotations
1515
1616
import fnmatch
1717
import json
1818
import logging
19
-from dataclasses import dataclass, field
19
+from dataclasses import dataclass
2020
from pathlib import Path
2121
from typing import Any
2222
2323
from navegador.graph.schema import EdgeType, NodeLabel
2424
from navegador.graph.store import GraphStore
@@ -136,13 +136,13 @@
136136
or declare them in nx.json under "projects".
137137
"""
138138
# Try reading nx.json for explicit projects
139139
nx_json = root / "nx.json"
140140
try:
141
- data = json.loads(nx_json.read_text(encoding="utf-8"))
141
+ json.loads(nx_json.read_text(encoding="utf-8"))
142142
except (OSError, json.JSONDecodeError):
143
- data = {}
143
+ pass
144144
145145
# Nx 16+ uses workspaceLayout or projects in project.json files
146146
packages: list[Path] = []
147147
for subdir in ("apps", "libs", "packages"):
148148
base = root / subdir
@@ -447,13 +447,11 @@
447447
to_label=NodeLabel.Repository,
448448
to_key={"name": target},
449449
)
450450
edges_created += 1
451451
except Exception:
452
- logger.debug(
453
- "Could not create DEPENDS_ON edge %s → %s", pkg_name, target
454
- )
452
+ logger.debug("Could not create DEPENDS_ON edge %s → %s", pkg_name, target)
455453
456454
return edges_created
457455
458456
def _read_package_deps(self, workspace_type: str, pkg_path: Path) -> list[str]:
459457
"""Return a flat list of declared dependency names for a package."""
460458
--- navegador/monorepo.py
+++ navegador/monorepo.py
@@ -14,11 +14,11 @@
14 from __future__ import annotations
15
16 import fnmatch
17 import json
18 import logging
19 from dataclasses import dataclass, field
20 from pathlib import Path
21 from typing import Any
22
23 from navegador.graph.schema import EdgeType, NodeLabel
24 from navegador.graph.store import GraphStore
@@ -136,13 +136,13 @@
136 or declare them in nx.json under "projects".
137 """
138 # Try reading nx.json for explicit projects
139 nx_json = root / "nx.json"
140 try:
141 data = json.loads(nx_json.read_text(encoding="utf-8"))
142 except (OSError, json.JSONDecodeError):
143 data = {}
144
145 # Nx 16+ uses workspaceLayout or projects in project.json files
146 packages: list[Path] = []
147 for subdir in ("apps", "libs", "packages"):
148 base = root / subdir
@@ -447,13 +447,11 @@
447 to_label=NodeLabel.Repository,
448 to_key={"name": target},
449 )
450 edges_created += 1
451 except Exception:
452 logger.debug(
453 "Could not create DEPENDS_ON edge %s → %s", pkg_name, target
454 )
455
456 return edges_created
457
458 def _read_package_deps(self, workspace_type: str, pkg_path: Path) -> list[str]:
459 """Return a flat list of declared dependency names for a package."""
460
--- navegador/monorepo.py
+++ navegador/monorepo.py
@@ -14,11 +14,11 @@
14 from __future__ import annotations
15
16 import fnmatch
17 import json
18 import logging
19 from dataclasses import dataclass
20 from pathlib import Path
21 from typing import Any
22
23 from navegador.graph.schema import EdgeType, NodeLabel
24 from navegador.graph.store import GraphStore
@@ -136,13 +136,13 @@
136 or declare them in nx.json under "projects".
137 """
138 # Try reading nx.json for explicit projects
139 nx_json = root / "nx.json"
140 try:
141 json.loads(nx_json.read_text(encoding="utf-8"))
142 except (OSError, json.JSONDecodeError):
143 pass
144
145 # Nx 16+ uses workspaceLayout or projects in project.json files
146 packages: list[Path] = []
147 for subdir in ("apps", "libs", "packages"):
148 base = root / subdir
@@ -447,13 +447,11 @@
447 to_label=NodeLabel.Repository,
448 to_key={"name": target},
449 )
450 edges_created += 1
451 except Exception:
452 logger.debug("Could not create DEPENDS_ON edge %s → %s", pkg_name, target)
 
 
453
454 return edges_created
455
456 def _read_package_deps(self, workspace_type: str, pkg_path: Path) -> list[str]:
457 """Return a flat list of declared dependency names for a package."""
458
--- navegador/multirepo.py
+++ navegador/multirepo.py
@@ -27,17 +27,16 @@
2727
results = ws_fed.search("authenticate")
2828
"""
2929
3030
from __future__ import annotations
3131
32
-import json
3332
import logging
3433
from enum import Enum
3534
from pathlib import Path
3635
from typing import Any
3736
38
-from navegador.graph.schema import EdgeType, NodeLabel
37
+from navegador.graph.schema import NodeLabel
3938
from navegador.graph.store import GraphStore
4039
4140
logger = logging.getLogger(__name__)
4241
4342
# Key used to store repo registry as a special node in the graph
@@ -103,13 +102,11 @@
103102
"description": f"workspace:{self.mode.value}",
104103
"language": "",
105104
"file_path": resolved,
106105
},
107106
)
108
- logger.info(
109
- "WorkspaceManager (%s): registered %s → %s", self.mode.value, name, resolved
110
- )
107
+ logger.info("WorkspaceManager (%s): registered %s → %s", self.mode.value, name, resolved)
111108
112109
def list_repos(self) -> list[dict[str, str]]:
113110
"""Return all registered repositories."""
114111
return [
115112
{"name": name, "path": info["path"], "graph_name": info["graph_name"]}
@@ -153,13 +150,11 @@
153150
try:
154151
ingester = RepoIngester(target_store)
155152
stats = ingester.ingest(path, clear=False)
156153
summary[name] = stats
157154
except Exception as exc: # noqa: BLE001
158
- logger.error(
159
- "WorkspaceManager: failed to ingest %s: %s", name, exc
160
- )
155
+ logger.error("WorkspaceManager: failed to ingest %s: %s", name, exc)
161156
summary[name] = {"error": str(exc)}
162157
163158
return summary
164159
165160
# ── Search ────────────────────────────────────────────────────────────────
@@ -191,13 +186,11 @@
191186
if key not in seen:
192187
seen.add(key)
193188
r["repo"] = name
194189
all_results.append(r)
195190
except Exception:
196
- logger.debug(
197
- "WorkspaceManager: search failed for repo %s", name, exc_info=True
198
- )
191
+ logger.debug("WorkspaceManager: search failed for repo %s", name, exc_info=True)
199192
200193
return all_results[:limit]
201194
202195
# ── Helpers ───────────────────────────────────────────────────────────────
203196
@@ -263,13 +256,11 @@
263256
264257
# ── Query ─────────────────────────────────────────────────────────────────
265258
266259
def list_repos(self) -> list[dict[str, Any]]:
267260
"""Return all registered repositories."""
268
- result = self.store.query(
269
- "MATCH (r:Repository) RETURN r.name, r.path ORDER BY r.name"
270
- )
261
+ result = self.store.query("MATCH (r:Repository) RETURN r.name, r.path ORDER BY r.name")
271262
rows = result.result_set or []
272263
return [{"name": row[0], "path": row[1]} for row in rows]
273264
274265
# ── Ingestion ─────────────────────────────────────────────────────────────
275266
@@ -320,9 +311,6 @@
320311
" coalesce(n.file_path, n.path, '') AS file_path "
321312
f"LIMIT {int(limit)}"
322313
)
323314
result = self.store.query(cypher, {"q": query})
324315
rows = result.result_set or []
325
- return [
326
- {"label": row[0], "name": row[1], "file_path": row[2]}
327
- for row in rows
328
- ]
316
+ return [{"label": row[0], "name": row[1], "file_path": row[2]} for row in rows]
329317
--- navegador/multirepo.py
+++ navegador/multirepo.py
@@ -27,17 +27,16 @@
27 results = ws_fed.search("authenticate")
28 """
29
30 from __future__ import annotations
31
32 import json
33 import logging
34 from enum import Enum
35 from pathlib import Path
36 from typing import Any
37
38 from navegador.graph.schema import EdgeType, NodeLabel
39 from navegador.graph.store import GraphStore
40
41 logger = logging.getLogger(__name__)
42
43 # Key used to store repo registry as a special node in the graph
@@ -103,13 +102,11 @@
103 "description": f"workspace:{self.mode.value}",
104 "language": "",
105 "file_path": resolved,
106 },
107 )
108 logger.info(
109 "WorkspaceManager (%s): registered %s → %s", self.mode.value, name, resolved
110 )
111
112 def list_repos(self) -> list[dict[str, str]]:
113 """Return all registered repositories."""
114 return [
115 {"name": name, "path": info["path"], "graph_name": info["graph_name"]}
@@ -153,13 +150,11 @@
153 try:
154 ingester = RepoIngester(target_store)
155 stats = ingester.ingest(path, clear=False)
156 summary[name] = stats
157 except Exception as exc: # noqa: BLE001
158 logger.error(
159 "WorkspaceManager: failed to ingest %s: %s", name, exc
160 )
161 summary[name] = {"error": str(exc)}
162
163 return summary
164
165 # ── Search ────────────────────────────────────────────────────────────────
@@ -191,13 +186,11 @@
191 if key not in seen:
192 seen.add(key)
193 r["repo"] = name
194 all_results.append(r)
195 except Exception:
196 logger.debug(
197 "WorkspaceManager: search failed for repo %s", name, exc_info=True
198 )
199
200 return all_results[:limit]
201
202 # ── Helpers ───────────────────────────────────────────────────────────────
203
@@ -263,13 +256,11 @@
263
264 # ── Query ─────────────────────────────────────────────────────────────────
265
266 def list_repos(self) -> list[dict[str, Any]]:
267 """Return all registered repositories."""
268 result = self.store.query(
269 "MATCH (r:Repository) RETURN r.name, r.path ORDER BY r.name"
270 )
271 rows = result.result_set or []
272 return [{"name": row[0], "path": row[1]} for row in rows]
273
274 # ── Ingestion ─────────────────────────────────────────────────────────────
275
@@ -320,9 +311,6 @@
320 " coalesce(n.file_path, n.path, '') AS file_path "
321 f"LIMIT {int(limit)}"
322 )
323 result = self.store.query(cypher, {"q": query})
324 rows = result.result_set or []
325 return [
326 {"label": row[0], "name": row[1], "file_path": row[2]}
327 for row in rows
328 ]
329
--- navegador/multirepo.py
+++ navegador/multirepo.py
@@ -27,17 +27,16 @@
27 results = ws_fed.search("authenticate")
28 """
29
30 from __future__ import annotations
31
 
32 import logging
33 from enum import Enum
34 from pathlib import Path
35 from typing import Any
36
37 from navegador.graph.schema import NodeLabel
38 from navegador.graph.store import GraphStore
39
40 logger = logging.getLogger(__name__)
41
42 # Key used to store repo registry as a special node in the graph
@@ -103,13 +102,11 @@
102 "description": f"workspace:{self.mode.value}",
103 "language": "",
104 "file_path": resolved,
105 },
106 )
107 logger.info("WorkspaceManager (%s): registered %s → %s", self.mode.value, name, resolved)
 
 
108
109 def list_repos(self) -> list[dict[str, str]]:
110 """Return all registered repositories."""
111 return [
112 {"name": name, "path": info["path"], "graph_name": info["graph_name"]}
@@ -153,13 +150,11 @@
150 try:
151 ingester = RepoIngester(target_store)
152 stats = ingester.ingest(path, clear=False)
153 summary[name] = stats
154 except Exception as exc: # noqa: BLE001
155 logger.error("WorkspaceManager: failed to ingest %s: %s", name, exc)
 
 
156 summary[name] = {"error": str(exc)}
157
158 return summary
159
160 # ── Search ────────────────────────────────────────────────────────────────
@@ -191,13 +186,11 @@
186 if key not in seen:
187 seen.add(key)
188 r["repo"] = name
189 all_results.append(r)
190 except Exception:
191 logger.debug("WorkspaceManager: search failed for repo %s", name, exc_info=True)
 
 
192
193 return all_results[:limit]
194
195 # ── Helpers ───────────────────────────────────────────────────────────────
196
@@ -263,13 +256,11 @@
256
257 # ── Query ─────────────────────────────────────────────────────────────────
258
259 def list_repos(self) -> list[dict[str, Any]]:
260 """Return all registered repositories."""
261 result = self.store.query("MATCH (r:Repository) RETURN r.name, r.path ORDER BY r.name")
 
 
262 rows = result.result_set or []
263 return [{"name": row[0], "path": row[1]} for row in rows]
264
265 # ── Ingestion ─────────────────────────────────────────────────────────────
266
@@ -320,9 +311,6 @@
311 " coalesce(n.file_path, n.path, '') AS file_path "
312 f"LIMIT {int(limit)}"
313 )
314 result = self.store.query(cypher, {"q": query})
315 rows = result.result_set or []
316 return [{"label": row[0], "name": row[1], "file_path": row[2]} for row in rows]
 
 
 
317
--- navegador/planopticon_pipeline.py
+++ navegador/planopticon_pipeline.py
@@ -18,15 +18,14 @@
1818
"""
1919
2020
from __future__ import annotations
2121
2222
import logging
23
-from dataclasses import dataclass, field
23
+from dataclasses import dataclass
2424
from pathlib import Path
2525
from typing import Any
2626
27
-from navegador.graph.schema import EdgeType, NodeLabel
2827
from navegador.graph.store import GraphStore
2928
3029
logger = logging.getLogger(__name__)
3130
3231
@@ -135,13 +134,13 @@
135134
knowledge_graph.json, or any combination that may contain an
136135
``action_items`` list or ``entities``/``nodes`` with task types.
137136
"""
138137
items: list[ActionItem] = []
139138
140
- source = kg_data.get("video", {}).get("title", "") or kg_data.get(
141
- "project", {}
142
- ).get("name", "")
139
+ source = kg_data.get("video", {}).get("title", "") or kg_data.get("project", {}).get(
140
+ "name", ""
141
+ )
143142
144143
# Explicit action_items list (manifest format)
145144
for raw in kg_data.get("action_items", []):
146145
action = (raw.get("action") or "").strip()
147146
if not action:
@@ -201,18 +200,20 @@
201200
logger.warning("build_decision_timeline: query failed", exc_info=True)
202201
return []
203202
204203
timeline = []
205204
for row in rows:
206
- timeline.append({
207
- "name": row[0] or "",
208
- "description": row[1] or "",
209
- "domain": row[2] or "",
210
- "status": row[3] or "",
211
- "rationale": row[4] or "",
212
- "date": row[5] or "",
213
- })
205
+ timeline.append(
206
+ {
207
+ "name": row[0] or "",
208
+ "description": row[1] or "",
209
+ "domain": row[2] or "",
210
+ "status": row[3] or "",
211
+ "rationale": row[4] or "",
212
+ "date": row[5] or "",
213
+ }
214
+ )
214215
return timeline
215216
216217
# ── Auto-link to code ─────────────────────────────────────────────────────
217218
218219
@staticmethod
@@ -230,18 +231,14 @@
230231
-------
231232
int — number of new ANNOTATES edges created
232233
"""
233234
# Fetch all knowledge nodes
234235
knowledge_cypher = (
235
- "MATCH (k) "
236
- "WHERE k:Concept OR k:Decision OR k:Rule "
237
- "RETURN labels(k)[0], k.name"
236
+ "MATCH (k) WHERE k:Concept OR k:Decision OR k:Rule RETURN labels(k)[0], k.name"
238237
)
239238
code_cypher = (
240
- "MATCH (c) "
241
- "WHERE c:Function OR c:Class OR c:Method "
242
- "RETURN labels(c)[0], c.name"
239
+ "MATCH (c) WHERE c:Function OR c:Class OR c:Method RETURN labels(c)[0], c.name"
243240
)
244241
245242
try:
246243
k_result = store.query(knowledge_cypher)
247244
c_result = store.query(code_cypher)
@@ -248,53 +245,41 @@
248245
except Exception:
249246
logger.warning("auto_link_to_code: initial queries failed", exc_info=True)
250247
return 0
251248
252249
knowledge_nodes: list[tuple[str, str]] = [
253
- (str(row[0]), str(row[1]))
254
- for row in (k_result.result_set or [])
255
- if row[0] and row[1]
250
+ (str(row[0]), str(row[1])) for row in (k_result.result_set or []) if row[0] and row[1]
256251
]
257252
code_nodes: list[tuple[str, str]] = [
258
- (str(row[0]), str(row[1]))
259
- for row in (c_result.result_set or [])
260
- if row[0] and row[1]
253
+ (str(row[0]), str(row[1])) for row in (c_result.result_set or []) if row[0] and row[1]
261254
]
262255
263256
if not knowledge_nodes or not code_nodes:
264257
return 0
265258
266259
linked = 0
267260
for k_label, k_name in knowledge_nodes:
268261
# Extract significant tokens (length >= 4) from the knowledge name
269262
tokens = [
270
- w.lower()
271
- for w in k_name.replace("_", " ").replace("-", " ").split()
272
- if len(w) >= 4
263
+ w.lower() for w in k_name.replace("_", " ").replace("-", " ").split() if len(w) >= 4
273264
]
274265
if not tokens:
275266
continue
276267
277268
for c_label, c_name in code_nodes:
278269
c_lower = c_name.lower()
279270
if any(tok in c_lower for tok in tokens):
280271
# Create ANNOTATES edge from knowledge node to code node
281272
cypher = (
282
- "MATCH (k:"
283
- + k_label
284
- + " {name: $kn}), (c:"
285
- + c_label
286
- + " {name: $cn}) "
273
+ "MATCH (k:" + k_label + " {name: $kn}), (c:" + c_label + " {name: $cn}) "
287274
"MERGE (k)-[r:ANNOTATES]->(c)"
288275
)
289276
try:
290277
store.query(cypher, {"kn": k_name, "cn": c_name})
291278
linked += 1
292279
except Exception:
293
- logger.debug(
294
- "auto_link_to_code: could not link %s → %s", k_name, c_name
295
- )
280
+ logger.debug("auto_link_to_code: could not link %s → %s", k_name, c_name)
296281
297282
return linked
298283
299284
# ── Helpers ───────────────────────────────────────────────────────────────
300285
301286
--- navegador/planopticon_pipeline.py
+++ navegador/planopticon_pipeline.py
@@ -18,15 +18,14 @@
18 """
19
20 from __future__ import annotations
21
22 import logging
23 from dataclasses import dataclass, field
24 from pathlib import Path
25 from typing import Any
26
27 from navegador.graph.schema import EdgeType, NodeLabel
28 from navegador.graph.store import GraphStore
29
30 logger = logging.getLogger(__name__)
31
32
@@ -135,13 +134,13 @@
135 knowledge_graph.json, or any combination that may contain an
136 ``action_items`` list or ``entities``/``nodes`` with task types.
137 """
138 items: list[ActionItem] = []
139
140 source = kg_data.get("video", {}).get("title", "") or kg_data.get(
141 "project", {}
142 ).get("name", "")
143
144 # Explicit action_items list (manifest format)
145 for raw in kg_data.get("action_items", []):
146 action = (raw.get("action") or "").strip()
147 if not action:
@@ -201,18 +200,20 @@
201 logger.warning("build_decision_timeline: query failed", exc_info=True)
202 return []
203
204 timeline = []
205 for row in rows:
206 timeline.append({
207 "name": row[0] or "",
208 "description": row[1] or "",
209 "domain": row[2] or "",
210 "status": row[3] or "",
211 "rationale": row[4] or "",
212 "date": row[5] or "",
213 })
 
 
214 return timeline
215
216 # ── Auto-link to code ─────────────────────────────────────────────────────
217
218 @staticmethod
@@ -230,18 +231,14 @@
230 -------
231 int — number of new ANNOTATES edges created
232 """
233 # Fetch all knowledge nodes
234 knowledge_cypher = (
235 "MATCH (k) "
236 "WHERE k:Concept OR k:Decision OR k:Rule "
237 "RETURN labels(k)[0], k.name"
238 )
239 code_cypher = (
240 "MATCH (c) "
241 "WHERE c:Function OR c:Class OR c:Method "
242 "RETURN labels(c)[0], c.name"
243 )
244
245 try:
246 k_result = store.query(knowledge_cypher)
247 c_result = store.query(code_cypher)
@@ -248,53 +245,41 @@
248 except Exception:
249 logger.warning("auto_link_to_code: initial queries failed", exc_info=True)
250 return 0
251
252 knowledge_nodes: list[tuple[str, str]] = [
253 (str(row[0]), str(row[1]))
254 for row in (k_result.result_set or [])
255 if row[0] and row[1]
256 ]
257 code_nodes: list[tuple[str, str]] = [
258 (str(row[0]), str(row[1]))
259 for row in (c_result.result_set or [])
260 if row[0] and row[1]
261 ]
262
263 if not knowledge_nodes or not code_nodes:
264 return 0
265
266 linked = 0
267 for k_label, k_name in knowledge_nodes:
268 # Extract significant tokens (length >= 4) from the knowledge name
269 tokens = [
270 w.lower()
271 for w in k_name.replace("_", " ").replace("-", " ").split()
272 if len(w) >= 4
273 ]
274 if not tokens:
275 continue
276
277 for c_label, c_name in code_nodes:
278 c_lower = c_name.lower()
279 if any(tok in c_lower for tok in tokens):
280 # Create ANNOTATES edge from knowledge node to code node
281 cypher = (
282 "MATCH (k:"
283 + k_label
284 + " {name: $kn}), (c:"
285 + c_label
286 + " {name: $cn}) "
287 "MERGE (k)-[r:ANNOTATES]->(c)"
288 )
289 try:
290 store.query(cypher, {"kn": k_name, "cn": c_name})
291 linked += 1
292 except Exception:
293 logger.debug(
294 "auto_link_to_code: could not link %s → %s", k_name, c_name
295 )
296
297 return linked
298
299 # ── Helpers ───────────────────────────────────────────────────────────────
300
301
--- navegador/planopticon_pipeline.py
+++ navegador/planopticon_pipeline.py
@@ -18,15 +18,14 @@
18 """
19
20 from __future__ import annotations
21
22 import logging
23 from dataclasses import dataclass
24 from pathlib import Path
25 from typing import Any
26
 
27 from navegador.graph.store import GraphStore
28
29 logger = logging.getLogger(__name__)
30
31
@@ -135,13 +134,13 @@
134 knowledge_graph.json, or any combination that may contain an
135 ``action_items`` list or ``entities``/``nodes`` with task types.
136 """
137 items: list[ActionItem] = []
138
139 source = kg_data.get("video", {}).get("title", "") or kg_data.get("project", {}).get(
140 "name", ""
141 )
142
143 # Explicit action_items list (manifest format)
144 for raw in kg_data.get("action_items", []):
145 action = (raw.get("action") or "").strip()
146 if not action:
@@ -201,18 +200,20 @@
200 logger.warning("build_decision_timeline: query failed", exc_info=True)
201 return []
202
203 timeline = []
204 for row in rows:
205 timeline.append(
206 {
207 "name": row[0] or "",
208 "description": row[1] or "",
209 "domain": row[2] or "",
210 "status": row[3] or "",
211 "rationale": row[4] or "",
212 "date": row[5] or "",
213 }
214 )
215 return timeline
216
217 # ── Auto-link to code ─────────────────────────────────────────────────────
218
219 @staticmethod
@@ -230,18 +231,14 @@
231 -------
232 int — number of new ANNOTATES edges created
233 """
234 # Fetch all knowledge nodes
235 knowledge_cypher = (
236 "MATCH (k) WHERE k:Concept OR k:Decision OR k:Rule RETURN labels(k)[0], k.name"
 
 
237 )
238 code_cypher = (
239 "MATCH (c) WHERE c:Function OR c:Class OR c:Method RETURN labels(c)[0], c.name"
 
 
240 )
241
242 try:
243 k_result = store.query(knowledge_cypher)
244 c_result = store.query(code_cypher)
@@ -248,53 +245,41 @@
245 except Exception:
246 logger.warning("auto_link_to_code: initial queries failed", exc_info=True)
247 return 0
248
249 knowledge_nodes: list[tuple[str, str]] = [
250 (str(row[0]), str(row[1])) for row in (k_result.result_set or []) if row[0] and row[1]
 
 
251 ]
252 code_nodes: list[tuple[str, str]] = [
253 (str(row[0]), str(row[1])) for row in (c_result.result_set or []) if row[0] and row[1]
 
 
254 ]
255
256 if not knowledge_nodes or not code_nodes:
257 return 0
258
259 linked = 0
260 for k_label, k_name in knowledge_nodes:
261 # Extract significant tokens (length >= 4) from the knowledge name
262 tokens = [
263 w.lower() for w in k_name.replace("_", " ").replace("-", " ").split() if len(w) >= 4
 
 
264 ]
265 if not tokens:
266 continue
267
268 for c_label, c_name in code_nodes:
269 c_lower = c_name.lower()
270 if any(tok in c_lower for tok in tokens):
271 # Create ANNOTATES edge from knowledge node to code node
272 cypher = (
273 "MATCH (k:" + k_label + " {name: $kn}), (c:" + c_label + " {name: $cn}) "
 
 
 
 
274 "MERGE (k)-[r:ANNOTATES]->(c)"
275 )
276 try:
277 store.query(cypher, {"kn": k_name, "cn": c_name})
278 linked += 1
279 except Exception:
280 logger.debug("auto_link_to_code: could not link %s → %s", k_name, c_name)
 
 
281
282 return linked
283
284 # ── Helpers ───────────────────────────────────────────────────────────────
285
286
+8 -27
--- navegador/pm.py
+++ navegador/pm.py
@@ -101,14 +101,11 @@
101101
}
102102
if token:
103103
headers["Authorization"] = f"Bearer {token}"
104104
105105
per_page = min(limit, 100)
106
- url = (
107
- f"https://api.github.com/repos/{repo}/issues"
108
- f"?state={state}&per_page={per_page}&page=1"
109
- )
106
+ url = f"https://api.github.com/repos/{repo}/issues?state={state}&per_page={per_page}&page=1"
110107
111108
try:
112109
req = urllib.request.Request(url, headers=headers)
113110
with urllib.request.urlopen(req, timeout=15) as resp:
114111
import json
@@ -226,17 +223,13 @@
226223
227224
Returns
228225
-------
229226
int — number of edges created
230227
"""
231
- ticket_cypher = (
232
- "MATCH (t:Rule) WHERE t.domain = $domain "
233
- "RETURN t.name, t.description"
234
- )
228
+ ticket_cypher = "MATCH (t:Rule) WHERE t.domain = $domain RETURN t.name, t.description"
235229
code_cypher = (
236
- "MATCH (c) WHERE c:Function OR c:Class OR c:Method "
237
- "RETURN labels(c)[0], c.name"
230
+ "MATCH (c) WHERE c:Function OR c:Class OR c:Method RETURN labels(c)[0], c.name"
238231
)
239232
240233
try:
241234
t_result = self.store.query(ticket_cypher, {"domain": domain})
242235
c_result = self.store.query(code_cypher)
@@ -243,49 +236,37 @@
243236
except Exception:
244237
logger.warning("TicketIngester._link_to_code: queries failed", exc_info=True)
245238
return 0
246239
247240
tickets = [
248
- (str(row[0]), str(row[1] or ""))
249
- for row in (t_result.result_set or [])
250
- if row[0]
241
+ (str(row[0]), str(row[1] or "")) for row in (t_result.result_set or []) if row[0]
251242
]
252243
code_nodes = [
253
- (str(row[0]), str(row[1]))
254
- for row in (c_result.result_set or [])
255
- if row[0] and row[1]
244
+ (str(row[0]), str(row[1])) for row in (c_result.result_set or []) if row[0] and row[1]
256245
]
257246
258247
if not tickets or not code_nodes:
259248
return 0
260249
261250
linked = 0
262251
for t_name, t_desc in tickets:
263252
combined = f"{t_name} {t_desc}"
264
- tokens = {
265
- w.lower()
266
- for w in re.split(r"[\s\W]+", combined)
267
- if len(w) >= 4
268
- }
253
+ tokens = {w.lower() for w in re.split(r"[\s\W]+", combined) if len(w) >= 4}
269254
if not tokens:
270255
continue
271256
272257
for c_label, c_name in code_nodes:
273258
if any(tok in c_name.lower() for tok in tokens):
274259
cypher = (
275
- "MATCH (t:Rule {name: $tn}), (c:"
276
- + c_label
277
- + " {name: $cn}) "
260
+ "MATCH (t:Rule {name: $tn}), (c:" + c_label + " {name: $cn}) "
278261
"MERGE (t)-[r:ANNOTATES]->(c)"
279262
)
280263
try:
281264
self.store.query(cypher, {"tn": t_name, "cn": c_name})
282265
linked += 1
283266
except Exception:
284
- logger.debug(
285
- "TicketIngester: could not link %s → %s", t_name, c_name
286
- )
267
+ logger.debug("TicketIngester: could not link %s → %s", t_name, c_name)
287268
return linked
288269
289270
@staticmethod
290271
def _github_severity(labels: list[str]) -> str:
291272
"""Map GitHub label names to navegador severity levels."""
292273
--- navegador/pm.py
+++ navegador/pm.py
@@ -101,14 +101,11 @@
101 }
102 if token:
103 headers["Authorization"] = f"Bearer {token}"
104
105 per_page = min(limit, 100)
106 url = (
107 f"https://api.github.com/repos/{repo}/issues"
108 f"?state={state}&per_page={per_page}&page=1"
109 )
110
111 try:
112 req = urllib.request.Request(url, headers=headers)
113 with urllib.request.urlopen(req, timeout=15) as resp:
114 import json
@@ -226,17 +223,13 @@
226
227 Returns
228 -------
229 int — number of edges created
230 """
231 ticket_cypher = (
232 "MATCH (t:Rule) WHERE t.domain = $domain "
233 "RETURN t.name, t.description"
234 )
235 code_cypher = (
236 "MATCH (c) WHERE c:Function OR c:Class OR c:Method "
237 "RETURN labels(c)[0], c.name"
238 )
239
240 try:
241 t_result = self.store.query(ticket_cypher, {"domain": domain})
242 c_result = self.store.query(code_cypher)
@@ -243,49 +236,37 @@
243 except Exception:
244 logger.warning("TicketIngester._link_to_code: queries failed", exc_info=True)
245 return 0
246
247 tickets = [
248 (str(row[0]), str(row[1] or ""))
249 for row in (t_result.result_set or [])
250 if row[0]
251 ]
252 code_nodes = [
253 (str(row[0]), str(row[1]))
254 for row in (c_result.result_set or [])
255 if row[0] and row[1]
256 ]
257
258 if not tickets or not code_nodes:
259 return 0
260
261 linked = 0
262 for t_name, t_desc in tickets:
263 combined = f"{t_name} {t_desc}"
264 tokens = {
265 w.lower()
266 for w in re.split(r"[\s\W]+", combined)
267 if len(w) >= 4
268 }
269 if not tokens:
270 continue
271
272 for c_label, c_name in code_nodes:
273 if any(tok in c_name.lower() for tok in tokens):
274 cypher = (
275 "MATCH (t:Rule {name: $tn}), (c:"
276 + c_label
277 + " {name: $cn}) "
278 "MERGE (t)-[r:ANNOTATES]->(c)"
279 )
280 try:
281 self.store.query(cypher, {"tn": t_name, "cn": c_name})
282 linked += 1
283 except Exception:
284 logger.debug(
285 "TicketIngester: could not link %s → %s", t_name, c_name
286 )
287 return linked
288
289 @staticmethod
290 def _github_severity(labels: list[str]) -> str:
291 """Map GitHub label names to navegador severity levels."""
292
--- navegador/pm.py
+++ navegador/pm.py
@@ -101,14 +101,11 @@
101 }
102 if token:
103 headers["Authorization"] = f"Bearer {token}"
104
105 per_page = min(limit, 100)
106 url = f"https://api.github.com/repos/{repo}/issues?state={state}&per_page={per_page}&page=1"
 
 
 
107
108 try:
109 req = urllib.request.Request(url, headers=headers)
110 with urllib.request.urlopen(req, timeout=15) as resp:
111 import json
@@ -226,17 +223,13 @@
223
224 Returns
225 -------
226 int — number of edges created
227 """
228 ticket_cypher = "MATCH (t:Rule) WHERE t.domain = $domain RETURN t.name, t.description"
 
 
 
229 code_cypher = (
230 "MATCH (c) WHERE c:Function OR c:Class OR c:Method RETURN labels(c)[0], c.name"
 
231 )
232
233 try:
234 t_result = self.store.query(ticket_cypher, {"domain": domain})
235 c_result = self.store.query(code_cypher)
@@ -243,49 +236,37 @@
236 except Exception:
237 logger.warning("TicketIngester._link_to_code: queries failed", exc_info=True)
238 return 0
239
240 tickets = [
241 (str(row[0]), str(row[1] or "")) for row in (t_result.result_set or []) if row[0]
 
 
242 ]
243 code_nodes = [
244 (str(row[0]), str(row[1])) for row in (c_result.result_set or []) if row[0] and row[1]
 
 
245 ]
246
247 if not tickets or not code_nodes:
248 return 0
249
250 linked = 0
251 for t_name, t_desc in tickets:
252 combined = f"{t_name} {t_desc}"
253 tokens = {w.lower() for w in re.split(r"[\s\W]+", combined) if len(w) >= 4}
 
 
 
 
254 if not tokens:
255 continue
256
257 for c_label, c_name in code_nodes:
258 if any(tok in c_name.lower() for tok in tokens):
259 cypher = (
260 "MATCH (t:Rule {name: $tn}), (c:" + c_label + " {name: $cn}) "
 
 
261 "MERGE (t)-[r:ANNOTATES]->(c)"
262 )
263 try:
264 self.store.query(cypher, {"tn": t_name, "cn": c_name})
265 linked += 1
266 except Exception:
267 logger.debug("TicketIngester: could not link %s → %s", t_name, c_name)
 
 
268 return linked
269
270 @staticmethod
271 def _github_severity(labels: list[str]) -> str:
272 """Map GitHub label names to navegador severity levels."""
273
--- navegador/refactor.py
+++ navegador/refactor.py
@@ -61,13 +61,11 @@
6161
def __init__(self, store: GraphStore) -> None:
6262
self.store = store
6363
6464
# ── Public API ────────────────────────────────────────────────────────────
6565
66
- def find_references(
67
- self, name: str, file_path: str = ""
68
- ) -> list[dict[str, Any]]:
66
+ def find_references(self, name: str, file_path: str = "") -> list[dict[str, Any]]:
6967
"""
7068
Return all graph nodes whose name matches *name*.
7169
7270
Optionally filter to a specific file with *file_path*.
7371
"""
@@ -151,13 +149,11 @@
151149
152150
# ── Helpers ───────────────────────────────────────────────────────────────
153151
154152
def _count_edges(self, name: str) -> int:
155153
"""Count edges incident on nodes named *name*."""
156
- cypher = (
157
- "MATCH (n)-[r]-() WHERE n.name = $name RETURN count(r) AS c"
158
- )
154
+ cypher = "MATCH (n)-[r]-() WHERE n.name = $name RETURN count(r) AS c"
159155
result = self.store.query(cypher, {"name": name})
160156
rows = result.result_set or []
161157
if rows:
162158
return rows[0][0] or 0
163159
return 0
164160
--- navegador/refactor.py
+++ navegador/refactor.py
@@ -61,13 +61,11 @@
61 def __init__(self, store: GraphStore) -> None:
62 self.store = store
63
64 # ── Public API ────────────────────────────────────────────────────────────
65
66 def find_references(
67 self, name: str, file_path: str = ""
68 ) -> list[dict[str, Any]]:
69 """
70 Return all graph nodes whose name matches *name*.
71
72 Optionally filter to a specific file with *file_path*.
73 """
@@ -151,13 +149,11 @@
151
152 # ── Helpers ───────────────────────────────────────────────────────────────
153
154 def _count_edges(self, name: str) -> int:
155 """Count edges incident on nodes named *name*."""
156 cypher = (
157 "MATCH (n)-[r]-() WHERE n.name = $name RETURN count(r) AS c"
158 )
159 result = self.store.query(cypher, {"name": name})
160 rows = result.result_set or []
161 if rows:
162 return rows[0][0] or 0
163 return 0
164
--- navegador/refactor.py
+++ navegador/refactor.py
@@ -61,13 +61,11 @@
61 def __init__(self, store: GraphStore) -> None:
62 self.store = store
63
64 # ── Public API ────────────────────────────────────────────────────────────
65
66 def find_references(self, name: str, file_path: str = "") -> list[dict[str, Any]]:
 
 
67 """
68 Return all graph nodes whose name matches *name*.
69
70 Optionally filter to a specific file with *file_path*.
71 """
@@ -151,13 +149,11 @@
149
150 # ── Helpers ───────────────────────────────────────────────────────────────
151
152 def _count_edges(self, name: str) -> int:
153 """Count edges incident on nodes named *name*."""
154 cypher = "MATCH (n)-[r]-() WHERE n.name = $name RETURN count(r) AS c"
 
 
155 result = self.store.query(cypher, {"name": name})
156 rows = result.result_set or []
157 if rows:
158 return rows[0][0] or 0
159 return 0
160
--- navegador/sdk.py
+++ navegador/sdk.py
@@ -94,13 +94,11 @@
9494
Returns:
9595
Dict with counts: files, functions, classes, edges, skipped.
9696
"""
9797
from navegador.ingestion import RepoIngester
9898
99
- return RepoIngester(self._store).ingest(
100
- repo_path, clear=clear, incremental=incremental
101
- )
99
+ return RepoIngester(self._store).ingest(repo_path, clear=clear, incremental=incremental)
102100
103101
# ── Context loading ───────────────────────────────────────────────────────
104102
105103
def file_context(self, file_path: str) -> Any:
106104
"""
@@ -114,13 +112,11 @@
114112
"""
115113
from navegador.context.loader import ContextLoader
116114
117115
return ContextLoader(self._store).load_file(file_path)
118116
119
- def function_context(
120
- self, name: str, file_path: str = "", depth: int = 2
121
- ) -> Any:
117
+ def function_context(self, name: str, file_path: str = "", depth: int = 2) -> Any:
122118
"""
123119
Return a ContextBundle for a function — callers, callees, decorators.
124120
125121
Args:
126122
name: Function name.
@@ -130,13 +126,11 @@
130126
Returns:
131127
:class:`~navegador.context.loader.ContextBundle`
132128
"""
133129
from navegador.context.loader import ContextLoader
134130
135
- return ContextLoader(self._store).load_function(
136
- name, file_path=file_path, depth=depth
137
- )
131
+ return ContextLoader(self._store).load_function(name, file_path=file_path, depth=depth)
138132
139133
def class_context(self, name: str, file_path: str = "") -> Any:
140134
"""
141135
Return a ContextBundle for a class — methods, inheritance, references.
142136
143137
--- navegador/sdk.py
+++ navegador/sdk.py
@@ -94,13 +94,11 @@
94 Returns:
95 Dict with counts: files, functions, classes, edges, skipped.
96 """
97 from navegador.ingestion import RepoIngester
98
99 return RepoIngester(self._store).ingest(
100 repo_path, clear=clear, incremental=incremental
101 )
102
103 # ── Context loading ───────────────────────────────────────────────────────
104
105 def file_context(self, file_path: str) -> Any:
106 """
@@ -114,13 +112,11 @@
114 """
115 from navegador.context.loader import ContextLoader
116
117 return ContextLoader(self._store).load_file(file_path)
118
119 def function_context(
120 self, name: str, file_path: str = "", depth: int = 2
121 ) -> Any:
122 """
123 Return a ContextBundle for a function — callers, callees, decorators.
124
125 Args:
126 name: Function name.
@@ -130,13 +126,11 @@
130 Returns:
131 :class:`~navegador.context.loader.ContextBundle`
132 """
133 from navegador.context.loader import ContextLoader
134
135 return ContextLoader(self._store).load_function(
136 name, file_path=file_path, depth=depth
137 )
138
139 def class_context(self, name: str, file_path: str = "") -> Any:
140 """
141 Return a ContextBundle for a class — methods, inheritance, references.
142
143
--- navegador/sdk.py
+++ navegador/sdk.py
@@ -94,13 +94,11 @@
94 Returns:
95 Dict with counts: files, functions, classes, edges, skipped.
96 """
97 from navegador.ingestion import RepoIngester
98
99 return RepoIngester(self._store).ingest(repo_path, clear=clear, incremental=incremental)
 
 
100
101 # ── Context loading ───────────────────────────────────────────────────────
102
103 def file_context(self, file_path: str) -> Any:
104 """
@@ -114,13 +112,11 @@
112 """
113 from navegador.context.loader import ContextLoader
114
115 return ContextLoader(self._store).load_file(file_path)
116
117 def function_context(self, name: str, file_path: str = "", depth: int = 2) -> Any:
 
 
118 """
119 Return a ContextBundle for a function — callers, callees, decorators.
120
121 Args:
122 name: Function name.
@@ -130,13 +126,11 @@
126 Returns:
127 :class:`~navegador.context.loader.ContextBundle`
128 """
129 from navegador.context.loader import ContextLoader
130
131 return ContextLoader(self._store).load_function(name, file_path=file_path, depth=depth)
 
 
132
133 def class_context(self, name: str, file_path: str = "") -> Any:
134 """
135 Return a ContextBundle for a class — methods, inheritance, references.
136
137
--- navegador/security.py
+++ navegador/security.py
@@ -21,11 +21,11 @@
2121
"""A single sensitive-content finding."""
2222
2323
pattern_name: str
2424
line_number: int
2525
match_text: str # the matched text — stored already-redacted
26
- severity: str # "high" or "medium"
26
+ severity: str # "high" or "medium"
2727
2828
2929
# ---------------------------------------------------------------------------
3030
# Pattern registry
3131
# ---------------------------------------------------------------------------
@@ -68,13 +68,11 @@
6868
"high",
6969
),
7070
# Password in assignment
7171
(
7272
"password_assignment",
73
- re.compile(
74
- r'(?i)(?:password|passwd|secret)\s*[=:]\s*["\']([^"\']{4,})["\']'
75
- ),
73
+ re.compile(r'(?i)(?:password|passwd|secret)\s*[=:]\s*["\']([^"\']{4,})["\']'),
7674
"high",
7775
),
7876
# PEM private key header
7977
(
8078
"private_key_pem",
@@ -121,11 +119,10 @@
121119
Returns a list of :class:`SensitiveMatch` objects, one per finding.
122120
The ``match_text`` field contains the matched string already rendered as
123121
``[REDACTED]`` so callers never need to touch the raw secret.
124122
"""
125123
findings: list[SensitiveMatch] = []
126
- lines = text.splitlines()
127124
128125
for pattern_name, regex, severity in _PATTERNS:
129126
for m in regex.finditer(text):
130127
# Determine line number (1-based) by counting newlines before match start
131128
line_number = text.count("\n", 0, m.start()) + 1
132129
--- navegador/security.py
+++ navegador/security.py
@@ -21,11 +21,11 @@
21 """A single sensitive-content finding."""
22
23 pattern_name: str
24 line_number: int
25 match_text: str # the matched text — stored already-redacted
26 severity: str # "high" or "medium"
27
28
29 # ---------------------------------------------------------------------------
30 # Pattern registry
31 # ---------------------------------------------------------------------------
@@ -68,13 +68,11 @@
68 "high",
69 ),
70 # Password in assignment
71 (
72 "password_assignment",
73 re.compile(
74 r'(?i)(?:password|passwd|secret)\s*[=:]\s*["\']([^"\']{4,})["\']'
75 ),
76 "high",
77 ),
78 # PEM private key header
79 (
80 "private_key_pem",
@@ -121,11 +119,10 @@
121 Returns a list of :class:`SensitiveMatch` objects, one per finding.
122 The ``match_text`` field contains the matched string already rendered as
123 ``[REDACTED]`` so callers never need to touch the raw secret.
124 """
125 findings: list[SensitiveMatch] = []
126 lines = text.splitlines()
127
128 for pattern_name, regex, severity in _PATTERNS:
129 for m in regex.finditer(text):
130 # Determine line number (1-based) by counting newlines before match start
131 line_number = text.count("\n", 0, m.start()) + 1
132
--- navegador/security.py
+++ navegador/security.py
@@ -21,11 +21,11 @@
21 """A single sensitive-content finding."""
22
23 pattern_name: str
24 line_number: int
25 match_text: str # the matched text — stored already-redacted
26 severity: str # "high" or "medium"
27
28
29 # ---------------------------------------------------------------------------
30 # Pattern registry
31 # ---------------------------------------------------------------------------
@@ -68,13 +68,11 @@
68 "high",
69 ),
70 # Password in assignment
71 (
72 "password_assignment",
73 re.compile(r'(?i)(?:password|passwd|secret)\s*[=:]\s*["\']([^"\']{4,})["\']'),
 
 
74 "high",
75 ),
76 # PEM private key header
77 (
78 "private_key_pem",
@@ -121,11 +119,10 @@
119 Returns a list of :class:`SensitiveMatch` objects, one per finding.
120 The ``match_text`` field contains the matched string already rendered as
121 ``[REDACTED]`` so callers never need to touch the raw secret.
122 """
123 findings: list[SensitiveMatch] = []
 
124
125 for pattern_name, regex, severity in _PATTERNS:
126 for m in regex.finditer(text):
127 # Determine line number (1-based) by counting newlines before match start
128 line_number = text.count("\n", 0, m.start()) + 1
129
+54 -43
--- navegador/vcs.py
+++ navegador/vcs.py
@@ -17,11 +17,10 @@
1717
1818
import subprocess
1919
from abc import ABC, abstractmethod
2020
from pathlib import Path
2121
22
-
2322
# ── Abstract base ──────────────────────────────────────────────────────────────
2423
2524
2625
class VCSAdapter(ABC):
2726
"""Abstract base class for VCS backends."""
@@ -123,30 +122,34 @@
123122
Return up to *limit* log entries for *file_path*.
124123
125124
Each entry has the keys: ``hash``, ``author``, ``date``, ``message``.
126125
"""
127126
fmt = "%H%x1f%an%x1f%ai%x1f%s"
128
- result = self._run([
129
- "log",
130
- f"--max-count={limit}",
131
- f"--format={fmt}",
132
- "--",
133
- file_path,
134
- ])
127
+ result = self._run(
128
+ [
129
+ "log",
130
+ f"--max-count={limit}",
131
+ f"--format={fmt}",
132
+ "--",
133
+ file_path,
134
+ ]
135
+ )
135136
136137
entries: list[dict] = []
137138
for line in result.stdout.strip().splitlines():
138139
if not line:
139140
continue
140141
parts = line.split("\x1f", 3)
141142
if len(parts) == 4:
142
- entries.append({
143
- "hash": parts[0],
144
- "author": parts[1],
145
- "date": parts[2],
146
- "message": parts[3],
147
- })
143
+ entries.append(
144
+ {
145
+ "hash": parts[0],
146
+ "author": parts[1],
147
+ "date": parts[2],
148
+ "message": parts[3],
149
+ }
150
+ )
148151
return entries
149152
150153
def blame(self, file_path: str) -> list[dict]:
151154
"""
152155
Return per-line blame data for *file_path*.
@@ -182,21 +185,23 @@
182185
i += 1
183186
# Read key-value pairs until we hit the content line (starts with \t)
184187
while i < len(lines) and not lines[i].startswith("\t"):
185188
kv = lines[i]
186189
if kv.startswith("author "):
187
- current_author = kv[len("author "):]
190
+ current_author = kv[len("author ") :]
188191
i += 1
189192
# The content line starts with a tab
190193
if i < len(lines) and lines[i].startswith("\t"):
191194
content = lines[i][1:] # strip leading tab
192
- entries.append({
193
- "line": line_number,
194
- "hash": current_hash,
195
- "author": current_author,
196
- "content": content,
197
- })
195
+ entries.append(
196
+ {
197
+ "line": line_number,
198
+ "hash": current_hash,
199
+ "author": current_author,
200
+ "content": content,
201
+ }
202
+ )
198203
i += 1
199204
else:
200205
i += 1
201206
202207
return entries
@@ -223,14 +228,11 @@
223228
check=check,
224229
)
225230
226231
def is_repo(self) -> bool:
227232
"""Return True when *repo_path* looks like a Fossil checkout."""
228
- return (
229
- (self.repo_path / ".fslckout").exists()
230
- or (self.repo_path / "_FOSSIL_").exists()
231
- )
233
+ return (self.repo_path / ".fslckout").exists() or (self.repo_path / "_FOSSIL_").exists()
232234
233235
def current_branch(self) -> str:
234236
"""
235237
Return the name of the current Fossil branch.
236238
@@ -265,16 +267,21 @@
265267
266268
Runs ``fossil timeline --limit <n> --type ci --path <file>`` and
267269
parses the output into a list of dicts with keys:
268270
``hash``, ``author``, ``date``, ``message``.
269271
"""
270
- result = self._run([
271
- "timeline",
272
- "--limit", str(limit),
273
- "--type", "ci",
274
- "--path", file_path,
275
- ])
272
+ result = self._run(
273
+ [
274
+ "timeline",
275
+ "--limit",
276
+ str(limit),
277
+ "--type",
278
+ "ci",
279
+ "--path",
280
+ file_path,
281
+ ]
282
+ )
276283
return _parse_fossil_timeline(result.stdout)
277284
278285
def blame(self, file_path: str) -> list[dict]:
279286
"""
280287
Return per-line blame data for *file_path*.
@@ -317,16 +324,18 @@
317324
r"(\d{2}:\d{2}:\d{2})\s+\[([0-9a-f]+)\]\s+(.*?)(?:\s+\(user:\s*(\w+).*\))?$",
318325
line,
319326
)
320327
if m:
321328
time_part, hash_part, message, author = m.groups()
322
- entries.append({
323
- "hash": hash_part,
324
- "author": author or "",
325
- "date": f"{current_date} {time_part}".strip(),
326
- "message": message.rstrip(),
327
- })
329
+ entries.append(
330
+ {
331
+ "hash": hash_part,
332
+ "author": author or "",
333
+ "date": f"{current_date} {time_part}".strip(),
334
+ "message": message.rstrip(),
335
+ }
336
+ )
328337
329338
return entries
330339
331340
332341
def _parse_fossil_annotate(output: str) -> list[dict]:
@@ -349,16 +358,18 @@
349358
# Pattern: "<version> <author> <date>: <content>"
350359
m = re.match(r"(\S+)\s+(\S+)\s+\S+:\s+(.*)", raw)
351360
if m:
352361
version, author, content = m.groups()
353362
line_number += 1
354
- entries.append({
355
- "line": line_number,
356
- "hash": version,
357
- "author": author,
358
- "content": content,
359
- })
363
+ entries.append(
364
+ {
365
+ "line": line_number,
366
+ "hash": version,
367
+ "author": author,
368
+ "content": content,
369
+ }
370
+ )
360371
361372
return entries
362373
363374
364375
# ── Factory ────────────────────────────────────────────────────────────────────
365376
--- navegador/vcs.py
+++ navegador/vcs.py
@@ -17,11 +17,10 @@
17
18 import subprocess
19 from abc import ABC, abstractmethod
20 from pathlib import Path
21
22
23 # ── Abstract base ──────────────────────────────────────────────────────────────
24
25
26 class VCSAdapter(ABC):
27 """Abstract base class for VCS backends."""
@@ -123,30 +122,34 @@
123 Return up to *limit* log entries for *file_path*.
124
125 Each entry has the keys: ``hash``, ``author``, ``date``, ``message``.
126 """
127 fmt = "%H%x1f%an%x1f%ai%x1f%s"
128 result = self._run([
129 "log",
130 f"--max-count={limit}",
131 f"--format={fmt}",
132 "--",
133 file_path,
134 ])
 
 
135
136 entries: list[dict] = []
137 for line in result.stdout.strip().splitlines():
138 if not line:
139 continue
140 parts = line.split("\x1f", 3)
141 if len(parts) == 4:
142 entries.append({
143 "hash": parts[0],
144 "author": parts[1],
145 "date": parts[2],
146 "message": parts[3],
147 })
 
 
148 return entries
149
150 def blame(self, file_path: str) -> list[dict]:
151 """
152 Return per-line blame data for *file_path*.
@@ -182,21 +185,23 @@
182 i += 1
183 # Read key-value pairs until we hit the content line (starts with \t)
184 while i < len(lines) and not lines[i].startswith("\t"):
185 kv = lines[i]
186 if kv.startswith("author "):
187 current_author = kv[len("author "):]
188 i += 1
189 # The content line starts with a tab
190 if i < len(lines) and lines[i].startswith("\t"):
191 content = lines[i][1:] # strip leading tab
192 entries.append({
193 "line": line_number,
194 "hash": current_hash,
195 "author": current_author,
196 "content": content,
197 })
 
 
198 i += 1
199 else:
200 i += 1
201
202 return entries
@@ -223,14 +228,11 @@
223 check=check,
224 )
225
226 def is_repo(self) -> bool:
227 """Return True when *repo_path* looks like a Fossil checkout."""
228 return (
229 (self.repo_path / ".fslckout").exists()
230 or (self.repo_path / "_FOSSIL_").exists()
231 )
232
233 def current_branch(self) -> str:
234 """
235 Return the name of the current Fossil branch.
236
@@ -265,16 +267,21 @@
265
266 Runs ``fossil timeline --limit <n> --type ci --path <file>`` and
267 parses the output into a list of dicts with keys:
268 ``hash``, ``author``, ``date``, ``message``.
269 """
270 result = self._run([
271 "timeline",
272 "--limit", str(limit),
273 "--type", "ci",
274 "--path", file_path,
275 ])
 
 
 
 
 
276 return _parse_fossil_timeline(result.stdout)
277
278 def blame(self, file_path: str) -> list[dict]:
279 """
280 Return per-line blame data for *file_path*.
@@ -317,16 +324,18 @@
317 r"(\d{2}:\d{2}:\d{2})\s+\[([0-9a-f]+)\]\s+(.*?)(?:\s+\(user:\s*(\w+).*\))?$",
318 line,
319 )
320 if m:
321 time_part, hash_part, message, author = m.groups()
322 entries.append({
323 "hash": hash_part,
324 "author": author or "",
325 "date": f"{current_date} {time_part}".strip(),
326 "message": message.rstrip(),
327 })
 
 
328
329 return entries
330
331
332 def _parse_fossil_annotate(output: str) -> list[dict]:
@@ -349,16 +358,18 @@
349 # Pattern: "<version> <author> <date>: <content>"
350 m = re.match(r"(\S+)\s+(\S+)\s+\S+:\s+(.*)", raw)
351 if m:
352 version, author, content = m.groups()
353 line_number += 1
354 entries.append({
355 "line": line_number,
356 "hash": version,
357 "author": author,
358 "content": content,
359 })
 
 
360
361 return entries
362
363
364 # ── Factory ────────────────────────────────────────────────────────────────────
365
--- navegador/vcs.py
+++ navegador/vcs.py
@@ -17,11 +17,10 @@
17
18 import subprocess
19 from abc import ABC, abstractmethod
20 from pathlib import Path
21
 
22 # ── Abstract base ──────────────────────────────────────────────────────────────
23
24
25 class VCSAdapter(ABC):
26 """Abstract base class for VCS backends."""
@@ -123,30 +122,34 @@
122 Return up to *limit* log entries for *file_path*.
123
124 Each entry has the keys: ``hash``, ``author``, ``date``, ``message``.
125 """
126 fmt = "%H%x1f%an%x1f%ai%x1f%s"
127 result = self._run(
128 [
129 "log",
130 f"--max-count={limit}",
131 f"--format={fmt}",
132 "--",
133 file_path,
134 ]
135 )
136
137 entries: list[dict] = []
138 for line in result.stdout.strip().splitlines():
139 if not line:
140 continue
141 parts = line.split("\x1f", 3)
142 if len(parts) == 4:
143 entries.append(
144 {
145 "hash": parts[0],
146 "author": parts[1],
147 "date": parts[2],
148 "message": parts[3],
149 }
150 )
151 return entries
152
153 def blame(self, file_path: str) -> list[dict]:
154 """
155 Return per-line blame data for *file_path*.
@@ -182,21 +185,23 @@
185 i += 1
186 # Read key-value pairs until we hit the content line (starts with \t)
187 while i < len(lines) and not lines[i].startswith("\t"):
188 kv = lines[i]
189 if kv.startswith("author "):
190 current_author = kv[len("author ") :]
191 i += 1
192 # The content line starts with a tab
193 if i < len(lines) and lines[i].startswith("\t"):
194 content = lines[i][1:] # strip leading tab
195 entries.append(
196 {
197 "line": line_number,
198 "hash": current_hash,
199 "author": current_author,
200 "content": content,
201 }
202 )
203 i += 1
204 else:
205 i += 1
206
207 return entries
@@ -223,14 +228,11 @@
228 check=check,
229 )
230
def is_repo(self) -> bool:
    """Return True when *repo_path* looks like a Fossil checkout."""
    # Fossil drops one of two marker database files at the checkout root.
    checkout_db = self.repo_path / ".fslckout"
    legacy_db = self.repo_path / "_FOSSIL_"
    return checkout_db.exists() or legacy_db.exists()
 
 
 
234
235 def current_branch(self) -> str:
236 """
237 Return the name of the current Fossil branch.
238
@@ -265,16 +267,21 @@
267
268 Runs ``fossil timeline --limit <n> --type ci --path <file>`` and
269 parses the output into a list of dicts with keys:
270 ``hash``, ``author``, ``date``, ``message``.
271 """
272 result = self._run(
273 [
274 "timeline",
275 "--limit",
276 str(limit),
277 "--type",
278 "ci",
279 "--path",
280 file_path,
281 ]
282 )
283 return _parse_fossil_timeline(result.stdout)
284
285 def blame(self, file_path: str) -> list[dict]:
286 """
287 Return per-line blame data for *file_path*.
@@ -317,16 +324,18 @@
324 r"(\d{2}:\d{2}:\d{2})\s+\[([0-9a-f]+)\]\s+(.*?)(?:\s+\(user:\s*(\w+).*\))?$",
325 line,
326 )
327 if m:
328 time_part, hash_part, message, author = m.groups()
329 entries.append(
330 {
331 "hash": hash_part,
332 "author": author or "",
333 "date": f"{current_date} {time_part}".strip(),
334 "message": message.rstrip(),
335 }
336 )
337
338 return entries
339
340
341 def _parse_fossil_annotate(output: str) -> list[dict]:
@@ -349,16 +358,18 @@
358 # Pattern: "<version> <author> <date>: <content>"
359 m = re.match(r"(\S+)\s+(\S+)\s+\S+:\s+(.*)", raw)
360 if m:
361 version, author, content = m.groups()
362 line_number += 1
363 entries.append(
364 {
365 "line": line_number,
366 "hash": version,
367 "author": author,
368 "content": content,
369 }
370 )
371
372 return entries
373
374
375 # ── Factory ────────────────────────────────────────────────────────────────────
376
+1 -1
--- pyproject.toml
+++ pyproject.toml
@@ -2,11 +2,11 @@
 requires = ["setuptools>=69.0", "wheel"]
 build-backend = "setuptools.build_meta"

 [project]
 name = "navegador"
-version = "0.7.1"
+version = "0.7.2"
 description = "AST + knowledge graph context engine for AI coding agents"
 readme = "README.md"
 license = "MIT"
 requires-python = ">=3.12"
 authors = [
--- pyproject.toml
+++ pyproject.toml
@@ -2,11 +2,11 @@
2 requires = ["setuptools>=69.0", "wheel"]
3 build-backend = "setuptools.build_meta"
4
5 [project]
6 name = "navegador"
7 version = "0.7.1"
8 description = "AST + knowledge graph context engine for AI coding agents"
9 readme = "README.md"
10 license = "MIT"
11 requires-python = ">=3.12"
12 authors = [
13
--- pyproject.toml
+++ pyproject.toml
@@ -2,11 +2,11 @@
2 requires = ["setuptools>=69.0", "wheel"]
3 build-backend = "setuptools.build_meta"
4
5 [project]
6 name = "navegador"
7 version = "0.7.2"
8 description = "AST + knowledge graph context engine for AI coding agents"
9 readme = "README.md"
10 license = "MIT"
11 requires-python = ">=3.12"
12 authors = [
13

Keyboard Shortcuts

Open search /
Next entry (timeline) j
Previous entry (timeline) k
Open focused entry Enter
Show this help ?
Toggle theme Top nav button