Navegador
fix: ruff lint — format all files, remove unused vars, shorten long lines. Bumps version to 0.7.2.
Commit
e5e1a3b325ee737fd1bef364f6fa2a0ff1611a9ed5eee4ca0901a1d361dd72fd
Parent
a3b55eec36382d8…
53 files changed
+1
-1
+1
-3
+4
-9
+2
-4
+2
-6
+1
-5
+67
-65
+21
-14
+15
-18
+1
-4
+1
-1
+3
-4
+6
-4
+1
-1
+27
-18
+1
-4
+12
-4
+1
-3
+1
-4
+2
-4
+55
-33
+2
-4
+43
-28
+35
-25
+17
-13
+1
-3
+18
-13
+2
-6
+6
-8
+9
-13
+8
-13
+10
-16
+5
-7
+1
+1
-3
+2
-6
+12
-5
+1
-4
+3
-9
+6
-15
+1
-3
+1
-3
+3
-9
+11
-16
+4
-6
+6
-18
+21
-36
+8
-27
+2
-6
+3
-9
+2
-5
+54
-43
+1
-1
~
navegador/__init__.py
~
navegador/adr.py
~
navegador/analysis/testmap.py
~
navegador/api_schema.py
~
navegador/churn.py
~
navegador/cicd.py
~
navegador/cli/commands.py
~
navegador/cluster/core.py
~
navegador/cluster/fossil_live.py
~
navegador/cluster/locking.py
~
navegador/cluster/messaging.py
~
navegador/cluster/partitioning.py
~
navegador/cluster/pubsub.py
~
navegador/cluster/sessions.py
~
navegador/cluster/taskqueue.py
~
navegador/codeowners.py
~
navegador/completions.py
~
navegador/context/loader.py
~
navegador/diff.py
~
navegador/editor.py
~
navegador/enrichment/express.py
~
navegador/enrichment/fastapi.py
~
navegador/enrichment/react.py
~
navegador/enrichment/react_native.py
~
navegador/explorer/server.py
~
navegador/explorer/templates.py
~
navegador/graph/export.py
~
navegador/graph/migrations.py
~
navegador/ingestion/c.py
~
navegador/ingestion/cpp.py
~
navegador/ingestion/csharp.py
~
navegador/ingestion/kotlin.py
~
navegador/ingestion/optimization.py
~
navegador/ingestion/parser.py
~
navegador/ingestion/php.py
~
navegador/ingestion/ruby.py
~
navegador/ingestion/swift.py
~
navegador/intelligence/community.py
~
navegador/intelligence/docgen.py
~
navegador/intelligence/nlp.py
~
navegador/intelligence/search.py
~
navegador/llm.py
~
navegador/mcp/security.py
~
navegador/mcp/server.py
~
navegador/monorepo.py
~
navegador/multirepo.py
~
navegador/planopticon_pipeline.py
~
navegador/pm.py
~
navegador/refactor.py
~
navegador/sdk.py
~
navegador/security.py
~
navegador/vcs.py
~
pyproject.toml
+1
-1
| --- navegador/__init__.py | ||
| +++ navegador/__init__.py | ||
| @@ -1,10 +1,10 @@ | ||
| 1 | 1 | """ |
| 2 | 2 | Navegador — AST + knowledge graph context engine for AI coding agents. |
| 3 | 3 | """ |
| 4 | 4 | |
| 5 | -__version__ = "0.7.1" | |
| 5 | +__version__ = "0.7.2" | |
| 6 | 6 | __author__ = "CONFLICT LLC" |
| 7 | 7 | |
| 8 | 8 | from navegador.sdk import Navegador |
| 9 | 9 | |
| 10 | 10 | __all__ = ["Navegador"] |
| 11 | 11 |
| --- navegador/__init__.py | |
| +++ navegador/__init__.py | |
| @@ -1,10 +1,10 @@ | |
| 1 | """ |
| 2 | Navegador — AST + knowledge graph context engine for AI coding agents. |
| 3 | """ |
| 4 | |
| 5 | __version__ = "0.7.1" |
| 6 | __author__ = "CONFLICT LLC" |
| 7 | |
| 8 | from navegador.sdk import Navegador |
| 9 | |
| 10 | __all__ = ["Navegador"] |
| 11 |
| --- navegador/__init__.py | |
| +++ navegador/__init__.py | |
| @@ -1,10 +1,10 @@ | |
| 1 | """ |
| 2 | Navegador — AST + knowledge graph context engine for AI coding agents. |
| 3 | """ |
| 4 | |
| 5 | __version__ = "0.7.2" |
| 6 | __author__ = "CONFLICT LLC" |
| 7 | |
| 8 | from navegador.sdk import Navegador |
| 9 | |
| 10 | __all__ = ["Navegador"] |
| 11 |
+1
-3
| --- navegador/adr.py | ||
| +++ navegador/adr.py | ||
| @@ -24,13 +24,11 @@ | ||
| 24 | 24 | logger = logging.getLogger(__name__) |
| 25 | 25 | |
| 26 | 26 | # ── Regex helpers ───────────────────────────────────────────────────────────── |
| 27 | 27 | |
| 28 | 28 | _H1 = re.compile(r"^#\s+(.+)$", re.MULTILINE) |
| 29 | -_STATUS = re.compile( | |
| 30 | - r"^#{1,3}\s+Status\s*\n+(.+?)(?=\n#{1,3}\s|\Z)", re.MULTILINE | re.DOTALL | |
| 31 | -) | |
| 29 | +_STATUS = re.compile(r"^#{1,3}\s+Status\s*\n+(.+?)(?=\n#{1,3}\s|\Z)", re.MULTILINE | re.DOTALL) | |
| 32 | 30 | _RATIONALE = re.compile( |
| 33 | 31 | r"^#{1,3}\s+Rationale\s*\n+(.+?)(?=\n#{1,3}\s|\Z)", |
| 34 | 32 | re.MULTILINE | re.DOTALL, |
| 35 | 33 | ) |
| 36 | 34 | _RATIONALE_FALLBACK = re.compile( |
| 37 | 35 |
| --- navegador/adr.py | |
| +++ navegador/adr.py | |
| @@ -24,13 +24,11 @@ | |
| 24 | logger = logging.getLogger(__name__) |
| 25 | |
| 26 | # ── Regex helpers ───────────────────────────────────────────────────────────── |
| 27 | |
| 28 | _H1 = re.compile(r"^#\s+(.+)$", re.MULTILINE) |
| 29 | _STATUS = re.compile( |
| 30 | r"^#{1,3}\s+Status\s*\n+(.+?)(?=\n#{1,3}\s|\Z)", re.MULTILINE | re.DOTALL |
| 31 | ) |
| 32 | _RATIONALE = re.compile( |
| 33 | r"^#{1,3}\s+Rationale\s*\n+(.+?)(?=\n#{1,3}\s|\Z)", |
| 34 | re.MULTILINE | re.DOTALL, |
| 35 | ) |
| 36 | _RATIONALE_FALLBACK = re.compile( |
| 37 |
| --- navegador/adr.py | |
| +++ navegador/adr.py | |
| @@ -24,13 +24,11 @@ | |
| 24 | logger = logging.getLogger(__name__) |
| 25 | |
| 26 | # ── Regex helpers ───────────────────────────────────────────────────────────── |
| 27 | |
| 28 | _H1 = re.compile(r"^#\s+(.+)$", re.MULTILINE) |
| 29 | _STATUS = re.compile(r"^#{1,3}\s+Status\s*\n+(.+?)(?=\n#{1,3}\s|\Z)", re.MULTILINE | re.DOTALL) |
| 30 | _RATIONALE = re.compile( |
| 31 | r"^#{1,3}\s+Rationale\s*\n+(.+?)(?=\n#{1,3}\s|\Z)", |
| 32 | re.MULTILINE | re.DOTALL, |
| 33 | ) |
| 34 | _RATIONALE_FALLBACK = re.compile( |
| 35 |
+4
-9
| --- navegador/analysis/testmap.py | ||
| +++ navegador/analysis/testmap.py | ||
| @@ -146,13 +146,11 @@ | ||
| 146 | 146 | test_file=test_file, |
| 147 | 147 | prod_name=prod_name, |
| 148 | 148 | prod_file=prod_file, |
| 149 | 149 | prod_type=prod_type, |
| 150 | 150 | source=( |
| 151 | - "calls" | |
| 152 | - if self._resolve_via_calls(test_name, test_file) | |
| 153 | - else "heuristic" | |
| 151 | + "calls" if self._resolve_via_calls(test_name, test_file) else "heuristic" | |
| 154 | 152 | ), |
| 155 | 153 | ) |
| 156 | 154 | links.append(link) |
| 157 | 155 | # Persist the TESTS edge |
| 158 | 156 | try: |
| @@ -183,17 +181,14 @@ | ||
| 183 | 181 | rows = result.result_set or [] |
| 184 | 182 | except Exception: |
| 185 | 183 | return [] |
| 186 | 184 | |
| 187 | 185 | return [ |
| 188 | - {"name": row[0] or "", "file_path": row[1] or "", "line_start": row[2]} | |
| 189 | - for row in rows | |
| 186 | + {"name": row[0] or "", "file_path": row[1] or "", "line_start": row[2]} for row in rows | |
| 190 | 187 | ] |
| 191 | 188 | |
| 192 | - def _resolve_via_calls( | |
| 193 | - self, test_name: str, test_file: str | |
| 194 | - ) -> tuple[str, str, str] | None: | |
| 189 | + def _resolve_via_calls(self, test_name: str, test_file: str) -> tuple[str, str, str] | None: | |
| 195 | 190 | """Return (type, name, file_path) of the first non-test callee, or None.""" |
| 196 | 191 | try: |
| 197 | 192 | result = self.store.query( |
| 198 | 193 | _CALLS_FROM_TEST, {"test_name": test_name, "file_path": test_file} |
| 199 | 194 | ) |
| @@ -213,11 +208,11 @@ | ||
| 213 | 208 | test_validate_token → validate_token, then validate |
| 214 | 209 | """ |
| 215 | 210 | if not test_name.startswith("test_"): |
| 216 | 211 | return None |
| 217 | 212 | |
| 218 | - stripped = test_name[len("test_"):] | |
| 213 | + stripped = test_name[len("test_") :] | |
| 219 | 214 | parts = stripped.split("_") |
| 220 | 215 | |
| 221 | 216 | # Try full stripped name first, then progressively shorter prefixes |
| 222 | 217 | candidates = [] |
| 223 | 218 | for i in range(len(parts), 0, -1): |
| 224 | 219 |
| --- navegador/analysis/testmap.py | |
| +++ navegador/analysis/testmap.py | |
| @@ -146,13 +146,11 @@ | |
| 146 | test_file=test_file, |
| 147 | prod_name=prod_name, |
| 148 | prod_file=prod_file, |
| 149 | prod_type=prod_type, |
| 150 | source=( |
| 151 | "calls" |
| 152 | if self._resolve_via_calls(test_name, test_file) |
| 153 | else "heuristic" |
| 154 | ), |
| 155 | ) |
| 156 | links.append(link) |
| 157 | # Persist the TESTS edge |
| 158 | try: |
| @@ -183,17 +181,14 @@ | |
| 183 | rows = result.result_set or [] |
| 184 | except Exception: |
| 185 | return [] |
| 186 | |
| 187 | return [ |
| 188 | {"name": row[0] or "", "file_path": row[1] or "", "line_start": row[2]} |
| 189 | for row in rows |
| 190 | ] |
| 191 | |
| 192 | def _resolve_via_calls( |
| 193 | self, test_name: str, test_file: str |
| 194 | ) -> tuple[str, str, str] | None: |
| 195 | """Return (type, name, file_path) of the first non-test callee, or None.""" |
| 196 | try: |
| 197 | result = self.store.query( |
| 198 | _CALLS_FROM_TEST, {"test_name": test_name, "file_path": test_file} |
| 199 | ) |
| @@ -213,11 +208,11 @@ | |
| 213 | test_validate_token → validate_token, then validate |
| 214 | """ |
| 215 | if not test_name.startswith("test_"): |
| 216 | return None |
| 217 | |
| 218 | stripped = test_name[len("test_"):] |
| 219 | parts = stripped.split("_") |
| 220 | |
| 221 | # Try full stripped name first, then progressively shorter prefixes |
| 222 | candidates = [] |
| 223 | for i in range(len(parts), 0, -1): |
| 224 |
| --- navegador/analysis/testmap.py | |
| +++ navegador/analysis/testmap.py | |
| @@ -146,13 +146,11 @@ | |
| 146 | test_file=test_file, |
| 147 | prod_name=prod_name, |
| 148 | prod_file=prod_file, |
| 149 | prod_type=prod_type, |
| 150 | source=( |
| 151 | "calls" if self._resolve_via_calls(test_name, test_file) else "heuristic" |
| 152 | ), |
| 153 | ) |
| 154 | links.append(link) |
| 155 | # Persist the TESTS edge |
| 156 | try: |
| @@ -183,17 +181,14 @@ | |
| 181 | rows = result.result_set or [] |
| 182 | except Exception: |
| 183 | return [] |
| 184 | |
| 185 | return [ |
| 186 | {"name": row[0] or "", "file_path": row[1] or "", "line_start": row[2]} for row in rows |
| 187 | ] |
| 188 | |
| 189 | def _resolve_via_calls(self, test_name: str, test_file: str) -> tuple[str, str, str] | None: |
| 190 | """Return (type, name, file_path) of the first non-test callee, or None.""" |
| 191 | try: |
| 192 | result = self.store.query( |
| 193 | _CALLS_FROM_TEST, {"test_name": test_name, "file_path": test_file} |
| 194 | ) |
| @@ -213,11 +208,11 @@ | |
| 208 | test_validate_token → validate_token, then validate |
| 209 | """ |
| 210 | if not test_name.startswith("test_"): |
| 211 | return None |
| 212 | |
| 213 | stripped = test_name[len("test_") :] |
| 214 | parts = stripped.split("_") |
| 215 | |
| 216 | # Try full stripped name first, then progressively shorter prefixes |
| 217 | candidates = [] |
| 218 | for i in range(len(parts), 0, -1): |
| 219 |
+2
-4
| --- navegador/api_schema.py | ||
| +++ navegador/api_schema.py | ||
| @@ -18,11 +18,10 @@ | ||
| 18 | 18 | import logging |
| 19 | 19 | import re |
| 20 | 20 | from pathlib import Path |
| 21 | 21 | from typing import Any |
| 22 | 22 | |
| 23 | -from navegador.graph.schema import EdgeType, NodeLabel | |
| 24 | 23 | from navegador.graph.store import GraphStore |
| 25 | 24 | |
| 26 | 25 | logger = logging.getLogger(__name__) |
| 27 | 26 | |
| 28 | 27 | # ── New node label for API endpoints ───────────────────────────────────────── |
| @@ -94,13 +93,11 @@ | ||
| 94 | 93 | ) |
| 95 | 94 | endpoints += 1 |
| 96 | 95 | |
| 97 | 96 | # ── Component schemas / definitions ─────────────────────────────────── |
| 98 | 97 | component_schemas = ( |
| 99 | - (spec.get("components") or {}).get("schemas") | |
| 100 | - or spec.get("definitions") | |
| 101 | - or {} | |
| 98 | + (spec.get("components") or {}).get("schemas") or spec.get("definitions") or {} | |
| 102 | 99 | ) |
| 103 | 100 | for schema_name, schema_body in component_schemas.items(): |
| 104 | 101 | if not isinstance(schema_body, dict): |
| 105 | 102 | continue |
| 106 | 103 | description = schema_body.get("description") or "" |
| @@ -231,10 +228,11 @@ | ||
| 231 | 228 | Sufficient for the simple flat/nested structure of OpenAPI specs. |
| 232 | 229 | Falls back to PyYAML if available. |
| 233 | 230 | """ |
| 234 | 231 | try: |
| 235 | 232 | import yaml # type: ignore[import] |
| 233 | + | |
| 236 | 234 | return yaml.safe_load(text) |
| 237 | 235 | except ImportError: |
| 238 | 236 | pass |
| 239 | 237 | |
| 240 | 238 | # Minimal hand-rolled YAML → dict for simple key: value structures |
| 241 | 239 |
| --- navegador/api_schema.py | |
| +++ navegador/api_schema.py | |
| @@ -18,11 +18,10 @@ | |
| 18 | import logging |
| 19 | import re |
| 20 | from pathlib import Path |
| 21 | from typing import Any |
| 22 | |
| 23 | from navegador.graph.schema import EdgeType, NodeLabel |
| 24 | from navegador.graph.store import GraphStore |
| 25 | |
| 26 | logger = logging.getLogger(__name__) |
| 27 | |
| 28 | # ── New node label for API endpoints ───────────────────────────────────────── |
| @@ -94,13 +93,11 @@ | |
| 94 | ) |
| 95 | endpoints += 1 |
| 96 | |
| 97 | # ── Component schemas / definitions ─────────────────────────────────── |
| 98 | component_schemas = ( |
| 99 | (spec.get("components") or {}).get("schemas") |
| 100 | or spec.get("definitions") |
| 101 | or {} |
| 102 | ) |
| 103 | for schema_name, schema_body in component_schemas.items(): |
| 104 | if not isinstance(schema_body, dict): |
| 105 | continue |
| 106 | description = schema_body.get("description") or "" |
| @@ -231,10 +228,11 @@ | |
| 231 | Sufficient for the simple flat/nested structure of OpenAPI specs. |
| 232 | Falls back to PyYAML if available. |
| 233 | """ |
| 234 | try: |
| 235 | import yaml # type: ignore[import] |
| 236 | return yaml.safe_load(text) |
| 237 | except ImportError: |
| 238 | pass |
| 239 | |
| 240 | # Minimal hand-rolled YAML → dict for simple key: value structures |
| 241 |
| --- navegador/api_schema.py | |
| +++ navegador/api_schema.py | |
| @@ -18,11 +18,10 @@ | |
| 18 | import logging |
| 19 | import re |
| 20 | from pathlib import Path |
| 21 | from typing import Any |
| 22 | |
| 23 | from navegador.graph.store import GraphStore |
| 24 | |
| 25 | logger = logging.getLogger(__name__) |
| 26 | |
| 27 | # ── New node label for API endpoints ───────────────────────────────────────── |
| @@ -94,13 +93,11 @@ | |
| 93 | ) |
| 94 | endpoints += 1 |
| 95 | |
| 96 | # ── Component schemas / definitions ─────────────────────────────────── |
| 97 | component_schemas = ( |
| 98 | (spec.get("components") or {}).get("schemas") or spec.get("definitions") or {} |
| 99 | ) |
| 100 | for schema_name, schema_body in component_schemas.items(): |
| 101 | if not isinstance(schema_body, dict): |
| 102 | continue |
| 103 | description = schema_body.get("description") or "" |
| @@ -231,10 +228,11 @@ | |
| 228 | Sufficient for the simple flat/nested structure of OpenAPI specs. |
| 229 | Falls back to PyYAML if available. |
| 230 | """ |
| 231 | try: |
| 232 | import yaml # type: ignore[import] |
| 233 | |
| 234 | return yaml.safe_load(text) |
| 235 | except ImportError: |
| 236 | pass |
| 237 | |
| 238 | # Minimal hand-rolled YAML → dict for simple key: value structures |
| 239 |
+2
-6
| --- navegador/churn.py | ||
| +++ navegador/churn.py | ||
| @@ -25,11 +25,10 @@ | ||
| 25 | 25 | from collections import defaultdict |
| 26 | 26 | from dataclasses import dataclass |
| 27 | 27 | from itertools import combinations |
| 28 | 28 | from pathlib import Path |
| 29 | 29 | |
| 30 | - | |
| 31 | 30 | # ── Data models ─────────────────────────────────────────────────────────────── |
| 32 | 31 | |
| 33 | 32 | |
| 34 | 33 | @dataclass |
| 35 | 34 | class ChurnEntry: |
| @@ -255,21 +254,18 @@ | ||
| 255 | 254 | couplings_written = 0 |
| 256 | 255 | |
| 257 | 256 | # -- Update File node churn scores ------------------------------------ |
| 258 | 257 | for entry in self.file_churn(): |
| 259 | 258 | cypher = ( |
| 260 | - "MATCH (f:File {file_path: $fp}) " | |
| 261 | - "SET f.churn_score = $score, f.lines_changed = $lc" | |
| 259 | + "MATCH (f:File {file_path: $fp}) SET f.churn_score = $score, f.lines_changed = $lc" | |
| 262 | 260 | ) |
| 263 | 261 | result = store.query( |
| 264 | 262 | cypher, |
| 265 | 263 | {"fp": entry.file_path, "score": entry.commit_count, "lc": entry.lines_changed}, |
| 266 | 264 | ) |
| 267 | 265 | # FalkorDB returns stats; count rows affected if available |
| 268 | - if getattr(result, "nodes_modified", None) or getattr( | |
| 269 | - result, "properties_set", None | |
| 270 | - ): | |
| 266 | + if getattr(result, "nodes_modified", None) or getattr(result, "properties_set", None): | |
| 271 | 267 | churn_updated += 1 |
| 272 | 268 | else: |
| 273 | 269 | # Fallback: assume the match succeeded if no error was raised |
| 274 | 270 | churn_updated += 1 |
| 275 | 271 | |
| 276 | 272 |
| --- navegador/churn.py | |
| +++ navegador/churn.py | |
| @@ -25,11 +25,10 @@ | |
| 25 | from collections import defaultdict |
| 26 | from dataclasses import dataclass |
| 27 | from itertools import combinations |
| 28 | from pathlib import Path |
| 29 | |
| 30 | |
| 31 | # ── Data models ─────────────────────────────────────────────────────────────── |
| 32 | |
| 33 | |
| 34 | @dataclass |
| 35 | class ChurnEntry: |
| @@ -255,21 +254,18 @@ | |
| 255 | couplings_written = 0 |
| 256 | |
| 257 | # -- Update File node churn scores ------------------------------------ |
| 258 | for entry in self.file_churn(): |
| 259 | cypher = ( |
| 260 | "MATCH (f:File {file_path: $fp}) " |
| 261 | "SET f.churn_score = $score, f.lines_changed = $lc" |
| 262 | ) |
| 263 | result = store.query( |
| 264 | cypher, |
| 265 | {"fp": entry.file_path, "score": entry.commit_count, "lc": entry.lines_changed}, |
| 266 | ) |
| 267 | # FalkorDB returns stats; count rows affected if available |
| 268 | if getattr(result, "nodes_modified", None) or getattr( |
| 269 | result, "properties_set", None |
| 270 | ): |
| 271 | churn_updated += 1 |
| 272 | else: |
| 273 | # Fallback: assume the match succeeded if no error was raised |
| 274 | churn_updated += 1 |
| 275 | |
| 276 |
| --- navegador/churn.py | |
| +++ navegador/churn.py | |
| @@ -25,11 +25,10 @@ | |
| 25 | from collections import defaultdict |
| 26 | from dataclasses import dataclass |
| 27 | from itertools import combinations |
| 28 | from pathlib import Path |
| 29 | |
| 30 | # ── Data models ─────────────────────────────────────────────────────────────── |
| 31 | |
| 32 | |
| 33 | @dataclass |
| 34 | class ChurnEntry: |
| @@ -255,21 +254,18 @@ | |
| 254 | couplings_written = 0 |
| 255 | |
| 256 | # -- Update File node churn scores ------------------------------------ |
| 257 | for entry in self.file_churn(): |
| 258 | cypher = ( |
| 259 | "MATCH (f:File {file_path: $fp}) SET f.churn_score = $score, f.lines_changed = $lc" |
| 260 | ) |
| 261 | result = store.query( |
| 262 | cypher, |
| 263 | {"fp": entry.file_path, "score": entry.commit_count, "lc": entry.lines_changed}, |
| 264 | ) |
| 265 | # FalkorDB returns stats; count rows affected if available |
| 266 | if getattr(result, "nodes_modified", None) or getattr(result, "properties_set", None): |
| 267 | churn_updated += 1 |
| 268 | else: |
| 269 | # Fallback: assume the match succeeded if no error was raised |
| 270 | churn_updated += 1 |
| 271 | |
| 272 |
+1
-5
| --- navegador/cicd.py | ||
| +++ navegador/cicd.py | ||
| @@ -10,13 +10,11 @@ | ||
| 10 | 10 | |
| 11 | 11 | import json |
| 12 | 12 | import os |
| 13 | 13 | import sys |
| 14 | 14 | from dataclasses import dataclass, field |
| 15 | -from pathlib import Path | |
| 16 | 15 | from typing import Any |
| 17 | - | |
| 18 | 16 | |
| 19 | 17 | # ── Exit codes ──────────────────────────────────────────────────────────────── |
| 20 | 18 | |
| 21 | 19 | EXIT_SUCCESS = 0 |
| 22 | 20 | EXIT_ERROR = 1 |
| @@ -176,13 +174,11 @@ | ||
| 176 | 174 | summary_path = os.environ.get("GITHUB_STEP_SUMMARY") |
| 177 | 175 | if not summary_path: |
| 178 | 176 | return |
| 179 | 177 | |
| 180 | 178 | lines: list[str] = [] |
| 181 | - status_icon = {"success": "✅", "warning": "⚠️", "error": "❌"}.get( | |
| 182 | - payload["status"], "ℹ️" | |
| 183 | - ) | |
| 179 | + status_icon = {"success": "✅", "warning": "⚠️", "error": "❌"}.get(payload["status"], "ℹ️") | |
| 184 | 180 | lines.append(f"## Navegador — {status_icon} {payload['status'].capitalize()}\n") |
| 185 | 181 | |
| 186 | 182 | if payload.get("data"): |
| 187 | 183 | lines.append("### Stats\n") |
| 188 | 184 | lines.append("| Key | Value |") |
| 189 | 185 |
| --- navegador/cicd.py | |
| +++ navegador/cicd.py | |
| @@ -10,13 +10,11 @@ | |
| 10 | |
| 11 | import json |
| 12 | import os |
| 13 | import sys |
| 14 | from dataclasses import dataclass, field |
| 15 | from pathlib import Path |
| 16 | from typing import Any |
| 17 | |
| 18 | |
| 19 | # ── Exit codes ──────────────────────────────────────────────────────────────── |
| 20 | |
| 21 | EXIT_SUCCESS = 0 |
| 22 | EXIT_ERROR = 1 |
| @@ -176,13 +174,11 @@ | |
| 176 | summary_path = os.environ.get("GITHUB_STEP_SUMMARY") |
| 177 | if not summary_path: |
| 178 | return |
| 179 | |
| 180 | lines: list[str] = [] |
| 181 | status_icon = {"success": "✅", "warning": "⚠️", "error": "❌"}.get( |
| 182 | payload["status"], "ℹ️" |
| 183 | ) |
| 184 | lines.append(f"## Navegador — {status_icon} {payload['status'].capitalize()}\n") |
| 185 | |
| 186 | if payload.get("data"): |
| 187 | lines.append("### Stats\n") |
| 188 | lines.append("| Key | Value |") |
| 189 |
| --- navegador/cicd.py | |
| +++ navegador/cicd.py | |
| @@ -10,13 +10,11 @@ | |
| 10 | |
| 11 | import json |
| 12 | import os |
| 13 | import sys |
| 14 | from dataclasses import dataclass, field |
| 15 | from typing import Any |
| 16 | |
| 17 | # ── Exit codes ──────────────────────────────────────────────────────────────── |
| 18 | |
| 19 | EXIT_SUCCESS = 0 |
| 20 | EXIT_ERROR = 1 |
| @@ -176,13 +174,11 @@ | |
| 174 | summary_path = os.environ.get("GITHUB_STEP_SUMMARY") |
| 175 | if not summary_path: |
| 176 | return |
| 177 | |
| 178 | lines: list[str] = [] |
| 179 | status_icon = {"success": "✅", "warning": "⚠️", "error": "❌"}.get(payload["status"], "ℹ️") |
| 180 | lines.append(f"## Navegador — {status_icon} {payload['status'].capitalize()}\n") |
| 181 | |
| 182 | if payload.get("data"): |
| 183 | lines.append("### Stats\n") |
| 184 | lines.append("| Key | Value |") |
| 185 |
+67
-65
| --- navegador/cli/commands.py | ||
| +++ navegador/cli/commands.py | ||
| @@ -142,12 +142,21 @@ | ||
| 142 | 142 | @click.option( |
| 143 | 143 | "--monorepo", |
| 144 | 144 | is_flag=True, |
| 145 | 145 | help="Detect and ingest as a monorepo workspace (Turborepo, Nx, Yarn, pnpm, Cargo, Go).", |
| 146 | 146 | ) |
| 147 | -def ingest(repo_path: str, db: str, clear: bool, incremental: bool, watch: bool, | |
| 148 | - interval: float, as_json: bool, redact: bool, monorepo: bool): | |
| 147 | +def ingest( | |
| 148 | + repo_path: str, | |
| 149 | + db: str, | |
| 150 | + clear: bool, | |
| 151 | + incremental: bool, | |
| 152 | + watch: bool, | |
| 153 | + interval: float, | |
| 154 | + as_json: bool, | |
| 155 | + redact: bool, | |
| 156 | + monorepo: bool, | |
| 157 | +): | |
| 149 | 158 | """Ingest a repository's code into the graph (AST + call graph).""" |
| 150 | 159 | if monorepo: |
| 151 | 160 | from navegador.monorepo import MonorepoIngester |
| 152 | 161 | |
| 153 | 162 | store = _get_store(db) |
| @@ -177,13 +186,11 @@ | ||
| 177 | 186 | |
| 178 | 187 | def _on_cycle(stats): |
| 179 | 188 | changed = stats["files"] |
| 180 | 189 | skipped = stats["skipped"] |
| 181 | 190 | if changed: |
| 182 | - console.print( | |
| 183 | - f" [green]{changed} changed[/green], {skipped} unchanged" | |
| 184 | - ) | |
| 191 | + console.print(f" [green]{changed} changed[/green], {skipped} unchanged") | |
| 185 | 192 | return True # keep watching |
| 186 | 193 | |
| 187 | 194 | try: |
| 188 | 195 | ingester.watch(repo_path, interval=interval, callback=_on_cycle) |
| 189 | 196 | except KeyboardInterrupt: |
| @@ -727,12 +734,14 @@ | ||
| 727 | 734 | def migrate(db: str, check: bool): |
| 728 | 735 | """Apply pending schema migrations to the graph.""" |
| 729 | 736 | from navegador.graph.migrations import ( |
| 730 | 737 | CURRENT_SCHEMA_VERSION, |
| 731 | 738 | get_schema_version, |
| 732 | - migrate as do_migrate, | |
| 733 | 739 | needs_migration, |
| 740 | + ) | |
| 741 | + from navegador.graph.migrations import ( | |
| 742 | + migrate as do_migrate, | |
| 734 | 743 | ) |
| 735 | 744 | |
| 736 | 745 | store = _get_store(db) |
| 737 | 746 | |
| 738 | 747 | if check: |
| @@ -875,15 +884,11 @@ | ||
| 875 | 884 | param_hint="--framework", |
| 876 | 885 | ) |
| 877 | 886 | targets = {framework_name: available[framework_name]} |
| 878 | 887 | else: |
| 879 | 888 | # Auto-detect: only run enrichers whose detect() returns True. |
| 880 | - targets = { | |
| 881 | - name: cls | |
| 882 | - for name, cls in available.items() | |
| 883 | - if cls(store).detect() | |
| 884 | - } | |
| 889 | + targets = {name: cls for name, cls in available.items() if cls(store).detect()} | |
| 885 | 890 | if not targets and not as_json: |
| 886 | 891 | console.print("[yellow]No frameworks detected in the graph.[/yellow]") |
| 887 | 892 | return |
| 888 | 893 | |
| 889 | 894 | all_results: dict[str, dict] = {} |
| @@ -1099,13 +1104,11 @@ | ||
| 1099 | 1104 | try: |
| 1100 | 1105 | store = _get_store(db) |
| 1101 | 1106 | current = get_schema_version(store) |
| 1102 | 1107 | data = {"schema_version": current, "current_schema_version": CURRENT_SCHEMA_VERSION} |
| 1103 | 1108 | if needs_migration(store): |
| 1104 | - reporter.add_warning( | |
| 1105 | - f"Schema migration needed: v{current} → v{CURRENT_SCHEMA_VERSION}" | |
| 1106 | - ) | |
| 1109 | + reporter.add_warning(f"Schema migration needed: v{current} → v{CURRENT_SCHEMA_VERSION}") | |
| 1107 | 1110 | except Exception as exc: # noqa: BLE001 |
| 1108 | 1111 | reporter.add_error(str(exc)) |
| 1109 | 1112 | |
| 1110 | 1113 | reporter.emit(data=data or None) |
| 1111 | 1114 | sys.exit(reporter.exit_code()) |
| @@ -1151,13 +1154,11 @@ | ||
| 1151 | 1154 | else: |
| 1152 | 1155 | line = get_eval_line(shell) |
| 1153 | 1156 | rc = rc_path or get_rc_path(shell) |
| 1154 | 1157 | console.print(f"Add the following line to [bold]{rc}[/bold]:\n") |
| 1155 | 1158 | click.echo(f" {line}") |
| 1156 | - console.print( | |
| 1157 | - f"\nOr run: [bold]navegador completions {shell} --install[/bold]" | |
| 1158 | - ) | |
| 1159 | + console.print(f"\nOr run: [bold]navegador completions {shell} --install[/bold]") | |
| 1159 | 1160 | |
| 1160 | 1161 | |
| 1161 | 1162 | # ── Churn / behavioural coupling ───────────────────────────────────────────── |
| 1162 | 1163 | |
| 1163 | 1164 | |
| @@ -1293,12 +1294,11 @@ | ||
| 1293 | 1294 | "--read-only", |
| 1294 | 1295 | "read_only", |
| 1295 | 1296 | is_flag=True, |
| 1296 | 1297 | default=False, |
| 1297 | 1298 | help=( |
| 1298 | - "Start in read-only mode: disables ingest_repo and blocks write " | |
| 1299 | - "operations in query_graph." | |
| 1299 | + "Start in read-only mode: disables ingest_repo and blocks write operations in query_graph." | |
| 1300 | 1300 | ), |
| 1301 | 1301 | ) |
| 1302 | 1302 | def mcp(db: str, read_only: bool): |
| 1303 | 1303 | """Start the MCP server for AI agent integration (stdio).""" |
| 1304 | 1304 | from mcp.server.stdio import stdio_server # type: ignore[import] |
| @@ -1337,13 +1337,11 @@ | ||
| 1337 | 1337 | |
| 1338 | 1338 | if as_json: |
| 1339 | 1339 | click.echo(json.dumps(result.to_dict(), indent=2)) |
| 1340 | 1340 | return |
| 1341 | 1341 | |
| 1342 | - console.print( | |
| 1343 | - f"[bold]Blast radius:[/bold] [cyan]{name}[/cyan] (depth={depth})" | |
| 1344 | - ) | |
| 1342 | + console.print(f"[bold]Blast radius:[/bold] [cyan]{name}[/cyan] (depth={depth})") | |
| 1345 | 1343 | if not result.affected_nodes: |
| 1346 | 1344 | console.print("[yellow]No affected nodes found.[/yellow]") |
| 1347 | 1345 | return |
| 1348 | 1346 | |
| 1349 | 1347 | table = Table(title=f"Affected nodes ({len(result.affected_nodes)})") |
| @@ -1350,13 +1348,11 @@ | ||
| 1350 | 1348 | table.add_column("Type", style="cyan") |
| 1351 | 1349 | table.add_column("Name", style="bold") |
| 1352 | 1350 | table.add_column("File") |
| 1353 | 1351 | table.add_column("Line", justify="right") |
| 1354 | 1352 | for node in result.affected_nodes: |
| 1355 | - table.add_row( | |
| 1356 | - node["type"], node["name"], node["file_path"], str(node["line_start"] or "") | |
| 1357 | - ) | |
| 1353 | + table.add_row(node["type"], node["name"], node["file_path"], str(node["line_start"] or "")) | |
| 1358 | 1354 | console.print(table) |
| 1359 | 1355 | |
| 1360 | 1356 | if result.affected_files: |
| 1361 | 1357 | console.print(f"\n[bold]Affected files ({len(result.affected_files)}):[/bold]") |
| 1362 | 1358 | for fp in result.affected_files: |
| @@ -1393,18 +1389,16 @@ | ||
| 1393 | 1389 | |
| 1394 | 1390 | if not chains: |
| 1395 | 1391 | console.print(f"[yellow]No call chains found from[/yellow] [cyan]{name}[/cyan].") |
| 1396 | 1392 | return |
| 1397 | 1393 | |
| 1398 | - console.print( | |
| 1399 | - f"[bold]Call chains from[/bold] [cyan]{name}[/cyan] — {len(chains)} path(s)" | |
| 1400 | - ) | |
| 1394 | + console.print(f"[bold]Call chains from[/bold] [cyan]{name}[/cyan] — {len(chains)} path(s)") | |
| 1401 | 1395 | for i, chain in enumerate(chains, 1): |
| 1402 | 1396 | steps = chain.to_list() |
| 1403 | - path_str = " → ".join( | |
| 1404 | - [steps[0]["caller"]] + [s["callee"] for s in steps] | |
| 1405 | - ) if steps else name | |
| 1397 | + path_str = ( | |
| 1398 | + " → ".join([steps[0]["caller"]] + [s["callee"] for s in steps]) if steps else name | |
| 1399 | + ) | |
| 1406 | 1400 | console.print(f" {i}. {path_str}") |
| 1407 | 1401 | |
| 1408 | 1402 | |
| 1409 | 1403 | # ── ANALYSIS: dead code ─────────────────────────────────────────────────────── |
| 1410 | 1404 | |
| @@ -1433,11 +1427,13 @@ | ||
| 1433 | 1427 | f"{summary['unreachable_classes']} dead classes, " |
| 1434 | 1428 | f"{summary['orphan_files']} orphan files" |
| 1435 | 1429 | ) |
| 1436 | 1430 | |
| 1437 | 1431 | if report.unreachable_functions: |
| 1438 | - fn_table = Table(title=f"Unreachable functions/methods ({len(report.unreachable_functions)})") | |
| 1432 | + fn_table = Table( | |
| 1433 | + title=f"Unreachable functions/methods ({len(report.unreachable_functions)})" | |
| 1434 | + ) | |
| 1439 | 1435 | fn_table.add_column("Type", style="cyan") |
| 1440 | 1436 | fn_table.add_column("Name", style="bold") |
| 1441 | 1437 | fn_table.add_column("File") |
| 1442 | 1438 | fn_table.add_column("Line", justify="right") |
| 1443 | 1439 | for fn in report.unreachable_functions: |
| @@ -1507,14 +1503,14 @@ | ||
| 1507 | 1503 | # ── ANALYSIS: cycles ────────────────────────────────────────────────────────── |
| 1508 | 1504 | |
| 1509 | 1505 | |
| 1510 | 1506 | @main.command() |
| 1511 | 1507 | @DB_OPTION |
| 1512 | -@click.option("--imports", "check_imports", is_flag=True, default=False, | |
| 1513 | - help="Check import cycles only.") | |
| 1514 | -@click.option("--calls", "check_calls", is_flag=True, default=False, | |
| 1515 | - help="Check call cycles only.") | |
| 1508 | +@click.option( | |
| 1509 | + "--imports", "check_imports", is_flag=True, default=False, help="Check import cycles only." | |
| 1510 | +) | |
| 1511 | +@click.option("--calls", "check_calls", is_flag=True, default=False, help="Check call cycles only.") | |
| 1516 | 1512 | @click.option("--json", "as_json", is_flag=True, help="Output as JSON.") |
| 1517 | 1513 | def cycles(db: str, check_imports: bool, check_calls: bool, as_json: bool): |
| 1518 | 1514 | """Detect circular dependencies in import and call graphs. |
| 1519 | 1515 | |
| 1520 | 1516 | By default checks both import cycles and call cycles. |
| @@ -1529,13 +1525,11 @@ | ||
| 1529 | 1525 | import_cycles = detector.detect_import_cycles() if run_imports else [] |
| 1530 | 1526 | call_cycles = detector.detect_call_cycles() if run_calls else [] |
| 1531 | 1527 | |
| 1532 | 1528 | if as_json: |
| 1533 | 1529 | click.echo( |
| 1534 | - json.dumps( | |
| 1535 | - {"import_cycles": import_cycles, "call_cycles": call_cycles}, indent=2 | |
| 1536 | - ) | |
| 1530 | + json.dumps({"import_cycles": import_cycles, "call_cycles": call_cycles}, indent=2) | |
| 1537 | 1531 | ) |
| 1538 | 1532 | return |
| 1539 | 1533 | |
| 1540 | 1534 | if not import_cycles and not call_cycles: |
| 1541 | 1535 | console.print("[green]No circular dependencies found.[/green]") |
| @@ -1816,20 +1810,26 @@ | ||
| 1816 | 1810 | def pm(): |
| 1817 | 1811 | """Ingest project management tickets (GitHub Issues, Linear, Jira).""" |
| 1818 | 1812 | |
| 1819 | 1813 | |
| 1820 | 1814 | @pm.command("ingest") |
| 1821 | -@click.option("--github", "github_repo", default="", metavar="OWNER/REPO", | |
| 1822 | - help="GitHub repository in owner/repo format.") | |
| 1823 | -@click.option("--token", default="", envvar="GITHUB_TOKEN", | |
| 1824 | - help="GitHub personal access token.") | |
| 1825 | -@click.option("--state", default="open", | |
| 1826 | - type=click.Choice(["open", "closed", "all"]), | |
| 1827 | - show_default=True, | |
| 1828 | - help="GitHub issue state filter.") | |
| 1829 | -@click.option("--limit", default=100, show_default=True, | |
| 1830 | - help="Maximum number of issues to fetch.") | |
| 1815 | +@click.option( | |
| 1816 | + "--github", | |
| 1817 | + "github_repo", | |
| 1818 | + default="", | |
| 1819 | + metavar="OWNER/REPO", | |
| 1820 | + help="GitHub repository in owner/repo format.", | |
| 1821 | +) | |
| 1822 | +@click.option("--token", default="", envvar="GITHUB_TOKEN", help="GitHub personal access token.") | |
| 1823 | +@click.option( | |
| 1824 | + "--state", | |
| 1825 | + default="open", | |
| 1826 | + type=click.Choice(["open", "closed", "all"]), | |
| 1827 | + show_default=True, | |
| 1828 | + help="GitHub issue state filter.", | |
| 1829 | +) | |
| 1830 | +@click.option("--limit", default=100, show_default=True, help="Maximum number of issues to fetch.") | |
| 1831 | 1831 | @DB_OPTION |
| 1832 | 1832 | @click.option("--json", "as_json", is_flag=True) |
| 1833 | 1833 | def pm_ingest(github_repo: str, token: str, state: str, limit: int, db: str, as_json: bool): |
| 1834 | 1834 | """Ingest tickets from a PM tool into the knowledge graph. |
| 1835 | 1835 | |
| @@ -1838,11 +1838,13 @@ | ||
| 1838 | 1838 | navegador pm ingest --github owner/repo |
| 1839 | 1839 | navegador pm ingest --github owner/repo --token ghp_... |
| 1840 | 1840 | navegador pm ingest --github owner/repo --state all --limit 200 |
| 1841 | 1841 | """ |
| 1842 | 1842 | if not github_repo: |
| 1843 | - raise click.UsageError("Provide --github <owner/repo> (more backends coming in a future release).") | |
| 1843 | + raise click.UsageError( | |
| 1844 | + "Provide --github <owner/repo> (more backends coming in a future release)." | |
| 1845 | + ) | |
| 1844 | 1846 | |
| 1845 | 1847 | from navegador.pm import TicketIngester |
| 1846 | 1848 | |
| 1847 | 1849 | ing = TicketIngester(_get_store(db)) |
| 1848 | 1850 | stats = ing.ingest_github_issues(github_repo, token=token, state=state, limit=limit) |
| @@ -1867,11 +1869,12 @@ | ||
| 1867 | 1869 | |
| 1868 | 1870 | |
| 1869 | 1871 | @deps.command("ingest") |
| 1870 | 1872 | @click.argument("path", type=click.Path(exists=True)) |
| 1871 | 1873 | @click.option( |
| 1872 | - "--type", "dep_type", | |
| 1874 | + "--type", | |
| 1875 | + "dep_type", | |
| 1873 | 1876 | type=click.Choice(["auto", "npm", "pip", "cargo"]), |
| 1874 | 1877 | default="auto", |
| 1875 | 1878 | show_default=True, |
| 1876 | 1879 | help="Manifest type. auto detects from filename.", |
| 1877 | 1880 | ) |
| @@ -1922,12 +1925,11 @@ | ||
| 1922 | 1925 | |
| 1923 | 1926 | if as_json: |
| 1924 | 1927 | click.echo(json.dumps(stats, indent=2)) |
| 1925 | 1928 | else: |
| 1926 | 1929 | console.print( |
| 1927 | - f"[green]Dependencies ingested[/green] ({dep_type}): " | |
| 1928 | - f"{stats['packages']} packages" | |
| 1930 | + f"[green]Dependencies ingested[/green] ({dep_type}): {stats['packages']} packages" | |
| 1929 | 1931 | ) |
| 1930 | 1932 | |
| 1931 | 1933 | |
| 1932 | 1934 | # ── Submodules: ingest parent + submodules (#61) ────────────────────────────── |
| 1933 | 1935 | |
| @@ -2029,13 +2031,11 @@ | ||
| 2029 | 2031 | raise click.UsageError("Provide at least one NAME=PATH repo.") |
| 2030 | 2032 | |
| 2031 | 2033 | wm = WorkspaceManager(_get_store(db), mode=WorkspaceMode(mode)) |
| 2032 | 2034 | for repo_spec in repos: |
| 2033 | 2035 | if "=" not in repo_spec: |
| 2034 | - raise click.UsageError( | |
| 2035 | - f"Invalid repo spec {repo_spec!r}. Expected NAME=PATH format." | |
| 2036 | - ) | |
| 2036 | + raise click.UsageError(f"Invalid repo spec {repo_spec!r}. Expected NAME=PATH format.") | |
| 2037 | 2037 | name, path = repo_spec.split("=", 1) |
| 2038 | 2038 | wm.add_repo(name.strip(), path.strip()) |
| 2039 | 2039 | |
| 2040 | 2040 | stats = wm.ingest_all(clear=clear) |
| 2041 | 2041 | |
| @@ -2096,11 +2096,13 @@ | ||
| 2096 | 2096 | from navegador.intelligence.search import SemanticSearch |
| 2097 | 2097 | from navegador.llm import auto_provider, get_provider |
| 2098 | 2098 | |
| 2099 | 2099 | store = _get_store(db) |
| 2100 | 2100 | provider = ( |
| 2101 | - get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model) | |
| 2101 | + get_provider(llm_provider, model=llm_model) | |
| 2102 | + if llm_provider | |
| 2103 | + else auto_provider(model=llm_model) | |
| 2102 | 2104 | ) |
| 2103 | 2105 | ss = SemanticSearch(store, provider) |
| 2104 | 2106 | |
| 2105 | 2107 | if do_index: |
| 2106 | 2108 | n = ss.index() |
| @@ -2220,11 +2222,13 @@ | ||
| 2220 | 2222 | from navegador.intelligence.nlp import NLPEngine |
| 2221 | 2223 | from navegador.llm import auto_provider, get_provider |
| 2222 | 2224 | |
| 2223 | 2225 | store = _get_store(db) |
| 2224 | 2226 | provider = ( |
| 2225 | - get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model) | |
| 2227 | + get_provider(llm_provider, model=llm_model) | |
| 2228 | + if llm_provider | |
| 2229 | + else auto_provider(model=llm_model) | |
| 2226 | 2230 | ) |
| 2227 | 2231 | engine = NLPEngine(store, provider) |
| 2228 | 2232 | |
| 2229 | 2233 | with console.status("[bold]Thinking...[/bold]"): |
| 2230 | 2234 | answer = engine.natural_query(question) |
| @@ -2244,13 +2248,11 @@ | ||
| 2244 | 2248 | default="", |
| 2245 | 2249 | help="LLM provider (anthropic, openai, ollama). Auto-detected if omitted.", |
| 2246 | 2250 | ) |
| 2247 | 2251 | @click.option("--model", "llm_model", default="", help="LLM model name.") |
| 2248 | 2252 | @click.option("--file", "file_path", default="", help="Narrow to a specific file.") |
| 2249 | -def generate_docs_cmd( | |
| 2250 | - name: str, db: str, llm_provider: str, llm_model: str, file_path: str | |
| 2251 | -): | |
| 2253 | +def generate_docs_cmd(name: str, db: str, llm_provider: str, llm_model: str, file_path: str): | |
| 2252 | 2254 | """Generate LLM-powered documentation for a named symbol. |
| 2253 | 2255 | |
| 2254 | 2256 | \b |
| 2255 | 2257 | Examples: |
| 2256 | 2258 | navegador generate-docs authenticate_user |
| @@ -2259,11 +2261,13 @@ | ||
| 2259 | 2261 | from navegador.intelligence.nlp import NLPEngine |
| 2260 | 2262 | from navegador.llm import auto_provider, get_provider |
| 2261 | 2263 | |
| 2262 | 2264 | store = _get_store(db) |
| 2263 | 2265 | provider = ( |
| 2264 | - get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model) | |
| 2266 | + get_provider(llm_provider, model=llm_model) | |
| 2267 | + if llm_provider | |
| 2268 | + else auto_provider(model=llm_model) | |
| 2265 | 2269 | ) |
| 2266 | 2270 | engine = NLPEngine(store, provider) |
| 2267 | 2271 | |
| 2268 | 2272 | with console.status("[bold]Generating docs...[/bold]"): |
| 2269 | 2273 | docs = engine.generate_docs(name, file_path=file_path) |
| @@ -2284,13 +2288,11 @@ | ||
| 2284 | 2288 | default="", |
| 2285 | 2289 | help="LLM provider (anthropic, openai, ollama). Template mode if omitted.", |
| 2286 | 2290 | ) |
| 2287 | 2291 | @click.option("--model", "llm_model", default="", help="LLM model name.") |
| 2288 | 2292 | @click.option("--json", "as_json", is_flag=True, help="Output as JSON (wraps markdown in a dict).") |
| 2289 | -def docs( | |
| 2290 | - target: str, db: str, project: bool, llm_provider: str, llm_model: str, as_json: bool | |
| 2291 | -): | |
| 2293 | +def docs(target: str, db: str, project: bool, llm_provider: str, llm_model: str, as_json: bool): | |
| 2292 | 2294 | """Generate markdown documentation from the graph. |
| 2293 | 2295 | |
| 2294 | 2296 | TARGET can be a file path or a module name (dotted or partial). |
| 2295 | 2297 | Use --project to generate full project docs instead. |
| 2296 | 2298 | |
| 2297 | 2299 |
| --- navegador/cli/commands.py | |
| +++ navegador/cli/commands.py | |
| @@ -142,12 +142,21 @@ | |
| 142 | @click.option( |
| 143 | "--monorepo", |
| 144 | is_flag=True, |
| 145 | help="Detect and ingest as a monorepo workspace (Turborepo, Nx, Yarn, pnpm, Cargo, Go).", |
| 146 | ) |
| 147 | def ingest(repo_path: str, db: str, clear: bool, incremental: bool, watch: bool, |
| 148 | interval: float, as_json: bool, redact: bool, monorepo: bool): |
| 149 | """Ingest a repository's code into the graph (AST + call graph).""" |
| 150 | if monorepo: |
| 151 | from navegador.monorepo import MonorepoIngester |
| 152 | |
| 153 | store = _get_store(db) |
| @@ -177,13 +186,11 @@ | |
| 177 | |
| 178 | def _on_cycle(stats): |
| 179 | changed = stats["files"] |
| 180 | skipped = stats["skipped"] |
| 181 | if changed: |
| 182 | console.print( |
| 183 | f" [green]{changed} changed[/green], {skipped} unchanged" |
| 184 | ) |
| 185 | return True # keep watching |
| 186 | |
| 187 | try: |
| 188 | ingester.watch(repo_path, interval=interval, callback=_on_cycle) |
| 189 | except KeyboardInterrupt: |
| @@ -727,12 +734,14 @@ | |
| 727 | def migrate(db: str, check: bool): |
| 728 | """Apply pending schema migrations to the graph.""" |
| 729 | from navegador.graph.migrations import ( |
| 730 | CURRENT_SCHEMA_VERSION, |
| 731 | get_schema_version, |
| 732 | migrate as do_migrate, |
| 733 | needs_migration, |
| 734 | ) |
| 735 | |
| 736 | store = _get_store(db) |
| 737 | |
| 738 | if check: |
| @@ -875,15 +884,11 @@ | |
| 875 | param_hint="--framework", |
| 876 | ) |
| 877 | targets = {framework_name: available[framework_name]} |
| 878 | else: |
| 879 | # Auto-detect: only run enrichers whose detect() returns True. |
| 880 | targets = { |
| 881 | name: cls |
| 882 | for name, cls in available.items() |
| 883 | if cls(store).detect() |
| 884 | } |
| 885 | if not targets and not as_json: |
| 886 | console.print("[yellow]No frameworks detected in the graph.[/yellow]") |
| 887 | return |
| 888 | |
| 889 | all_results: dict[str, dict] = {} |
| @@ -1099,13 +1104,11 @@ | |
| 1099 | try: |
| 1100 | store = _get_store(db) |
| 1101 | current = get_schema_version(store) |
| 1102 | data = {"schema_version": current, "current_schema_version": CURRENT_SCHEMA_VERSION} |
| 1103 | if needs_migration(store): |
| 1104 | reporter.add_warning( |
| 1105 | f"Schema migration needed: v{current} → v{CURRENT_SCHEMA_VERSION}" |
| 1106 | ) |
| 1107 | except Exception as exc: # noqa: BLE001 |
| 1108 | reporter.add_error(str(exc)) |
| 1109 | |
| 1110 | reporter.emit(data=data or None) |
| 1111 | sys.exit(reporter.exit_code()) |
| @@ -1151,13 +1154,11 @@ | |
| 1151 | else: |
| 1152 | line = get_eval_line(shell) |
| 1153 | rc = rc_path or get_rc_path(shell) |
| 1154 | console.print(f"Add the following line to [bold]{rc}[/bold]:\n") |
| 1155 | click.echo(f" {line}") |
| 1156 | console.print( |
| 1157 | f"\nOr run: [bold]navegador completions {shell} --install[/bold]" |
| 1158 | ) |
| 1159 | |
| 1160 | |
| 1161 | # ── Churn / behavioural coupling ───────────────────────────────────────────── |
| 1162 | |
| 1163 | |
| @@ -1293,12 +1294,11 @@ | |
| 1293 | "--read-only", |
| 1294 | "read_only", |
| 1295 | is_flag=True, |
| 1296 | default=False, |
| 1297 | help=( |
| 1298 | "Start in read-only mode: disables ingest_repo and blocks write " |
| 1299 | "operations in query_graph." |
| 1300 | ), |
| 1301 | ) |
| 1302 | def mcp(db: str, read_only: bool): |
| 1303 | """Start the MCP server for AI agent integration (stdio).""" |
| 1304 | from mcp.server.stdio import stdio_server # type: ignore[import] |
| @@ -1337,13 +1337,11 @@ | |
| 1337 | |
| 1338 | if as_json: |
| 1339 | click.echo(json.dumps(result.to_dict(), indent=2)) |
| 1340 | return |
| 1341 | |
| 1342 | console.print( |
| 1343 | f"[bold]Blast radius:[/bold] [cyan]{name}[/cyan] (depth={depth})" |
| 1344 | ) |
| 1345 | if not result.affected_nodes: |
| 1346 | console.print("[yellow]No affected nodes found.[/yellow]") |
| 1347 | return |
| 1348 | |
| 1349 | table = Table(title=f"Affected nodes ({len(result.affected_nodes)})") |
| @@ -1350,13 +1348,11 @@ | |
| 1350 | table.add_column("Type", style="cyan") |
| 1351 | table.add_column("Name", style="bold") |
| 1352 | table.add_column("File") |
| 1353 | table.add_column("Line", justify="right") |
| 1354 | for node in result.affected_nodes: |
| 1355 | table.add_row( |
| 1356 | node["type"], node["name"], node["file_path"], str(node["line_start"] or "") |
| 1357 | ) |
| 1358 | console.print(table) |
| 1359 | |
| 1360 | if result.affected_files: |
| 1361 | console.print(f"\n[bold]Affected files ({len(result.affected_files)}):[/bold]") |
| 1362 | for fp in result.affected_files: |
| @@ -1393,18 +1389,16 @@ | |
| 1393 | |
| 1394 | if not chains: |
| 1395 | console.print(f"[yellow]No call chains found from[/yellow] [cyan]{name}[/cyan].") |
| 1396 | return |
| 1397 | |
| 1398 | console.print( |
| 1399 | f"[bold]Call chains from[/bold] [cyan]{name}[/cyan] — {len(chains)} path(s)" |
| 1400 | ) |
| 1401 | for i, chain in enumerate(chains, 1): |
| 1402 | steps = chain.to_list() |
| 1403 | path_str = " → ".join( |
| 1404 | [steps[0]["caller"]] + [s["callee"] for s in steps] |
| 1405 | ) if steps else name |
| 1406 | console.print(f" {i}. {path_str}") |
| 1407 | |
| 1408 | |
| 1409 | # ── ANALYSIS: dead code ─────────────────────────────────────────────────────── |
| 1410 | |
| @@ -1433,11 +1427,13 @@ | |
| 1433 | f"{summary['unreachable_classes']} dead classes, " |
| 1434 | f"{summary['orphan_files']} orphan files" |
| 1435 | ) |
| 1436 | |
| 1437 | if report.unreachable_functions: |
| 1438 | fn_table = Table(title=f"Unreachable functions/methods ({len(report.unreachable_functions)})") |
| 1439 | fn_table.add_column("Type", style="cyan") |
| 1440 | fn_table.add_column("Name", style="bold") |
| 1441 | fn_table.add_column("File") |
| 1442 | fn_table.add_column("Line", justify="right") |
| 1443 | for fn in report.unreachable_functions: |
| @@ -1507,14 +1503,14 @@ | |
| 1507 | # ── ANALYSIS: cycles ────────────────────────────────────────────────────────── |
| 1508 | |
| 1509 | |
| 1510 | @main.command() |
| 1511 | @DB_OPTION |
| 1512 | @click.option("--imports", "check_imports", is_flag=True, default=False, |
| 1513 | help="Check import cycles only.") |
| 1514 | @click.option("--calls", "check_calls", is_flag=True, default=False, |
| 1515 | help="Check call cycles only.") |
| 1516 | @click.option("--json", "as_json", is_flag=True, help="Output as JSON.") |
| 1517 | def cycles(db: str, check_imports: bool, check_calls: bool, as_json: bool): |
| 1518 | """Detect circular dependencies in import and call graphs. |
| 1519 | |
| 1520 | By default checks both import cycles and call cycles. |
| @@ -1529,13 +1525,11 @@ | |
| 1529 | import_cycles = detector.detect_import_cycles() if run_imports else [] |
| 1530 | call_cycles = detector.detect_call_cycles() if run_calls else [] |
| 1531 | |
| 1532 | if as_json: |
| 1533 | click.echo( |
| 1534 | json.dumps( |
| 1535 | {"import_cycles": import_cycles, "call_cycles": call_cycles}, indent=2 |
| 1536 | ) |
| 1537 | ) |
| 1538 | return |
| 1539 | |
| 1540 | if not import_cycles and not call_cycles: |
| 1541 | console.print("[green]No circular dependencies found.[/green]") |
| @@ -1816,20 +1810,26 @@ | |
| 1816 | def pm(): |
| 1817 | """Ingest project management tickets (GitHub Issues, Linear, Jira).""" |
| 1818 | |
| 1819 | |
| 1820 | @pm.command("ingest") |
| 1821 | @click.option("--github", "github_repo", default="", metavar="OWNER/REPO", |
| 1822 | help="GitHub repository in owner/repo format.") |
| 1823 | @click.option("--token", default="", envvar="GITHUB_TOKEN", |
| 1824 | help="GitHub personal access token.") |
| 1825 | @click.option("--state", default="open", |
| 1826 | type=click.Choice(["open", "closed", "all"]), |
| 1827 | show_default=True, |
| 1828 | help="GitHub issue state filter.") |
| 1829 | @click.option("--limit", default=100, show_default=True, |
| 1830 | help="Maximum number of issues to fetch.") |
| 1831 | @DB_OPTION |
| 1832 | @click.option("--json", "as_json", is_flag=True) |
| 1833 | def pm_ingest(github_repo: str, token: str, state: str, limit: int, db: str, as_json: bool): |
| 1834 | """Ingest tickets from a PM tool into the knowledge graph. |
| 1835 | |
| @@ -1838,11 +1838,13 @@ | |
| 1838 | navegador pm ingest --github owner/repo |
| 1839 | navegador pm ingest --github owner/repo --token ghp_... |
| 1840 | navegador pm ingest --github owner/repo --state all --limit 200 |
| 1841 | """ |
| 1842 | if not github_repo: |
| 1843 | raise click.UsageError("Provide --github <owner/repo> (more backends coming in a future release).") |
| 1844 | |
| 1845 | from navegador.pm import TicketIngester |
| 1846 | |
| 1847 | ing = TicketIngester(_get_store(db)) |
| 1848 | stats = ing.ingest_github_issues(github_repo, token=token, state=state, limit=limit) |
| @@ -1867,11 +1869,12 @@ | |
| 1867 | |
| 1868 | |
| 1869 | @deps.command("ingest") |
| 1870 | @click.argument("path", type=click.Path(exists=True)) |
| 1871 | @click.option( |
| 1872 | "--type", "dep_type", |
| 1873 | type=click.Choice(["auto", "npm", "pip", "cargo"]), |
| 1874 | default="auto", |
| 1875 | show_default=True, |
| 1876 | help="Manifest type. auto detects from filename.", |
| 1877 | ) |
| @@ -1922,12 +1925,11 @@ | |
| 1922 | |
| 1923 | if as_json: |
| 1924 | click.echo(json.dumps(stats, indent=2)) |
| 1925 | else: |
| 1926 | console.print( |
| 1927 | f"[green]Dependencies ingested[/green] ({dep_type}): " |
| 1928 | f"{stats['packages']} packages" |
| 1929 | ) |
| 1930 | |
| 1931 | |
| 1932 | # ── Submodules: ingest parent + submodules (#61) ────────────────────────────── |
| 1933 | |
| @@ -2029,13 +2031,11 @@ | |
| 2029 | raise click.UsageError("Provide at least one NAME=PATH repo.") |
| 2030 | |
| 2031 | wm = WorkspaceManager(_get_store(db), mode=WorkspaceMode(mode)) |
| 2032 | for repo_spec in repos: |
| 2033 | if "=" not in repo_spec: |
| 2034 | raise click.UsageError( |
| 2035 | f"Invalid repo spec {repo_spec!r}. Expected NAME=PATH format." |
| 2036 | ) |
| 2037 | name, path = repo_spec.split("=", 1) |
| 2038 | wm.add_repo(name.strip(), path.strip()) |
| 2039 | |
| 2040 | stats = wm.ingest_all(clear=clear) |
| 2041 | |
| @@ -2096,11 +2096,13 @@ | |
| 2096 | from navegador.intelligence.search import SemanticSearch |
| 2097 | from navegador.llm import auto_provider, get_provider |
| 2098 | |
| 2099 | store = _get_store(db) |
| 2100 | provider = ( |
| 2101 | get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model) |
| 2102 | ) |
| 2103 | ss = SemanticSearch(store, provider) |
| 2104 | |
| 2105 | if do_index: |
| 2106 | n = ss.index() |
| @@ -2220,11 +2222,13 @@ | |
| 2220 | from navegador.intelligence.nlp import NLPEngine |
| 2221 | from navegador.llm import auto_provider, get_provider |
| 2222 | |
| 2223 | store = _get_store(db) |
| 2224 | provider = ( |
| 2225 | get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model) |
| 2226 | ) |
| 2227 | engine = NLPEngine(store, provider) |
| 2228 | |
| 2229 | with console.status("[bold]Thinking...[/bold]"): |
| 2230 | answer = engine.natural_query(question) |
| @@ -2244,13 +2248,11 @@ | |
| 2244 | default="", |
| 2245 | help="LLM provider (anthropic, openai, ollama). Auto-detected if omitted.", |
| 2246 | ) |
| 2247 | @click.option("--model", "llm_model", default="", help="LLM model name.") |
| 2248 | @click.option("--file", "file_path", default="", help="Narrow to a specific file.") |
| 2249 | def generate_docs_cmd( |
| 2250 | name: str, db: str, llm_provider: str, llm_model: str, file_path: str |
| 2251 | ): |
| 2252 | """Generate LLM-powered documentation for a named symbol. |
| 2253 | |
| 2254 | \b |
| 2255 | Examples: |
| 2256 | navegador generate-docs authenticate_user |
| @@ -2259,11 +2261,13 @@ | |
| 2259 | from navegador.intelligence.nlp import NLPEngine |
| 2260 | from navegador.llm import auto_provider, get_provider |
| 2261 | |
| 2262 | store = _get_store(db) |
| 2263 | provider = ( |
| 2264 | get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model) |
| 2265 | ) |
| 2266 | engine = NLPEngine(store, provider) |
| 2267 | |
| 2268 | with console.status("[bold]Generating docs...[/bold]"): |
| 2269 | docs = engine.generate_docs(name, file_path=file_path) |
| @@ -2284,13 +2288,11 @@ | |
| 2284 | default="", |
| 2285 | help="LLM provider (anthropic, openai, ollama). Template mode if omitted.", |
| 2286 | ) |
| 2287 | @click.option("--model", "llm_model", default="", help="LLM model name.") |
| 2288 | @click.option("--json", "as_json", is_flag=True, help="Output as JSON (wraps markdown in a dict).") |
| 2289 | def docs( |
| 2290 | target: str, db: str, project: bool, llm_provider: str, llm_model: str, as_json: bool |
| 2291 | ): |
| 2292 | """Generate markdown documentation from the graph. |
| 2293 | |
| 2294 | TARGET can be a file path or a module name (dotted or partial). |
| 2295 | Use --project to generate full project docs instead. |
| 2296 | |
| 2297 |
| --- navegador/cli/commands.py | |
| +++ navegador/cli/commands.py | |
| @@ -142,12 +142,21 @@ | |
| 142 | @click.option( |
| 143 | "--monorepo", |
| 144 | is_flag=True, |
| 145 | help="Detect and ingest as a monorepo workspace (Turborepo, Nx, Yarn, pnpm, Cargo, Go).", |
| 146 | ) |
| 147 | def ingest( |
| 148 | repo_path: str, |
| 149 | db: str, |
| 150 | clear: bool, |
| 151 | incremental: bool, |
| 152 | watch: bool, |
| 153 | interval: float, |
| 154 | as_json: bool, |
| 155 | redact: bool, |
| 156 | monorepo: bool, |
| 157 | ): |
| 158 | """Ingest a repository's code into the graph (AST + call graph).""" |
| 159 | if monorepo: |
| 160 | from navegador.monorepo import MonorepoIngester |
| 161 | |
| 162 | store = _get_store(db) |
| @@ -177,13 +186,11 @@ | |
| 186 | |
| 187 | def _on_cycle(stats): |
| 188 | changed = stats["files"] |
| 189 | skipped = stats["skipped"] |
| 190 | if changed: |
| 191 | console.print(f" [green]{changed} changed[/green], {skipped} unchanged") |
| 192 | return True # keep watching |
| 193 | |
| 194 | try: |
| 195 | ingester.watch(repo_path, interval=interval, callback=_on_cycle) |
| 196 | except KeyboardInterrupt: |
| @@ -727,12 +734,14 @@ | |
| 734 | def migrate(db: str, check: bool): |
| 735 | """Apply pending schema migrations to the graph.""" |
| 736 | from navegador.graph.migrations import ( |
| 737 | CURRENT_SCHEMA_VERSION, |
| 738 | get_schema_version, |
| 739 | needs_migration, |
| 740 | ) |
| 741 | from navegador.graph.migrations import ( |
| 742 | migrate as do_migrate, |
| 743 | ) |
| 744 | |
| 745 | store = _get_store(db) |
| 746 | |
| 747 | if check: |
| @@ -875,15 +884,11 @@ | |
| 884 | param_hint="--framework", |
| 885 | ) |
| 886 | targets = {framework_name: available[framework_name]} |
| 887 | else: |
| 888 | # Auto-detect: only run enrichers whose detect() returns True. |
| 889 | targets = {name: cls for name, cls in available.items() if cls(store).detect()} |
| 890 | if not targets and not as_json: |
| 891 | console.print("[yellow]No frameworks detected in the graph.[/yellow]") |
| 892 | return |
| 893 | |
| 894 | all_results: dict[str, dict] = {} |
| @@ -1099,13 +1104,11 @@ | |
| 1104 | try: |
| 1105 | store = _get_store(db) |
| 1106 | current = get_schema_version(store) |
| 1107 | data = {"schema_version": current, "current_schema_version": CURRENT_SCHEMA_VERSION} |
| 1108 | if needs_migration(store): |
| 1109 | reporter.add_warning(f"Schema migration needed: v{current} → v{CURRENT_SCHEMA_VERSION}") |
| 1110 | except Exception as exc: # noqa: BLE001 |
| 1111 | reporter.add_error(str(exc)) |
| 1112 | |
| 1113 | reporter.emit(data=data or None) |
| 1114 | sys.exit(reporter.exit_code()) |
| @@ -1151,13 +1154,11 @@ | |
| 1154 | else: |
| 1155 | line = get_eval_line(shell) |
| 1156 | rc = rc_path or get_rc_path(shell) |
| 1157 | console.print(f"Add the following line to [bold]{rc}[/bold]:\n") |
| 1158 | click.echo(f" {line}") |
| 1159 | console.print(f"\nOr run: [bold]navegador completions {shell} --install[/bold]") |
| 1160 | |
| 1161 | |
| 1162 | # ── Churn / behavioural coupling ───────────────────────────────────────────── |
| 1163 | |
| 1164 | |
| @@ -1293,12 +1294,11 @@ | |
| 1294 | "--read-only", |
| 1295 | "read_only", |
| 1296 | is_flag=True, |
| 1297 | default=False, |
| 1298 | help=( |
| 1299 | "Start in read-only mode: disables ingest_repo and blocks write operations in query_graph." |
| 1300 | ), |
| 1301 | ) |
| 1302 | def mcp(db: str, read_only: bool): |
| 1303 | """Start the MCP server for AI agent integration (stdio).""" |
| 1304 | from mcp.server.stdio import stdio_server # type: ignore[import] |
| @@ -1337,13 +1337,11 @@ | |
| 1337 | |
| 1338 | if as_json: |
| 1339 | click.echo(json.dumps(result.to_dict(), indent=2)) |
| 1340 | return |
| 1341 | |
| 1342 | console.print(f"[bold]Blast radius:[/bold] [cyan]{name}[/cyan] (depth={depth})") |
| 1343 | if not result.affected_nodes: |
| 1344 | console.print("[yellow]No affected nodes found.[/yellow]") |
| 1345 | return |
| 1346 | |
| 1347 | table = Table(title=f"Affected nodes ({len(result.affected_nodes)})") |
| @@ -1350,13 +1348,11 @@ | |
| 1348 | table.add_column("Type", style="cyan") |
| 1349 | table.add_column("Name", style="bold") |
| 1350 | table.add_column("File") |
| 1351 | table.add_column("Line", justify="right") |
| 1352 | for node in result.affected_nodes: |
| 1353 | table.add_row(node["type"], node["name"], node["file_path"], str(node["line_start"] or "")) |
| 1354 | console.print(table) |
| 1355 | |
| 1356 | if result.affected_files: |
| 1357 | console.print(f"\n[bold]Affected files ({len(result.affected_files)}):[/bold]") |
| 1358 | for fp in result.affected_files: |
| @@ -1393,18 +1389,16 @@ | |
| 1389 | |
| 1390 | if not chains: |
| 1391 | console.print(f"[yellow]No call chains found from[/yellow] [cyan]{name}[/cyan].") |
| 1392 | return |
| 1393 | |
| 1394 | console.print(f"[bold]Call chains from[/bold] [cyan]{name}[/cyan] — {len(chains)} path(s)") |
| 1395 | for i, chain in enumerate(chains, 1): |
| 1396 | steps = chain.to_list() |
| 1397 | path_str = ( |
| 1398 | " → ".join([steps[0]["caller"]] + [s["callee"] for s in steps]) if steps else name |
| 1399 | ) |
| 1400 | console.print(f" {i}. {path_str}") |
| 1401 | |
| 1402 | |
| 1403 | # ── ANALYSIS: dead code ─────────────────────────────────────────────────────── |
| 1404 | |
| @@ -1433,11 +1427,13 @@ | |
| 1427 | f"{summary['unreachable_classes']} dead classes, " |
| 1428 | f"{summary['orphan_files']} orphan files" |
| 1429 | ) |
| 1430 | |
| 1431 | if report.unreachable_functions: |
| 1432 | fn_table = Table( |
| 1433 | title=f"Unreachable functions/methods ({len(report.unreachable_functions)})" |
| 1434 | ) |
| 1435 | fn_table.add_column("Type", style="cyan") |
| 1436 | fn_table.add_column("Name", style="bold") |
| 1437 | fn_table.add_column("File") |
| 1438 | fn_table.add_column("Line", justify="right") |
| 1439 | for fn in report.unreachable_functions: |
| @@ -1507,14 +1503,14 @@ | |
| 1503 | # ── ANALYSIS: cycles ────────────────────────────────────────────────────────── |
| 1504 | |
| 1505 | |
| 1506 | @main.command() |
| 1507 | @DB_OPTION |
| 1508 | @click.option( |
| 1509 | "--imports", "check_imports", is_flag=True, default=False, help="Check import cycles only." |
| 1510 | ) |
| 1511 | @click.option("--calls", "check_calls", is_flag=True, default=False, help="Check call cycles only.") |
| 1512 | @click.option("--json", "as_json", is_flag=True, help="Output as JSON.") |
| 1513 | def cycles(db: str, check_imports: bool, check_calls: bool, as_json: bool): |
| 1514 | """Detect circular dependencies in import and call graphs. |
| 1515 | |
| 1516 | By default checks both import cycles and call cycles. |
| @@ -1529,13 +1525,11 @@ | |
| 1525 | import_cycles = detector.detect_import_cycles() if run_imports else [] |
| 1526 | call_cycles = detector.detect_call_cycles() if run_calls else [] |
| 1527 | |
| 1528 | if as_json: |
| 1529 | click.echo( |
| 1530 | json.dumps({"import_cycles": import_cycles, "call_cycles": call_cycles}, indent=2) |
| 1531 | ) |
| 1532 | return |
| 1533 | |
| 1534 | if not import_cycles and not call_cycles: |
| 1535 | console.print("[green]No circular dependencies found.[/green]") |
| @@ -1816,20 +1810,26 @@ | |
| 1810 | def pm(): |
| 1811 | """Ingest project management tickets (GitHub Issues, Linear, Jira).""" |
| 1812 | |
| 1813 | |
| 1814 | @pm.command("ingest") |
| 1815 | @click.option( |
| 1816 | "--github", |
| 1817 | "github_repo", |
| 1818 | default="", |
| 1819 | metavar="OWNER/REPO", |
| 1820 | help="GitHub repository in owner/repo format.", |
| 1821 | ) |
| 1822 | @click.option("--token", default="", envvar="GITHUB_TOKEN", help="GitHub personal access token.") |
| 1823 | @click.option( |
| 1824 | "--state", |
| 1825 | default="open", |
| 1826 | type=click.Choice(["open", "closed", "all"]), |
| 1827 | show_default=True, |
| 1828 | help="GitHub issue state filter.", |
| 1829 | ) |
| 1830 | @click.option("--limit", default=100, show_default=True, help="Maximum number of issues to fetch.") |
| 1831 | @DB_OPTION |
| 1832 | @click.option("--json", "as_json", is_flag=True) |
| 1833 | def pm_ingest(github_repo: str, token: str, state: str, limit: int, db: str, as_json: bool): |
| 1834 | """Ingest tickets from a PM tool into the knowledge graph. |
| 1835 | |
| @@ -1838,11 +1838,13 @@ | |
| 1838 | navegador pm ingest --github owner/repo |
| 1839 | navegador pm ingest --github owner/repo --token ghp_... |
| 1840 | navegador pm ingest --github owner/repo --state all --limit 200 |
| 1841 | """ |
| 1842 | if not github_repo: |
| 1843 | raise click.UsageError( |
| 1844 | "Provide --github <owner/repo> (more backends coming in a future release)." |
| 1845 | ) |
| 1846 | |
| 1847 | from navegador.pm import TicketIngester |
| 1848 | |
| 1849 | ing = TicketIngester(_get_store(db)) |
| 1850 | stats = ing.ingest_github_issues(github_repo, token=token, state=state, limit=limit) |
| @@ -1867,11 +1869,12 @@ | |
| 1869 | |
| 1870 | |
| 1871 | @deps.command("ingest") |
| 1872 | @click.argument("path", type=click.Path(exists=True)) |
| 1873 | @click.option( |
| 1874 | "--type", |
| 1875 | "dep_type", |
| 1876 | type=click.Choice(["auto", "npm", "pip", "cargo"]), |
| 1877 | default="auto", |
| 1878 | show_default=True, |
| 1879 | help="Manifest type. auto detects from filename.", |
| 1880 | ) |
| @@ -1922,12 +1925,11 @@ | |
| 1925 | |
| 1926 | if as_json: |
| 1927 | click.echo(json.dumps(stats, indent=2)) |
| 1928 | else: |
| 1929 | console.print( |
| 1930 | f"[green]Dependencies ingested[/green] ({dep_type}): {stats['packages']} packages" |
| 1931 | ) |
| 1932 | |
| 1933 | |
| 1934 | # ── Submodules: ingest parent + submodules (#61) ────────────────────────────── |
| 1935 | |
| @@ -2029,13 +2031,11 @@ | |
| 2031 | raise click.UsageError("Provide at least one NAME=PATH repo.") |
| 2032 | |
| 2033 | wm = WorkspaceManager(_get_store(db), mode=WorkspaceMode(mode)) |
| 2034 | for repo_spec in repos: |
| 2035 | if "=" not in repo_spec: |
| 2036 | raise click.UsageError(f"Invalid repo spec {repo_spec!r}. Expected NAME=PATH format.") |
| 2037 | name, path = repo_spec.split("=", 1) |
| 2038 | wm.add_repo(name.strip(), path.strip()) |
| 2039 | |
| 2040 | stats = wm.ingest_all(clear=clear) |
| 2041 | |
| @@ -2096,11 +2096,13 @@ | |
| 2096 | from navegador.intelligence.search import SemanticSearch |
| 2097 | from navegador.llm import auto_provider, get_provider |
| 2098 | |
| 2099 | store = _get_store(db) |
| 2100 | provider = ( |
| 2101 | get_provider(llm_provider, model=llm_model) |
| 2102 | if llm_provider |
| 2103 | else auto_provider(model=llm_model) |
| 2104 | ) |
| 2105 | ss = SemanticSearch(store, provider) |
| 2106 | |
| 2107 | if do_index: |
| 2108 | n = ss.index() |
| @@ -2220,11 +2222,13 @@ | |
| 2222 | from navegador.intelligence.nlp import NLPEngine |
| 2223 | from navegador.llm import auto_provider, get_provider |
| 2224 | |
| 2225 | store = _get_store(db) |
| 2226 | provider = ( |
| 2227 | get_provider(llm_provider, model=llm_model) |
| 2228 | if llm_provider |
| 2229 | else auto_provider(model=llm_model) |
| 2230 | ) |
| 2231 | engine = NLPEngine(store, provider) |
| 2232 | |
| 2233 | with console.status("[bold]Thinking...[/bold]"): |
| 2234 | answer = engine.natural_query(question) |
| @@ -2244,13 +2248,11 @@ | |
| 2248 | default="", |
| 2249 | help="LLM provider (anthropic, openai, ollama). Auto-detected if omitted.", |
| 2250 | ) |
| 2251 | @click.option("--model", "llm_model", default="", help="LLM model name.") |
| 2252 | @click.option("--file", "file_path", default="", help="Narrow to a specific file.") |
| 2253 | def generate_docs_cmd(name: str, db: str, llm_provider: str, llm_model: str, file_path: str): |
| 2254 | """Generate LLM-powered documentation for a named symbol. |
| 2255 | |
| 2256 | \b |
| 2257 | Examples: |
| 2258 | navegador generate-docs authenticate_user |
| @@ -2259,11 +2261,13 @@ | |
| 2261 | from navegador.intelligence.nlp import NLPEngine |
| 2262 | from navegador.llm import auto_provider, get_provider |
| 2263 | |
| 2264 | store = _get_store(db) |
| 2265 | provider = ( |
| 2266 | get_provider(llm_provider, model=llm_model) |
| 2267 | if llm_provider |
| 2268 | else auto_provider(model=llm_model) |
| 2269 | ) |
| 2270 | engine = NLPEngine(store, provider) |
| 2271 | |
| 2272 | with console.status("[bold]Generating docs...[/bold]"): |
| 2273 | docs = engine.generate_docs(name, file_path=file_path) |
| @@ -2284,13 +2288,11 @@ | |
| 2288 | default="", |
| 2289 | help="LLM provider (anthropic, openai, ollama). Template mode if omitted.", |
| 2290 | ) |
| 2291 | @click.option("--model", "llm_model", default="", help="LLM model name.") |
| 2292 | @click.option("--json", "as_json", is_flag=True, help="Output as JSON (wraps markdown in a dict).") |
| 2293 | def docs(target: str, db: str, project: bool, llm_provider: str, llm_model: str, as_json: bool): |
| 2294 | """Generate markdown documentation from the graph. |
| 2295 | |
| 2296 | TARGET can be a file path or a module name (dotted or partial). |
| 2297 | Use --project to generate full project docs instead. |
| 2298 | |
| 2299 |
+21
-14
| --- navegador/cluster/core.py | ||
| +++ navegador/cluster/core.py | ||
| @@ -107,18 +107,20 @@ | ||
| 107 | 107 | |
| 108 | 108 | edges = [] |
| 109 | 109 | if edges_result.result_set: |
| 110 | 110 | for row in edges_result.result_set: |
| 111 | 111 | src, rel_type, rel, dst = row |
| 112 | - edges.append({ | |
| 113 | - "src_labels": list(src.labels), | |
| 114 | - "src_props": dict(src.properties), | |
| 115 | - "rel_type": rel_type, | |
| 116 | - "rel_props": dict(rel.properties) if rel.properties else {}, | |
| 117 | - "dst_labels": list(dst.labels), | |
| 118 | - "dst_props": dict(dst.properties), | |
| 119 | - }) | |
| 112 | + edges.append( | |
| 113 | + { | |
| 114 | + "src_labels": list(src.labels), | |
| 115 | + "src_props": dict(src.properties), | |
| 116 | + "rel_type": rel_type, | |
| 117 | + "rel_props": dict(rel.properties) if rel.properties else {}, | |
| 118 | + "dst_labels": list(dst.labels), | |
| 119 | + "dst_props": dict(dst.properties), | |
| 120 | + } | |
| 121 | + ) | |
| 120 | 122 | |
| 121 | 123 | return {"nodes": nodes, "edges": edges} |
| 122 | 124 | |
| 123 | 125 | def _import_to_local_graph(self, data: dict[str, Any]) -> None: |
| 124 | 126 | """Write snapshot data into the local SQLite graph.""" |
| @@ -167,15 +169,18 @@ | ||
| 167 | 169 | serialized = json.dumps(data) |
| 168 | 170 | pipe = self._redis.pipeline() |
| 169 | 171 | pipe.set(_SNAPSHOT_KEY, serialized) |
| 170 | 172 | new_version = self._redis_version() + 1 |
| 171 | 173 | pipe.set(_VERSION_KEY, new_version) |
| 172 | - pipe.hset(_META_KEY, mapping={ | |
| 173 | - "last_push": time.time(), | |
| 174 | - "node_count": len(data["nodes"]), | |
| 175 | - "edge_count": len(data["edges"]), | |
| 176 | - }) | |
| 174 | + pipe.hset( | |
| 175 | + _META_KEY, | |
| 176 | + mapping={ | |
| 177 | + "last_push": time.time(), | |
| 178 | + "node_count": len(data["nodes"]), | |
| 179 | + "edge_count": len(data["edges"]), | |
| 180 | + }, | |
| 181 | + ) | |
| 177 | 182 | pipe.execute() |
| 178 | 183 | self._set_local_version(new_version) |
| 179 | 184 | logger.info( |
| 180 | 185 | "Pushed local graph to Redis (version %d): %d nodes, %d edges", |
| 181 | 186 | new_version, |
| @@ -196,11 +201,13 @@ | ||
| 196 | 201 | |
| 197 | 202 | if shared_ver > local_ver: |
| 198 | 203 | logger.info("Shared graph is newer (%d > %d); pulling.", shared_ver, local_ver) |
| 199 | 204 | self.snapshot_to_local() |
| 200 | 205 | else: |
| 201 | - logger.info("Local graph is current or ahead (%d >= %d); pushing.", local_ver, shared_ver) | |
| 206 | + logger.info( | |
| 207 | + "Local graph is current or ahead (%d >= %d); pushing.", local_ver, shared_ver | |
| 208 | + ) | |
| 202 | 209 | self.push_to_shared() |
| 203 | 210 | |
| 204 | 211 | def status(self) -> dict[str, Any]: |
| 205 | 212 | """ |
| 206 | 213 | Return a dict describing the sync state. |
| 207 | 214 |
| --- navegador/cluster/core.py | |
| +++ navegador/cluster/core.py | |
| @@ -107,18 +107,20 @@ | |
| 107 | |
| 108 | edges = [] |
| 109 | if edges_result.result_set: |
| 110 | for row in edges_result.result_set: |
| 111 | src, rel_type, rel, dst = row |
| 112 | edges.append({ |
| 113 | "src_labels": list(src.labels), |
| 114 | "src_props": dict(src.properties), |
| 115 | "rel_type": rel_type, |
| 116 | "rel_props": dict(rel.properties) if rel.properties else {}, |
| 117 | "dst_labels": list(dst.labels), |
| 118 | "dst_props": dict(dst.properties), |
| 119 | }) |
| 120 | |
| 121 | return {"nodes": nodes, "edges": edges} |
| 122 | |
| 123 | def _import_to_local_graph(self, data: dict[str, Any]) -> None: |
| 124 | """Write snapshot data into the local SQLite graph.""" |
| @@ -167,15 +169,18 @@ | |
| 167 | serialized = json.dumps(data) |
| 168 | pipe = self._redis.pipeline() |
| 169 | pipe.set(_SNAPSHOT_KEY, serialized) |
| 170 | new_version = self._redis_version() + 1 |
| 171 | pipe.set(_VERSION_KEY, new_version) |
| 172 | pipe.hset(_META_KEY, mapping={ |
| 173 | "last_push": time.time(), |
| 174 | "node_count": len(data["nodes"]), |
| 175 | "edge_count": len(data["edges"]), |
| 176 | }) |
| 177 | pipe.execute() |
| 178 | self._set_local_version(new_version) |
| 179 | logger.info( |
| 180 | "Pushed local graph to Redis (version %d): %d nodes, %d edges", |
| 181 | new_version, |
| @@ -196,11 +201,13 @@ | |
| 196 | |
| 197 | if shared_ver > local_ver: |
| 198 | logger.info("Shared graph is newer (%d > %d); pulling.", shared_ver, local_ver) |
| 199 | self.snapshot_to_local() |
| 200 | else: |
| 201 | logger.info("Local graph is current or ahead (%d >= %d); pushing.", local_ver, shared_ver) |
| 202 | self.push_to_shared() |
| 203 | |
| 204 | def status(self) -> dict[str, Any]: |
| 205 | """ |
| 206 | Return a dict describing the sync state. |
| 207 |
| --- navegador/cluster/core.py | |
| +++ navegador/cluster/core.py | |
| @@ -107,18 +107,20 @@ | |
| 107 | |
| 108 | edges = [] |
| 109 | if edges_result.result_set: |
| 110 | for row in edges_result.result_set: |
| 111 | src, rel_type, rel, dst = row |
| 112 | edges.append( |
| 113 | { |
| 114 | "src_labels": list(src.labels), |
| 115 | "src_props": dict(src.properties), |
| 116 | "rel_type": rel_type, |
| 117 | "rel_props": dict(rel.properties) if rel.properties else {}, |
| 118 | "dst_labels": list(dst.labels), |
| 119 | "dst_props": dict(dst.properties), |
| 120 | } |
| 121 | ) |
| 122 | |
| 123 | return {"nodes": nodes, "edges": edges} |
| 124 | |
| 125 | def _import_to_local_graph(self, data: dict[str, Any]) -> None: |
| 126 | """Write snapshot data into the local SQLite graph.""" |
| @@ -167,15 +169,18 @@ | |
| 169 | serialized = json.dumps(data) |
| 170 | pipe = self._redis.pipeline() |
| 171 | pipe.set(_SNAPSHOT_KEY, serialized) |
| 172 | new_version = self._redis_version() + 1 |
| 173 | pipe.set(_VERSION_KEY, new_version) |
| 174 | pipe.hset( |
| 175 | _META_KEY, |
| 176 | mapping={ |
| 177 | "last_push": time.time(), |
| 178 | "node_count": len(data["nodes"]), |
| 179 | "edge_count": len(data["edges"]), |
| 180 | }, |
| 181 | ) |
| 182 | pipe.execute() |
| 183 | self._set_local_version(new_version) |
| 184 | logger.info( |
| 185 | "Pushed local graph to Redis (version %d): %d nodes, %d edges", |
| 186 | new_version, |
| @@ -196,11 +201,13 @@ | |
| 201 | |
| 202 | if shared_ver > local_ver: |
| 203 | logger.info("Shared graph is newer (%d > %d); pulling.", shared_ver, local_ver) |
| 204 | self.snapshot_to_local() |
| 205 | else: |
| 206 | logger.info( |
| 207 | "Local graph is current or ahead (%d >= %d); pushing.", local_ver, shared_ver |
| 208 | ) |
| 209 | self.push_to_shared() |
| 210 | |
| 211 | def status(self) -> dict[str, Any]: |
| 212 | """ |
| 213 | Return a dict describing the sync state. |
| 214 |
+15
-18
| --- navegador/cluster/fossil_live.py | ||
| +++ navegador/cluster/fossil_live.py | ||
| @@ -7,13 +7,11 @@ | ||
| 7 | 7 | SQLite-backed. |
| 8 | 8 | """ |
| 9 | 9 | |
| 10 | 10 | from __future__ import annotations |
| 11 | 11 | |
| 12 | -import json | |
| 13 | 12 | import logging |
| 14 | -import time | |
| 15 | 13 | from pathlib import Path |
| 16 | 14 | from typing import TYPE_CHECKING, Any |
| 17 | 15 | |
| 18 | 16 | if TYPE_CHECKING: |
| 19 | 17 | from navegador.graph.store import GraphStore |
| @@ -71,10 +69,11 @@ | ||
| 71 | 69 | client = store._client # type: ignore[attr-defined] |
| 72 | 70 | for attr in ("_db", "connection", "_connection", "db"): |
| 73 | 71 | conn = getattr(client, attr, None) |
| 74 | 72 | if conn is not None: |
| 75 | 73 | import sqlite3 |
| 74 | + | |
| 76 | 75 | if isinstance(conn, sqlite3.Connection): |
| 77 | 76 | return conn |
| 78 | 77 | except Exception: |
| 79 | 78 | pass |
| 80 | 79 | return None |
| @@ -94,13 +93,11 @@ | ||
| 94 | 93 | if self._attached: |
| 95 | 94 | return |
| 96 | 95 | |
| 97 | 96 | native_conn = self._extract_sqlite_conn(store) |
| 98 | 97 | if native_conn is not None: |
| 99 | - native_conn.execute( | |
| 100 | - f"ATTACH DATABASE ? AS fossil", (str(self._fossil_path),) | |
| 101 | - ) | |
| 98 | + native_conn.execute("ATTACH DATABASE ? AS fossil", (str(self._fossil_path),)) | |
| 102 | 99 | self._conn = native_conn |
| 103 | 100 | self._attached = True |
| 104 | 101 | logger.info("Fossil DB attached to FalkorDB SQLite: %s", self._fossil_path) |
| 105 | 102 | else: |
| 106 | 103 | logger.warning( |
| @@ -139,20 +136,22 @@ | ||
| 139 | 136 | result = [] |
| 140 | 137 | for row in rows: |
| 141 | 138 | if hasattr(row, "keys"): |
| 142 | 139 | result.append(dict(row)) |
| 143 | 140 | else: |
| 144 | - result.append({ | |
| 145 | - "type": row[0], | |
| 146 | - "mtime": row[1], | |
| 147 | - "objid": row[2], | |
| 148 | - "uid": row[3], | |
| 149 | - "user": row[4], | |
| 150 | - "euser": row[5], | |
| 151 | - "comment": row[6], | |
| 152 | - "ecomment": row[7], | |
| 153 | - }) | |
| 141 | + result.append( | |
| 142 | + { | |
| 143 | + "type": row[0], | |
| 144 | + "mtime": row[1], | |
| 145 | + "objid": row[2], | |
| 146 | + "uid": row[3], | |
| 147 | + "user": row[4], | |
| 148 | + "euser": row[5], | |
| 149 | + "comment": row[6], | |
| 150 | + "ecomment": row[7], | |
| 151 | + } | |
| 152 | + ) | |
| 154 | 153 | return result |
| 155 | 154 | |
| 156 | 155 | def query_tickets(self) -> list[dict]: |
| 157 | 156 | """ |
| 158 | 157 | Query Fossil tickets. |
| @@ -232,9 +231,7 @@ | ||
| 232 | 231 | f"SET {prop_str}", |
| 233 | 232 | props, |
| 234 | 233 | ) |
| 235 | 234 | ticket_count += 1 |
| 236 | 235 | |
| 237 | - logger.info( | |
| 238 | - "Fossil sync complete: %d commits, %d tickets", commit_count, ticket_count | |
| 239 | - ) | |
| 236 | + logger.info("Fossil sync complete: %d commits, %d tickets", commit_count, ticket_count) | |
| 240 | 237 | return {"commits": commit_count, "tickets": ticket_count} |
| 241 | 238 |
| --- navegador/cluster/fossil_live.py | |
| +++ navegador/cluster/fossil_live.py | |
| @@ -7,13 +7,11 @@ | |
| 7 | SQLite-backed. |
| 8 | """ |
| 9 | |
| 10 | from __future__ import annotations |
| 11 | |
| 12 | import json |
| 13 | import logging |
| 14 | import time |
| 15 | from pathlib import Path |
| 16 | from typing import TYPE_CHECKING, Any |
| 17 | |
| 18 | if TYPE_CHECKING: |
| 19 | from navegador.graph.store import GraphStore |
| @@ -71,10 +69,11 @@ | |
| 71 | client = store._client # type: ignore[attr-defined] |
| 72 | for attr in ("_db", "connection", "_connection", "db"): |
| 73 | conn = getattr(client, attr, None) |
| 74 | if conn is not None: |
| 75 | import sqlite3 |
| 76 | if isinstance(conn, sqlite3.Connection): |
| 77 | return conn |
| 78 | except Exception: |
| 79 | pass |
| 80 | return None |
| @@ -94,13 +93,11 @@ | |
| 94 | if self._attached: |
| 95 | return |
| 96 | |
| 97 | native_conn = self._extract_sqlite_conn(store) |
| 98 | if native_conn is not None: |
| 99 | native_conn.execute( |
| 100 | f"ATTACH DATABASE ? AS fossil", (str(self._fossil_path),) |
| 101 | ) |
| 102 | self._conn = native_conn |
| 103 | self._attached = True |
| 104 | logger.info("Fossil DB attached to FalkorDB SQLite: %s", self._fossil_path) |
| 105 | else: |
| 106 | logger.warning( |
| @@ -139,20 +136,22 @@ | |
| 139 | result = [] |
| 140 | for row in rows: |
| 141 | if hasattr(row, "keys"): |
| 142 | result.append(dict(row)) |
| 143 | else: |
| 144 | result.append({ |
| 145 | "type": row[0], |
| 146 | "mtime": row[1], |
| 147 | "objid": row[2], |
| 148 | "uid": row[3], |
| 149 | "user": row[4], |
| 150 | "euser": row[5], |
| 151 | "comment": row[6], |
| 152 | "ecomment": row[7], |
| 153 | }) |
| 154 | return result |
| 155 | |
| 156 | def query_tickets(self) -> list[dict]: |
| 157 | """ |
| 158 | Query Fossil tickets. |
| @@ -232,9 +231,7 @@ | |
| 232 | f"SET {prop_str}", |
| 233 | props, |
| 234 | ) |
| 235 | ticket_count += 1 |
| 236 | |
| 237 | logger.info( |
| 238 | "Fossil sync complete: %d commits, %d tickets", commit_count, ticket_count |
| 239 | ) |
| 240 | return {"commits": commit_count, "tickets": ticket_count} |
| 241 |
| --- navegador/cluster/fossil_live.py | |
| +++ navegador/cluster/fossil_live.py | |
| @@ -7,13 +7,11 @@ | |
| 7 | SQLite-backed. |
| 8 | """ |
| 9 | |
| 10 | from __future__ import annotations |
| 11 | |
| 12 | import logging |
| 13 | from pathlib import Path |
| 14 | from typing import TYPE_CHECKING, Any |
| 15 | |
| 16 | if TYPE_CHECKING: |
| 17 | from navegador.graph.store import GraphStore |
| @@ -71,10 +69,11 @@ | |
| 69 | client = store._client # type: ignore[attr-defined] |
| 70 | for attr in ("_db", "connection", "_connection", "db"): |
| 71 | conn = getattr(client, attr, None) |
| 72 | if conn is not None: |
| 73 | import sqlite3 |
| 74 | |
| 75 | if isinstance(conn, sqlite3.Connection): |
| 76 | return conn |
| 77 | except Exception: |
| 78 | pass |
| 79 | return None |
| @@ -94,13 +93,11 @@ | |
| 93 | if self._attached: |
| 94 | return |
| 95 | |
| 96 | native_conn = self._extract_sqlite_conn(store) |
| 97 | if native_conn is not None: |
| 98 | native_conn.execute("ATTACH DATABASE ? AS fossil", (str(self._fossil_path),)) |
| 99 | self._conn = native_conn |
| 100 | self._attached = True |
| 101 | logger.info("Fossil DB attached to FalkorDB SQLite: %s", self._fossil_path) |
| 102 | else: |
| 103 | logger.warning( |
| @@ -139,20 +136,22 @@ | |
| 136 | result = [] |
| 137 | for row in rows: |
| 138 | if hasattr(row, "keys"): |
| 139 | result.append(dict(row)) |
| 140 | else: |
| 141 | result.append( |
| 142 | { |
| 143 | "type": row[0], |
| 144 | "mtime": row[1], |
| 145 | "objid": row[2], |
| 146 | "uid": row[3], |
| 147 | "user": row[4], |
| 148 | "euser": row[5], |
| 149 | "comment": row[6], |
| 150 | "ecomment": row[7], |
| 151 | } |
| 152 | ) |
| 153 | return result |
| 154 | |
| 155 | def query_tickets(self) -> list[dict]: |
| 156 | """ |
| 157 | Query Fossil tickets. |
| @@ -232,9 +231,7 @@ | |
| 231 | f"SET {prop_str}", |
| 232 | props, |
| 233 | ) |
| 234 | ticket_count += 1 |
| 235 | |
| 236 | logger.info("Fossil sync complete: %d commits, %d tickets", commit_count, ticket_count) |
| 237 | return {"commits": commit_count, "tickets": ticket_count} |
| 238 |
+1
-4
| --- navegador/cluster/locking.py | ||
| +++ navegador/cluster/locking.py | ||
| @@ -8,11 +8,10 @@ | ||
| 8 | 8 | from __future__ import annotations |
| 9 | 9 | |
| 10 | 10 | import logging |
| 11 | 11 | import time |
| 12 | 12 | import uuid |
| 13 | -from contextlib import contextmanager | |
| 14 | 13 | from typing import Any |
| 15 | 14 | |
| 16 | 15 | logger = logging.getLogger(__name__) |
| 17 | 16 | |
| 18 | 17 | _LOCK_PREFIX = "navegador:lock:" |
| @@ -127,12 +126,10 @@ | ||
| 127 | 126 | |
| 128 | 127 | def __enter__(self) -> "DistributedLock": |
| 129 | 128 | deadline = time.monotonic() + self._timeout |
| 130 | 129 | acquired = self.acquire(blocking=True, deadline=deadline) |
| 131 | 130 | if not acquired: |
| 132 | - raise LockTimeout( | |
| 133 | - f"Could not acquire lock '{self._name}' within {self._timeout}s" | |
| 134 | - ) | |
| 131 | + raise LockTimeout(f"Could not acquire lock '{self._name}' within {self._timeout}s") | |
| 135 | 132 | return self |
| 136 | 133 | |
| 137 | 134 | def __exit__(self, *_: object) -> None: |
| 138 | 135 | self.release() |
| 139 | 136 |
| --- navegador/cluster/locking.py | |
| +++ navegador/cluster/locking.py | |
| @@ -8,11 +8,10 @@ | |
| 8 | from __future__ import annotations |
| 9 | |
| 10 | import logging |
| 11 | import time |
| 12 | import uuid |
| 13 | from contextlib import contextmanager |
| 14 | from typing import Any |
| 15 | |
| 16 | logger = logging.getLogger(__name__) |
| 17 | |
| 18 | _LOCK_PREFIX = "navegador:lock:" |
| @@ -127,12 +126,10 @@ | |
| 127 | |
| 128 | def __enter__(self) -> "DistributedLock": |
| 129 | deadline = time.monotonic() + self._timeout |
| 130 | acquired = self.acquire(blocking=True, deadline=deadline) |
| 131 | if not acquired: |
| 132 | raise LockTimeout( |
| 133 | f"Could not acquire lock '{self._name}' within {self._timeout}s" |
| 134 | ) |
| 135 | return self |
| 136 | |
| 137 | def __exit__(self, *_: object) -> None: |
| 138 | self.release() |
| 139 |
| --- navegador/cluster/locking.py | |
| +++ navegador/cluster/locking.py | |
| @@ -8,11 +8,10 @@ | |
| 8 | from __future__ import annotations |
| 9 | |
| 10 | import logging |
| 11 | import time |
| 12 | import uuid |
| 13 | from typing import Any |
| 14 | |
| 15 | logger = logging.getLogger(__name__) |
| 16 | |
| 17 | _LOCK_PREFIX = "navegador:lock:" |
| @@ -127,12 +126,10 @@ | |
| 126 | |
| 127 | def __enter__(self) -> "DistributedLock": |
| 128 | deadline = time.monotonic() + self._timeout |
| 129 | acquired = self.acquire(blocking=True, deadline=deadline) |
| 130 | if not acquired: |
| 131 | raise LockTimeout(f"Could not acquire lock '{self._name}' within {self._timeout}s") |
| 132 | return self |
| 133 | |
| 134 | def __exit__(self, *_: object) -> None: |
| 135 | self.release() |
| 136 |
+1
-1
| --- navegador/cluster/messaging.py | ||
| +++ navegador/cluster/messaging.py | ||
| @@ -11,11 +11,11 @@ | ||
| 11 | 11 | |
| 12 | 12 | import json |
| 13 | 13 | import logging |
| 14 | 14 | import time |
| 15 | 15 | import uuid |
| 16 | -from dataclasses import asdict, dataclass, field | |
| 16 | +from dataclasses import asdict, dataclass | |
| 17 | 17 | from typing import Any |
| 18 | 18 | |
| 19 | 19 | logger = logging.getLogger(__name__) |
| 20 | 20 | |
| 21 | 21 | _QUEUE_PREFIX = "navegador:msg:queue:" |
| 22 | 22 |
| --- navegador/cluster/messaging.py | |
| +++ navegador/cluster/messaging.py | |
| @@ -11,11 +11,11 @@ | |
| 11 | |
| 12 | import json |
| 13 | import logging |
| 14 | import time |
| 15 | import uuid |
| 16 | from dataclasses import asdict, dataclass, field |
| 17 | from typing import Any |
| 18 | |
| 19 | logger = logging.getLogger(__name__) |
| 20 | |
| 21 | _QUEUE_PREFIX = "navegador:msg:queue:" |
| 22 |
| --- navegador/cluster/messaging.py | |
| +++ navegador/cluster/messaging.py | |
| @@ -11,11 +11,11 @@ | |
| 11 | |
| 12 | import json |
| 13 | import logging |
| 14 | import time |
| 15 | import uuid |
| 16 | from dataclasses import asdict, dataclass |
| 17 | from typing import Any |
| 18 | |
| 19 | logger = logging.getLogger(__name__) |
| 20 | |
| 21 | _QUEUE_PREFIX = "navegador:msg:queue:" |
| 22 |
| --- navegador/cluster/partitioning.py | ||
| +++ navegador/cluster/partitioning.py | ||
| @@ -19,11 +19,11 @@ | ||
| 19 | 19 | |
| 20 | 20 | from __future__ import annotations |
| 21 | 21 | |
| 22 | 22 | import logging |
| 23 | 23 | import math |
| 24 | -from dataclasses import dataclass, field | |
| 24 | +from dataclasses import dataclass | |
| 25 | 25 | from typing import Any |
| 26 | 26 | |
| 27 | 27 | logger = logging.getLogger(__name__) |
| 28 | 28 | |
| 29 | 29 | |
| @@ -63,12 +63,11 @@ | ||
| 63 | 63 | # ── Internal ────────────────────────────────────────────────────────────── |
| 64 | 64 | |
| 65 | 65 | def _get_all_file_paths(self) -> list[str]: |
| 66 | 66 | """Retrieve distinct file paths recorded in the graph.""" |
| 67 | 67 | result = self._store.query( |
| 68 | - "MATCH (n) WHERE n.file_path IS NOT NULL " | |
| 69 | - "RETURN DISTINCT n.file_path AS fp ORDER BY fp" | |
| 68 | + "MATCH (n) WHERE n.file_path IS NOT NULL RETURN DISTINCT n.file_path AS fp ORDER BY fp" | |
| 70 | 69 | ) |
| 71 | 70 | if not result.result_set: |
| 72 | 71 | return [] |
| 73 | 72 | paths: list[str] = [] |
| 74 | 73 | for row in result.result_set: |
| @@ -85,11 +84,11 @@ | ||
| 85 | 84 | if not items: |
| 86 | 85 | return [[] for _ in range(n)] |
| 87 | 86 | chunk_size = math.ceil(len(items) / n) |
| 88 | 87 | buckets = [] |
| 89 | 88 | for i in range(0, len(items), chunk_size): |
| 90 | - buckets.append(items[i: i + chunk_size]) | |
| 89 | + buckets.append(items[i : i + chunk_size]) | |
| 91 | 90 | # Pad with empty lists if fewer chunks than agents |
| 92 | 91 | while len(buckets) < n: |
| 93 | 92 | buckets.append([]) |
| 94 | 93 | return buckets[:n] |
| 95 | 94 | |
| 96 | 95 |
| --- navegador/cluster/partitioning.py | |
| +++ navegador/cluster/partitioning.py | |
| @@ -19,11 +19,11 @@ | |
| 19 | |
| 20 | from __future__ import annotations |
| 21 | |
| 22 | import logging |
| 23 | import math |
| 24 | from dataclasses import dataclass, field |
| 25 | from typing import Any |
| 26 | |
| 27 | logger = logging.getLogger(__name__) |
| 28 | |
| 29 | |
| @@ -63,12 +63,11 @@ | |
| 63 | # ── Internal ────────────────────────────────────────────────────────────── |
| 64 | |
| 65 | def _get_all_file_paths(self) -> list[str]: |
| 66 | """Retrieve distinct file paths recorded in the graph.""" |
| 67 | result = self._store.query( |
| 68 | "MATCH (n) WHERE n.file_path IS NOT NULL " |
| 69 | "RETURN DISTINCT n.file_path AS fp ORDER BY fp" |
| 70 | ) |
| 71 | if not result.result_set: |
| 72 | return [] |
| 73 | paths: list[str] = [] |
| 74 | for row in result.result_set: |
| @@ -85,11 +84,11 @@ | |
| 85 | if not items: |
| 86 | return [[] for _ in range(n)] |
| 87 | chunk_size = math.ceil(len(items) / n) |
| 88 | buckets = [] |
| 89 | for i in range(0, len(items), chunk_size): |
| 90 | buckets.append(items[i: i + chunk_size]) |
| 91 | # Pad with empty lists if fewer chunks than agents |
| 92 | while len(buckets) < n: |
| 93 | buckets.append([]) |
| 94 | return buckets[:n] |
| 95 | |
| 96 |
| --- navegador/cluster/partitioning.py | |
| +++ navegador/cluster/partitioning.py | |
| @@ -19,11 +19,11 @@ | |
| 19 | |
| 20 | from __future__ import annotations |
| 21 | |
| 22 | import logging |
| 23 | import math |
| 24 | from dataclasses import dataclass |
| 25 | from typing import Any |
| 26 | |
| 27 | logger = logging.getLogger(__name__) |
| 28 | |
| 29 | |
| @@ -63,12 +63,11 @@ | |
| 63 | # ── Internal ────────────────────────────────────────────────────────────── |
| 64 | |
| 65 | def _get_all_file_paths(self) -> list[str]: |
| 66 | """Retrieve distinct file paths recorded in the graph.""" |
| 67 | result = self._store.query( |
| 68 | "MATCH (n) WHERE n.file_path IS NOT NULL RETURN DISTINCT n.file_path AS fp ORDER BY fp" |
| 69 | ) |
| 70 | if not result.result_set: |
| 71 | return [] |
| 72 | paths: list[str] = [] |
| 73 | for row in result.result_set: |
| @@ -85,11 +84,11 @@ | |
| 84 | if not items: |
| 85 | return [[] for _ in range(n)] |
| 86 | chunk_size = math.ceil(len(items) / n) |
| 87 | buckets = [] |
| 88 | for i in range(0, len(items), chunk_size): |
| 89 | buckets.append(items[i : i + chunk_size]) |
| 90 | # Pad with empty lists if fewer chunks than agents |
| 91 | while len(buckets) < n: |
| 92 | buckets.append([]) |
| 93 | return buckets[:n] |
| 94 | |
| 95 |
+6
-4
| --- navegador/cluster/pubsub.py | ||
| +++ navegador/cluster/pubsub.py | ||
| @@ -92,14 +92,16 @@ | ||
| 92 | 92 | ------- |
| 93 | 93 | int |
| 94 | 94 | Number of clients that received the message. |
| 95 | 95 | """ |
| 96 | 96 | channel = _channel_name(event_type) |
| 97 | - payload = json.dumps({ | |
| 98 | - "event_type": event_type.value if isinstance(event_type, EventType) else event_type, | |
| 99 | - "data": data, | |
| 100 | - }) | |
| 97 | + payload = json.dumps( | |
| 98 | + { | |
| 99 | + "event_type": event_type.value if isinstance(event_type, EventType) else event_type, | |
| 100 | + "data": data, | |
| 101 | + } | |
| 102 | + ) | |
| 101 | 103 | result = self._redis.publish(channel, payload) |
| 102 | 104 | logger.debug("Published %s to channel %s (%d receivers)", event_type, channel, result) |
| 103 | 105 | return result |
| 104 | 106 | |
| 105 | 107 | def subscribe( |
| 106 | 108 |
| --- navegador/cluster/pubsub.py | |
| +++ navegador/cluster/pubsub.py | |
| @@ -92,14 +92,16 @@ | |
| 92 | ------- |
| 93 | int |
| 94 | Number of clients that received the message. |
| 95 | """ |
| 96 | channel = _channel_name(event_type) |
| 97 | payload = json.dumps({ |
| 98 | "event_type": event_type.value if isinstance(event_type, EventType) else event_type, |
| 99 | "data": data, |
| 100 | }) |
| 101 | result = self._redis.publish(channel, payload) |
| 102 | logger.debug("Published %s to channel %s (%d receivers)", event_type, channel, result) |
| 103 | return result |
| 104 | |
| 105 | def subscribe( |
| 106 |
| --- navegador/cluster/pubsub.py | |
| +++ navegador/cluster/pubsub.py | |
| @@ -92,14 +92,16 @@ | |
| 92 | ------- |
| 93 | int |
| 94 | Number of clients that received the message. |
| 95 | """ |
| 96 | channel = _channel_name(event_type) |
| 97 | payload = json.dumps( |
| 98 | { |
| 99 | "event_type": event_type.value if isinstance(event_type, EventType) else event_type, |
| 100 | "data": data, |
| 101 | } |
| 102 | ) |
| 103 | result = self._redis.publish(channel, payload) |
| 104 | logger.debug("Published %s to channel %s (%d receivers)", event_type, channel, result) |
| 105 | return result |
| 106 | |
| 107 | def subscribe( |
| 108 |
+1
-1
| --- navegador/cluster/sessions.py | ||
| +++ navegador/cluster/sessions.py | ||
| @@ -27,11 +27,11 @@ | ||
| 27 | 27 | import uuid |
| 28 | 28 | from typing import Any |
| 29 | 29 | |
| 30 | 30 | logger = logging.getLogger(__name__) |
| 31 | 31 | |
| 32 | -_SESSIONS_KEY = "navegador:sessions" # Redis hash: session_id -> JSON | |
| 32 | +_SESSIONS_KEY = "navegador:sessions" # Redis hash: session_id -> JSON | |
| 33 | 33 | _SESSION_INDEX_KEY = "navegador:sessions:ids" # Redis set: all session IDs |
| 34 | 34 | |
| 35 | 35 | |
| 36 | 36 | def _make_session_id() -> str: |
| 37 | 37 | return str(uuid.uuid4()) |
| 38 | 38 |
| --- navegador/cluster/sessions.py | |
| +++ navegador/cluster/sessions.py | |
| @@ -27,11 +27,11 @@ | |
| 27 | import uuid |
| 28 | from typing import Any |
| 29 | |
| 30 | logger = logging.getLogger(__name__) |
| 31 | |
| 32 | _SESSIONS_KEY = "navegador:sessions" # Redis hash: session_id -> JSON |
| 33 | _SESSION_INDEX_KEY = "navegador:sessions:ids" # Redis set: all session IDs |
| 34 | |
| 35 | |
| 36 | def _make_session_id() -> str: |
| 37 | return str(uuid.uuid4()) |
| 38 |
| --- navegador/cluster/sessions.py | |
| +++ navegador/cluster/sessions.py | |
| @@ -27,11 +27,11 @@ | |
| 27 | import uuid |
| 28 | from typing import Any |
| 29 | |
| 30 | logger = logging.getLogger(__name__) |
| 31 | |
| 32 | _SESSIONS_KEY = "navegador:sessions" # Redis hash: session_id -> JSON |
| 33 | _SESSION_INDEX_KEY = "navegador:sessions:ids" # Redis set: all session IDs |
| 34 | |
| 35 | |
| 36 | def _make_session_id() -> str: |
| 37 | return str(uuid.uuid4()) |
| 38 |
+27
-18
| --- navegador/cluster/taskqueue.py | ||
| +++ navegador/cluster/taskqueue.py | ||
| @@ -25,18 +25,18 @@ | ||
| 25 | 25 | |
| 26 | 26 | import json |
| 27 | 27 | import logging |
| 28 | 28 | import time |
| 29 | 29 | import uuid |
| 30 | -from dataclasses import asdict, dataclass, field | |
| 30 | +from dataclasses import dataclass, field | |
| 31 | 31 | from enum import Enum |
| 32 | 32 | from typing import Any |
| 33 | 33 | |
| 34 | 34 | logger = logging.getLogger(__name__) |
| 35 | 35 | |
| 36 | -_QUEUE_KEY = "navegador:taskqueue:pending" # Redis list (RPUSH/BLPOP) | |
| 37 | -_TASK_KEY_PREFIX = "navegador:task:" # Hash per task | |
| 36 | +_QUEUE_KEY = "navegador:taskqueue:pending" # Redis list (RPUSH/BLPOP) | |
| 37 | +_TASK_KEY_PREFIX = "navegador:task:" # Hash per task | |
| 38 | 38 | _INPROGRESS_KEY = "navegador:taskqueue:inprogress" # Set of in-progress task IDs |
| 39 | 39 | |
| 40 | 40 | |
| 41 | 41 | class TaskStatus(str, Enum): |
| 42 | 42 | PENDING = "pending" |
| @@ -162,15 +162,18 @@ | ||
| 162 | 162 | return None |
| 163 | 163 | |
| 164 | 164 | task_id = task_id_raw.decode() if isinstance(task_id_raw, bytes) else task_id_raw |
| 165 | 165 | now = time.time() |
| 166 | 166 | pipe = self._redis.pipeline() |
| 167 | - pipe.hset(_task_key(task_id), mapping={ | |
| 168 | - "status": TaskStatus.IN_PROGRESS.value, | |
| 169 | - "agent_id": agent_id, | |
| 170 | - "updated_at": now, | |
| 171 | - }) | |
| 167 | + pipe.hset( | |
| 168 | + _task_key(task_id), | |
| 169 | + mapping={ | |
| 170 | + "status": TaskStatus.IN_PROGRESS.value, | |
| 171 | + "agent_id": agent_id, | |
| 172 | + "updated_at": now, | |
| 173 | + }, | |
| 174 | + ) | |
| 172 | 175 | pipe.sadd(_INPROGRESS_KEY, task_id) |
| 173 | 176 | pipe.execute() |
| 174 | 177 | |
| 175 | 178 | raw = self._redis.hgetall(_task_key(task_id)) |
| 176 | 179 | task = Task.from_dict(raw) |
| @@ -179,27 +182,33 @@ | ||
| 179 | 182 | |
| 180 | 183 | def complete(self, task_id: str, result: Any = None) -> None: |
| 181 | 184 | """Mark a task as successfully completed.""" |
| 182 | 185 | result_encoded = json.dumps(result) if result is not None else "" |
| 183 | 186 | pipe = self._redis.pipeline() |
| 184 | - pipe.hset(_task_key(task_id), mapping={ | |
| 185 | - "status": TaskStatus.DONE.value, | |
| 186 | - "result": result_encoded, | |
| 187 | - "updated_at": time.time(), | |
| 188 | - }) | |
| 187 | + pipe.hset( | |
| 188 | + _task_key(task_id), | |
| 189 | + mapping={ | |
| 190 | + "status": TaskStatus.DONE.value, | |
| 191 | + "result": result_encoded, | |
| 192 | + "updated_at": time.time(), | |
| 193 | + }, | |
| 194 | + ) | |
| 189 | 195 | pipe.srem(_INPROGRESS_KEY, task_id) |
| 190 | 196 | pipe.execute() |
| 191 | 197 | logger.debug("Task %s completed", task_id) |
| 192 | 198 | |
| 193 | 199 | def fail(self, task_id: str, error: str) -> None: |
| 194 | 200 | """Mark a task as failed with an error message.""" |
| 195 | 201 | pipe = self._redis.pipeline() |
| 196 | - pipe.hset(_task_key(task_id), mapping={ | |
| 197 | - "status": TaskStatus.FAILED.value, | |
| 198 | - "error": error, | |
| 199 | - "updated_at": time.time(), | |
| 200 | - }) | |
| 202 | + pipe.hset( | |
| 203 | + _task_key(task_id), | |
| 204 | + mapping={ | |
| 205 | + "status": TaskStatus.FAILED.value, | |
| 206 | + "error": error, | |
| 207 | + "updated_at": time.time(), | |
| 208 | + }, | |
| 209 | + ) | |
| 201 | 210 | pipe.srem(_INPROGRESS_KEY, task_id) |
| 202 | 211 | pipe.execute() |
| 203 | 212 | logger.debug("Task %s failed: %s", task_id, error) |
| 204 | 213 | |
| 205 | 214 | def status(self, task_id: str) -> dict[str, Any]: |
| 206 | 215 |
| --- navegador/cluster/taskqueue.py | |
| +++ navegador/cluster/taskqueue.py | |
| @@ -25,18 +25,18 @@ | |
| 25 | |
| 26 | import json |
| 27 | import logging |
| 28 | import time |
| 29 | import uuid |
| 30 | from dataclasses import asdict, dataclass, field |
| 31 | from enum import Enum |
| 32 | from typing import Any |
| 33 | |
| 34 | logger = logging.getLogger(__name__) |
| 35 | |
| 36 | _QUEUE_KEY = "navegador:taskqueue:pending" # Redis list (RPUSH/BLPOP) |
| 37 | _TASK_KEY_PREFIX = "navegador:task:" # Hash per task |
| 38 | _INPROGRESS_KEY = "navegador:taskqueue:inprogress" # Set of in-progress task IDs |
| 39 | |
| 40 | |
| 41 | class TaskStatus(str, Enum): |
| 42 | PENDING = "pending" |
| @@ -162,15 +162,18 @@ | |
| 162 | return None |
| 163 | |
| 164 | task_id = task_id_raw.decode() if isinstance(task_id_raw, bytes) else task_id_raw |
| 165 | now = time.time() |
| 166 | pipe = self._redis.pipeline() |
| 167 | pipe.hset(_task_key(task_id), mapping={ |
| 168 | "status": TaskStatus.IN_PROGRESS.value, |
| 169 | "agent_id": agent_id, |
| 170 | "updated_at": now, |
| 171 | }) |
| 172 | pipe.sadd(_INPROGRESS_KEY, task_id) |
| 173 | pipe.execute() |
| 174 | |
| 175 | raw = self._redis.hgetall(_task_key(task_id)) |
| 176 | task = Task.from_dict(raw) |
| @@ -179,27 +182,33 @@ | |
| 179 | |
| 180 | def complete(self, task_id: str, result: Any = None) -> None: |
| 181 | """Mark a task as successfully completed.""" |
| 182 | result_encoded = json.dumps(result) if result is not None else "" |
| 183 | pipe = self._redis.pipeline() |
| 184 | pipe.hset(_task_key(task_id), mapping={ |
| 185 | "status": TaskStatus.DONE.value, |
| 186 | "result": result_encoded, |
| 187 | "updated_at": time.time(), |
| 188 | }) |
| 189 | pipe.srem(_INPROGRESS_KEY, task_id) |
| 190 | pipe.execute() |
| 191 | logger.debug("Task %s completed", task_id) |
| 192 | |
| 193 | def fail(self, task_id: str, error: str) -> None: |
| 194 | """Mark a task as failed with an error message.""" |
| 195 | pipe = self._redis.pipeline() |
| 196 | pipe.hset(_task_key(task_id), mapping={ |
| 197 | "status": TaskStatus.FAILED.value, |
| 198 | "error": error, |
| 199 | "updated_at": time.time(), |
| 200 | }) |
| 201 | pipe.srem(_INPROGRESS_KEY, task_id) |
| 202 | pipe.execute() |
| 203 | logger.debug("Task %s failed: %s", task_id, error) |
| 204 | |
| 205 | def status(self, task_id: str) -> dict[str, Any]: |
| 206 |
| --- navegador/cluster/taskqueue.py | |
| +++ navegador/cluster/taskqueue.py | |
| @@ -25,18 +25,18 @@ | |
| 25 | |
| 26 | import json |
| 27 | import logging |
| 28 | import time |
| 29 | import uuid |
| 30 | from dataclasses import dataclass, field |
| 31 | from enum import Enum |
| 32 | from typing import Any |
| 33 | |
| 34 | logger = logging.getLogger(__name__) |
| 35 | |
| 36 | _QUEUE_KEY = "navegador:taskqueue:pending" # Redis list (RPUSH/BLPOP) |
| 37 | _TASK_KEY_PREFIX = "navegador:task:" # Hash per task |
| 38 | _INPROGRESS_KEY = "navegador:taskqueue:inprogress" # Set of in-progress task IDs |
| 39 | |
| 40 | |
| 41 | class TaskStatus(str, Enum): |
| 42 | PENDING = "pending" |
| @@ -162,15 +162,18 @@ | |
| 162 | return None |
| 163 | |
| 164 | task_id = task_id_raw.decode() if isinstance(task_id_raw, bytes) else task_id_raw |
| 165 | now = time.time() |
| 166 | pipe = self._redis.pipeline() |
| 167 | pipe.hset( |
| 168 | _task_key(task_id), |
| 169 | mapping={ |
| 170 | "status": TaskStatus.IN_PROGRESS.value, |
| 171 | "agent_id": agent_id, |
| 172 | "updated_at": now, |
| 173 | }, |
| 174 | ) |
| 175 | pipe.sadd(_INPROGRESS_KEY, task_id) |
| 176 | pipe.execute() |
| 177 | |
| 178 | raw = self._redis.hgetall(_task_key(task_id)) |
| 179 | task = Task.from_dict(raw) |
| @@ -179,27 +182,33 @@ | |
| 182 | |
| 183 | def complete(self, task_id: str, result: Any = None) -> None: |
| 184 | """Mark a task as successfully completed.""" |
| 185 | result_encoded = json.dumps(result) if result is not None else "" |
| 186 | pipe = self._redis.pipeline() |
| 187 | pipe.hset( |
| 188 | _task_key(task_id), |
| 189 | mapping={ |
| 190 | "status": TaskStatus.DONE.value, |
| 191 | "result": result_encoded, |
| 192 | "updated_at": time.time(), |
| 193 | }, |
| 194 | ) |
| 195 | pipe.srem(_INPROGRESS_KEY, task_id) |
| 196 | pipe.execute() |
| 197 | logger.debug("Task %s completed", task_id) |
| 198 | |
| 199 | def fail(self, task_id: str, error: str) -> None: |
| 200 | """Mark a task as failed with an error message.""" |
| 201 | pipe = self._redis.pipeline() |
| 202 | pipe.hset( |
| 203 | _task_key(task_id), |
| 204 | mapping={ |
| 205 | "status": TaskStatus.FAILED.value, |
| 206 | "error": error, |
| 207 | "updated_at": time.time(), |
| 208 | }, |
| 209 | ) |
| 210 | pipe.srem(_INPROGRESS_KEY, task_id) |
| 211 | pipe.execute() |
| 212 | logger.debug("Task %s failed: %s", task_id, error) |
| 213 | |
| 214 | def status(self, task_id: str) -> dict[str, Any]: |
| 215 |
+1
-4
| --- navegador/codeowners.py | ||
| +++ navegador/codeowners.py | ||
| @@ -12,11 +12,10 @@ | ||
| 12 | 12 | """ |
| 13 | 13 | |
| 14 | 14 | from __future__ import annotations |
| 15 | 15 | |
| 16 | 16 | import logging |
| 17 | -import re | |
| 18 | 17 | from pathlib import Path |
| 19 | 18 | from typing import Any |
| 20 | 19 | |
| 21 | 20 | from navegador.graph.schema import EdgeType, NodeLabel |
| 22 | 21 | from navegador.graph.store import GraphStore |
| @@ -113,13 +112,11 @@ | ||
| 113 | 112 | logger.info("CodeownersIngester: %s", stats) |
| 114 | 113 | return stats |
| 115 | 114 | |
| 116 | 115 | # ── Parsing ─────────────────────────────────────────────────────────────── |
| 117 | 116 | |
| 118 | - def _parse_codeowners( | |
| 119 | - self, path: Path | |
| 120 | - ) -> list[tuple[str, list[str]]]: | |
| 117 | + def _parse_codeowners(self, path: Path) -> list[tuple[str, list[str]]]: | |
| 121 | 118 | """ |
| 122 | 119 | Parse a CODEOWNERS file at *path*. |
| 123 | 120 | |
| 124 | 121 | Returns a list of (pattern, [owner, ...]) tuples. Comment lines and |
| 125 | 122 | blank lines are ignored. |
| 126 | 123 |
| --- navegador/codeowners.py | |
| +++ navegador/codeowners.py | |
| @@ -12,11 +12,10 @@ | |
| 12 | """ |
| 13 | |
| 14 | from __future__ import annotations |
| 15 | |
| 16 | import logging |
| 17 | import re |
| 18 | from pathlib import Path |
| 19 | from typing import Any |
| 20 | |
| 21 | from navegador.graph.schema import EdgeType, NodeLabel |
| 22 | from navegador.graph.store import GraphStore |
| @@ -113,13 +112,11 @@ | |
| 113 | logger.info("CodeownersIngester: %s", stats) |
| 114 | return stats |
| 115 | |
| 116 | # ── Parsing ─────────────────────────────────────────────────────────────── |
| 117 | |
| 118 | def _parse_codeowners( |
| 119 | self, path: Path |
| 120 | ) -> list[tuple[str, list[str]]]: |
| 121 | """ |
| 122 | Parse a CODEOWNERS file at *path*. |
| 123 | |
| 124 | Returns a list of (pattern, [owner, ...]) tuples. Comment lines and |
| 125 | blank lines are ignored. |
| 126 |
| --- navegador/codeowners.py | |
| +++ navegador/codeowners.py | |
| @@ -12,11 +12,10 @@ | |
| 12 | """ |
| 13 | |
| 14 | from __future__ import annotations |
| 15 | |
| 16 | import logging |
| 17 | from pathlib import Path |
| 18 | from typing import Any |
| 19 | |
| 20 | from navegador.graph.schema import EdgeType, NodeLabel |
| 21 | from navegador.graph.store import GraphStore |
| @@ -113,13 +112,11 @@ | |
| 112 | logger.info("CodeownersIngester: %s", stats) |
| 113 | return stats |
| 114 | |
| 115 | # ── Parsing ─────────────────────────────────────────────────────────────── |
| 116 | |
| 117 | def _parse_codeowners(self, path: Path) -> list[tuple[str, list[str]]]: |
| 118 | """ |
| 119 | Parse a CODEOWNERS file at *path*. |
| 120 | |
| 121 | Returns a list of (pattern, [owner, ...]) tuples. Comment lines and |
| 122 | blank lines are ignored. |
| 123 |
+12
-4
| --- navegador/completions.py | ||
| +++ navegador/completions.py | ||
| @@ -45,28 +45,34 @@ | ||
| 45 | 45 | """Return the eval/source line to add to the shell rc file. |
| 46 | 46 | |
| 47 | 47 | Raises ValueError for unsupported shells. |
| 48 | 48 | """ |
| 49 | 49 | if shell not in SUPPORTED_SHELLS: |
| 50 | - raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}") | |
| 50 | + raise ValueError( | |
| 51 | + f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}" | |
| 52 | + ) | |
| 51 | 53 | return _EVAL_LINES[shell] |
| 52 | 54 | |
| 53 | 55 | |
| 54 | 56 | def get_rc_path(shell: str) -> str: |
| 55 | 57 | """Return the default rc file path (unexpanded) for *shell*. |
| 56 | 58 | |
| 57 | 59 | Raises ValueError for unsupported shells. |
| 58 | 60 | """ |
| 59 | 61 | if shell not in SUPPORTED_SHELLS: |
| 60 | - raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}") | |
| 62 | + raise ValueError( | |
| 63 | + f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}" | |
| 64 | + ) | |
| 61 | 65 | return _RC_PATHS[shell] |
| 62 | 66 | |
| 63 | 67 | |
| 64 | 68 | def get_install_instruction(shell: str) -> str: |
| 65 | 69 | """Return a human-readable instruction for adding completions to *shell*.""" |
| 66 | 70 | if shell not in SUPPORTED_SHELLS: |
| 67 | - raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}") | |
| 71 | + raise ValueError( | |
| 72 | + f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}" | |
| 73 | + ) | |
| 68 | 74 | rc = _RC_PATHS[shell] |
| 69 | 75 | line = _EVAL_LINES[shell] |
| 70 | 76 | return f"Add the following line to {rc}:\n\n {line}" |
| 71 | 77 | |
| 72 | 78 | |
| @@ -82,11 +88,13 @@ | ||
| 82 | 88 | |
| 83 | 89 | Raises: |
| 84 | 90 | ValueError: For unsupported shells. |
| 85 | 91 | """ |
| 86 | 92 | if shell not in SUPPORTED_SHELLS: |
| 87 | - raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}") | |
| 93 | + raise ValueError( | |
| 94 | + f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}" | |
| 95 | + ) | |
| 88 | 96 | |
| 89 | 97 | target = Path(rc_path or _RC_PATHS[shell]).expanduser() |
| 90 | 98 | line = _EVAL_LINES[shell] |
| 91 | 99 | |
| 92 | 100 | # Idempotent: don't append if the line is already present |
| 93 | 101 |
| --- navegador/completions.py | |
| +++ navegador/completions.py | |
| @@ -45,28 +45,34 @@ | |
| 45 | """Return the eval/source line to add to the shell rc file. |
| 46 | |
| 47 | Raises ValueError for unsupported shells. |
| 48 | """ |
| 49 | if shell not in SUPPORTED_SHELLS: |
| 50 | raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}") |
| 51 | return _EVAL_LINES[shell] |
| 52 | |
| 53 | |
| 54 | def get_rc_path(shell: str) -> str: |
| 55 | """Return the default rc file path (unexpanded) for *shell*. |
| 56 | |
| 57 | Raises ValueError for unsupported shells. |
| 58 | """ |
| 59 | if shell not in SUPPORTED_SHELLS: |
| 60 | raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}") |
| 61 | return _RC_PATHS[shell] |
| 62 | |
| 63 | |
| 64 | def get_install_instruction(shell: str) -> str: |
| 65 | """Return a human-readable instruction for adding completions to *shell*.""" |
| 66 | if shell not in SUPPORTED_SHELLS: |
| 67 | raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}") |
| 68 | rc = _RC_PATHS[shell] |
| 69 | line = _EVAL_LINES[shell] |
| 70 | return f"Add the following line to {rc}:\n\n {line}" |
| 71 | |
| 72 | |
| @@ -82,11 +88,13 @@ | |
| 82 | |
| 83 | Raises: |
| 84 | ValueError: For unsupported shells. |
| 85 | """ |
| 86 | if shell not in SUPPORTED_SHELLS: |
| 87 | raise ValueError(f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}") |
| 88 | |
| 89 | target = Path(rc_path or _RC_PATHS[shell]).expanduser() |
| 90 | line = _EVAL_LINES[shell] |
| 91 | |
| 92 | # Idempotent: don't append if the line is already present |
| 93 |
| --- navegador/completions.py | |
| +++ navegador/completions.py | |
| @@ -45,28 +45,34 @@ | |
| 45 | """Return the eval/source line to add to the shell rc file. |
| 46 | |
| 47 | Raises ValueError for unsupported shells. |
| 48 | """ |
| 49 | if shell not in SUPPORTED_SHELLS: |
| 50 | raise ValueError( |
| 51 | f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}" |
| 52 | ) |
| 53 | return _EVAL_LINES[shell] |
| 54 | |
| 55 | |
| 56 | def get_rc_path(shell: str) -> str: |
| 57 | """Return the default rc file path (unexpanded) for *shell*. |
| 58 | |
| 59 | Raises ValueError for unsupported shells. |
| 60 | """ |
| 61 | if shell not in SUPPORTED_SHELLS: |
| 62 | raise ValueError( |
| 63 | f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}" |
| 64 | ) |
| 65 | return _RC_PATHS[shell] |
| 66 | |
| 67 | |
| 68 | def get_install_instruction(shell: str) -> str: |
| 69 | """Return a human-readable instruction for adding completions to *shell*.""" |
| 70 | if shell not in SUPPORTED_SHELLS: |
| 71 | raise ValueError( |
| 72 | f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}" |
| 73 | ) |
| 74 | rc = _RC_PATHS[shell] |
| 75 | line = _EVAL_LINES[shell] |
| 76 | return f"Add the following line to {rc}:\n\n {line}" |
| 77 | |
| 78 | |
| @@ -82,11 +88,13 @@ | |
| 88 | |
| 89 | Raises: |
| 90 | ValueError: For unsupported shells. |
| 91 | """ |
| 92 | if shell not in SUPPORTED_SHELLS: |
| 93 | raise ValueError( |
| 94 | f"Unsupported shell: {shell!r}. Choose from: {', '.join(SUPPORTED_SHELLS)}" |
| 95 | ) |
| 96 | |
| 97 | target = Path(rc_path or _RC_PATHS[shell]).expanduser() |
| 98 | line = _EVAL_LINES[shell] |
| 99 | |
| 100 | # Idempotent: don't append if the line is already present |
| 101 |
+1
-3
| --- navegador/context/loader.py | ||
| +++ navegador/context/loader.py | ||
| @@ -339,13 +339,11 @@ | ||
| 339 | 339 | |
| 340 | 340 | # ── Knowledge: find owners ──────────────────────────────────────────────── |
| 341 | 341 | |
| 342 | 342 | def find_owners(self, name: str, file_path: str = "") -> list[ContextNode]: |
| 343 | 343 | """Find people assigned to a named node.""" |
| 344 | - result = self.store.query( | |
| 345 | - queries.FIND_OWNERS, {"name": name, "file_path": file_path} | |
| 346 | - ) | |
| 344 | + result = self.store.query(queries.FIND_OWNERS, {"name": name, "file_path": file_path}) | |
| 347 | 345 | return [ |
| 348 | 346 | ContextNode( |
| 349 | 347 | type="Person", |
| 350 | 348 | name=row[2], |
| 351 | 349 | description=f"role={row[4]}, team={row[5]}", |
| 352 | 350 |
| --- navegador/context/loader.py | |
| +++ navegador/context/loader.py | |
| @@ -339,13 +339,11 @@ | |
| 339 | |
| 340 | # ── Knowledge: find owners ──────────────────────────────────────────────── |
| 341 | |
| 342 | def find_owners(self, name: str, file_path: str = "") -> list[ContextNode]: |
| 343 | """Find people assigned to a named node.""" |
| 344 | result = self.store.query( |
| 345 | queries.FIND_OWNERS, {"name": name, "file_path": file_path} |
| 346 | ) |
| 347 | return [ |
| 348 | ContextNode( |
| 349 | type="Person", |
| 350 | name=row[2], |
| 351 | description=f"role={row[4]}, team={row[5]}", |
| 352 |
| --- navegador/context/loader.py | |
| +++ navegador/context/loader.py | |
| @@ -339,13 +339,11 @@ | |
| 339 | |
| 340 | # ── Knowledge: find owners ──────────────────────────────────────────────── |
| 341 | |
| 342 | def find_owners(self, name: str, file_path: str = "") -> list[ContextNode]: |
| 343 | """Find people assigned to a named node.""" |
| 344 | result = self.store.query(queries.FIND_OWNERS, {"name": name, "file_path": file_path}) |
| 345 | return [ |
| 346 | ContextNode( |
| 347 | type="Person", |
| 348 | name=row[2], |
| 349 | description=f"role={row[4]}, team={row[5]}", |
| 350 |
+1
-4
| --- navegador/diff.py | ||
| +++ navegador/diff.py | ||
| @@ -27,11 +27,10 @@ | ||
| 27 | 27 | from typing import Any |
| 28 | 28 | |
| 29 | 29 | from navegador.graph.store import GraphStore |
| 30 | 30 | from navegador.vcs import GitAdapter |
| 31 | 31 | |
| 32 | - | |
| 33 | 32 | # ── Cypher helpers ──────────────────────────────────────────────────────────── |
| 34 | 33 | |
| 35 | 34 | # All symbols (Function / Class / Method) in a given file with their line ranges |
| 36 | 35 | _SYMBOLS_IN_FILE = """ |
| 37 | 36 | MATCH (n) |
| @@ -101,13 +100,11 @@ | ||
| 101 | 100 | else: |
| 102 | 101 | current_new_start = int(new_info) |
| 103 | 102 | current_new_count = 1 |
| 104 | 103 | if current_file and current_new_count > 0: |
| 105 | 104 | end = current_new_start + max(current_new_count - 1, 0) |
| 106 | - result.setdefault(current_file, []).append( | |
| 107 | - (current_new_start, end) | |
| 108 | - ) | |
| 105 | + result.setdefault(current_file, []).append((current_new_start, end)) | |
| 109 | 106 | except (ValueError, IndexError): |
| 110 | 107 | pass |
| 111 | 108 | |
| 112 | 109 | return result |
| 113 | 110 | |
| 114 | 111 |
| --- navegador/diff.py | |
| +++ navegador/diff.py | |
| @@ -27,11 +27,10 @@ | |
| 27 | from typing import Any |
| 28 | |
| 29 | from navegador.graph.store import GraphStore |
| 30 | from navegador.vcs import GitAdapter |
| 31 | |
| 32 | |
| 33 | # ── Cypher helpers ──────────────────────────────────────────────────────────── |
| 34 | |
| 35 | # All symbols (Function / Class / Method) in a given file with their line ranges |
| 36 | _SYMBOLS_IN_FILE = """ |
| 37 | MATCH (n) |
| @@ -101,13 +100,11 @@ | |
| 101 | else: |
| 102 | current_new_start = int(new_info) |
| 103 | current_new_count = 1 |
| 104 | if current_file and current_new_count > 0: |
| 105 | end = current_new_start + max(current_new_count - 1, 0) |
| 106 | result.setdefault(current_file, []).append( |
| 107 | (current_new_start, end) |
| 108 | ) |
| 109 | except (ValueError, IndexError): |
| 110 | pass |
| 111 | |
| 112 | return result |
| 113 | |
| 114 |
| --- navegador/diff.py | |
| +++ navegador/diff.py | |
| @@ -27,11 +27,10 @@ | |
| 27 | from typing import Any |
| 28 | |
| 29 | from navegador.graph.store import GraphStore |
| 30 | from navegador.vcs import GitAdapter |
| 31 | |
| 32 | # ── Cypher helpers ──────────────────────────────────────────────────────────── |
| 33 | |
| 34 | # All symbols (Function / Class / Method) in a given file with their line ranges |
| 35 | _SYMBOLS_IN_FILE = """ |
| 36 | MATCH (n) |
| @@ -101,13 +100,11 @@ | |
| 100 | else: |
| 101 | current_new_start = int(new_info) |
| 102 | current_new_count = 1 |
| 103 | if current_file and current_new_count > 0: |
| 104 | end = current_new_start + max(current_new_count - 1, 0) |
| 105 | result.setdefault(current_file, []).append((current_new_start, end)) |
| 106 | except (ValueError, IndexError): |
| 107 | pass |
| 108 | |
| 109 | return result |
| 110 | |
| 111 |
+2
-4
| --- navegador/editor.py | ||
| +++ navegador/editor.py | ||
| @@ -49,12 +49,11 @@ | ||
| 49 | 49 | |
| 50 | 50 | Raises ValueError for unsupported editors. |
| 51 | 51 | """ |
| 52 | 52 | if editor not in SUPPORTED_EDITORS: |
| 53 | 53 | raise ValueError( |
| 54 | - f"Unsupported editor {editor!r}. " | |
| 55 | - f"Choose from: {', '.join(SUPPORTED_EDITORS)}" | |
| 54 | + f"Unsupported editor {editor!r}. Choose from: {', '.join(SUPPORTED_EDITORS)}" | |
| 56 | 55 | ) |
| 57 | 56 | return _mcp_block(self.db) |
| 58 | 57 | |
| 59 | 58 | def config_json(self, editor: str) -> str: |
| 60 | 59 | """Return the JSON string for *editor*'s config file.""" |
| @@ -62,12 +61,11 @@ | ||
| 62 | 61 | |
| 63 | 62 | def config_path(self, editor: str) -> str: |
| 64 | 63 | """Return the relative config file path for *editor*.""" |
| 65 | 64 | if editor not in SUPPORTED_EDITORS: |
| 66 | 65 | raise ValueError( |
| 67 | - f"Unsupported editor {editor!r}. " | |
| 68 | - f"Choose from: {', '.join(SUPPORTED_EDITORS)}" | |
| 66 | + f"Unsupported editor {editor!r}. Choose from: {', '.join(SUPPORTED_EDITORS)}" | |
| 69 | 67 | ) |
| 70 | 68 | return _CONFIG_PATHS[editor] |
| 71 | 69 | |
| 72 | 70 | def write_config(self, editor: str, base_dir: str = ".") -> Path: |
| 73 | 71 | """Write the config file to the expected path under *base_dir*. |
| 74 | 72 |
| --- navegador/editor.py | |
| +++ navegador/editor.py | |
| @@ -49,12 +49,11 @@ | |
| 49 | |
| 50 | Raises ValueError for unsupported editors. |
| 51 | """ |
| 52 | if editor not in SUPPORTED_EDITORS: |
| 53 | raise ValueError( |
| 54 | f"Unsupported editor {editor!r}. " |
| 55 | f"Choose from: {', '.join(SUPPORTED_EDITORS)}" |
| 56 | ) |
| 57 | return _mcp_block(self.db) |
| 58 | |
| 59 | def config_json(self, editor: str) -> str: |
| 60 | """Return the JSON string for *editor*'s config file.""" |
| @@ -62,12 +61,11 @@ | |
| 62 | |
| 63 | def config_path(self, editor: str) -> str: |
| 64 | """Return the relative config file path for *editor*.""" |
| 65 | if editor not in SUPPORTED_EDITORS: |
| 66 | raise ValueError( |
| 67 | f"Unsupported editor {editor!r}. " |
| 68 | f"Choose from: {', '.join(SUPPORTED_EDITORS)}" |
| 69 | ) |
| 70 | return _CONFIG_PATHS[editor] |
| 71 | |
| 72 | def write_config(self, editor: str, base_dir: str = ".") -> Path: |
| 73 | """Write the config file to the expected path under *base_dir*. |
| 74 |
| --- navegador/editor.py | |
| +++ navegador/editor.py | |
| @@ -49,12 +49,11 @@ | |
| 49 | |
| 50 | Raises ValueError for unsupported editors. |
| 51 | """ |
| 52 | if editor not in SUPPORTED_EDITORS: |
| 53 | raise ValueError( |
| 54 | f"Unsupported editor {editor!r}. Choose from: {', '.join(SUPPORTED_EDITORS)}" |
| 55 | ) |
| 56 | return _mcp_block(self.db) |
| 57 | |
| 58 | def config_json(self, editor: str) -> str: |
| 59 | """Return the JSON string for *editor*'s config file.""" |
| @@ -62,12 +61,11 @@ | |
| 61 | |
| 62 | def config_path(self, editor: str) -> str: |
| 63 | """Return the relative config file path for *editor*.""" |
| 64 | if editor not in SUPPORTED_EDITORS: |
| 65 | raise ValueError( |
| 66 | f"Unsupported editor {editor!r}. Choose from: {', '.join(SUPPORTED_EDITORS)}" |
| 67 | ) |
| 68 | return _CONFIG_PATHS[editor] |
| 69 | |
| 70 | def write_config(self, editor: str, base_dir: str = ".") -> Path: |
| 71 | """Write the config file to the expected path under *base_dir*. |
| 72 |
+55
-33
| --- navegador/enrichment/express.py | ||
| +++ navegador/enrichment/express.py | ||
| @@ -9,12 +9,22 @@ | ||
| 9 | 9 | """ |
| 10 | 10 | |
| 11 | 11 | from navegador.enrichment.base import EnrichmentResult, FrameworkEnricher |
| 12 | 12 | |
| 13 | 13 | # HTTP method prefixes that indicate a route definition |
| 14 | -_ROUTE_PREFIXES = ("app.get", "app.post", "app.put", "app.delete", "app.patch", "router.get", | |
| 15 | - "router.post", "router.put", "router.delete", "router.patch") | |
| 14 | +_ROUTE_PREFIXES = ( | |
| 15 | + "app.get", | |
| 16 | + "app.post", | |
| 17 | + "app.put", | |
| 18 | + "app.delete", | |
| 19 | + "app.patch", | |
| 20 | + "router.get", | |
| 21 | + "router.post", | |
| 22 | + "router.put", | |
| 23 | + "router.delete", | |
| 24 | + "router.patch", | |
| 25 | +) | |
| 16 | 26 | |
| 17 | 27 | |
| 18 | 28 | class ExpressEnricher(FrameworkEnricher): |
| 19 | 29 | """Enricher for Express.js codebases.""" |
| 20 | 30 | |
| @@ -28,60 +38,72 @@ | ||
| 28 | 38 | |
| 29 | 39 | def enrich(self) -> EnrichmentResult: |
| 30 | 40 | result = EnrichmentResult() |
| 31 | 41 | |
| 32 | 42 | # ── Routes: app.<method> or router.<method> patterns ───────────────── |
| 33 | - route_rows = self.store.query( | |
| 34 | - "MATCH (n) WHERE (n.name STARTS WITH 'app.get' " | |
| 35 | - "OR n.name STARTS WITH 'app.post' " | |
| 36 | - "OR n.name STARTS WITH 'app.put' " | |
| 37 | - "OR n.name STARTS WITH 'app.delete' " | |
| 38 | - "OR n.name STARTS WITH 'app.patch' " | |
| 39 | - "OR n.name STARTS WITH 'router.get' " | |
| 40 | - "OR n.name STARTS WITH 'router.post' " | |
| 41 | - "OR n.name STARTS WITH 'router.put' " | |
| 42 | - "OR n.name STARTS WITH 'router.delete' " | |
| 43 | - "OR n.name STARTS WITH 'router.patch') " | |
| 44 | - "AND n.file_path IS NOT NULL " | |
| 45 | - "RETURN n.name, n.file_path", | |
| 46 | - ).result_set or [] | |
| 43 | + route_rows = ( | |
| 44 | + self.store.query( | |
| 45 | + "MATCH (n) WHERE (n.name STARTS WITH 'app.get' " | |
| 46 | + "OR n.name STARTS WITH 'app.post' " | |
| 47 | + "OR n.name STARTS WITH 'app.put' " | |
| 48 | + "OR n.name STARTS WITH 'app.delete' " | |
| 49 | + "OR n.name STARTS WITH 'app.patch' " | |
| 50 | + "OR n.name STARTS WITH 'router.get' " | |
| 51 | + "OR n.name STARTS WITH 'router.post' " | |
| 52 | + "OR n.name STARTS WITH 'router.put' " | |
| 53 | + "OR n.name STARTS WITH 'router.delete' " | |
| 54 | + "OR n.name STARTS WITH 'router.patch') " | |
| 55 | + "AND n.file_path IS NOT NULL " | |
| 56 | + "RETURN n.name, n.file_path", | |
| 57 | + ).result_set | |
| 58 | + or [] | |
| 59 | + ) | |
| 47 | 60 | for name, file_path in route_rows: |
| 48 | 61 | self._promote_node(name, file_path, "ExpressRoute") |
| 49 | 62 | result.promoted += 1 |
| 50 | 63 | result.patterns_found["routes"] = len(route_rows) |
| 51 | 64 | |
| 52 | 65 | # ── Middleware: app.use calls ───────────────────────────────────────── |
| 53 | - middleware_rows = self.store.query( | |
| 54 | - "MATCH (n) WHERE (n.name STARTS WITH 'app.use' " | |
| 55 | - "OR n.name STARTS WITH 'router.use') " | |
| 56 | - "AND n.file_path IS NOT NULL " | |
| 57 | - "RETURN n.name, n.file_path", | |
| 58 | - ).result_set or [] | |
| 66 | + middleware_rows = ( | |
| 67 | + self.store.query( | |
| 68 | + "MATCH (n) WHERE (n.name STARTS WITH 'app.use' " | |
| 69 | + "OR n.name STARTS WITH 'router.use') " | |
| 70 | + "AND n.file_path IS NOT NULL " | |
| 71 | + "RETURN n.name, n.file_path", | |
| 72 | + ).result_set | |
| 73 | + or [] | |
| 74 | + ) | |
| 59 | 75 | for name, file_path in middleware_rows: |
| 60 | 76 | self._promote_node(name, file_path, "ExpressMiddleware") |
| 61 | 77 | result.promoted += 1 |
| 62 | 78 | result.patterns_found["middleware"] = len(middleware_rows) |
| 63 | 79 | |
| 64 | 80 | # ── Controllers: nodes whose file_path contains /controllers/ ───────── |
| 65 | - controller_rows = self.store.query( | |
| 66 | - "MATCH (n) WHERE n.file_path CONTAINS '/controllers/' " | |
| 67 | - "AND n.name IS NOT NULL " | |
| 68 | - "RETURN n.name, n.file_path", | |
| 69 | - ).result_set or [] | |
| 81 | + controller_rows = ( | |
| 82 | + self.store.query( | |
| 83 | + "MATCH (n) WHERE n.file_path CONTAINS '/controllers/' " | |
| 84 | + "AND n.name IS NOT NULL " | |
| 85 | + "RETURN n.name, n.file_path", | |
| 86 | + ).result_set | |
| 87 | + or [] | |
| 88 | + ) | |
| 70 | 89 | for name, file_path in controller_rows: |
| 71 | 90 | self._promote_node(name, file_path, "ExpressController") |
| 72 | 91 | result.promoted += 1 |
| 73 | 92 | result.patterns_found["controllers"] = len(controller_rows) |
| 74 | 93 | |
| 75 | 94 | # ── Routers: Router() / express.Router() instantiations ────────────── |
| 76 | - router_rows = self.store.query( | |
| 77 | - "MATCH (n) WHERE (n.name = 'Router' OR n.name CONTAINS 'Router()' " | |
| 78 | - "OR n.name CONTAINS 'express.Router') " | |
| 79 | - "AND n.file_path IS NOT NULL " | |
| 80 | - "RETURN n.name, n.file_path", | |
| 81 | - ).result_set or [] | |
| 95 | + router_rows = ( | |
| 96 | + self.store.query( | |
| 97 | + "MATCH (n) WHERE (n.name = 'Router' OR n.name CONTAINS 'Router()' " | |
| 98 | + "OR n.name CONTAINS 'express.Router') " | |
| 99 | + "AND n.file_path IS NOT NULL " | |
| 100 | + "RETURN n.name, n.file_path", | |
| 101 | + ).result_set | |
| 102 | + or [] | |
| 103 | + ) | |
| 82 | 104 | for name, file_path in router_rows: |
| 83 | 105 | self._promote_node(name, file_path, "ExpressRouter") |
| 84 | 106 | result.promoted += 1 |
| 85 | 107 | result.patterns_found["routers"] = len(router_rows) |
| 86 | 108 | |
| 87 | 109 | return result |
| 88 | 110 |
| --- navegador/enrichment/express.py | |
| +++ navegador/enrichment/express.py | |
| @@ -9,12 +9,22 @@ | |
| 9 | """ |
| 10 | |
| 11 | from navegador.enrichment.base import EnrichmentResult, FrameworkEnricher |
| 12 | |
| 13 | # HTTP method prefixes that indicate a route definition |
| 14 | _ROUTE_PREFIXES = ("app.get", "app.post", "app.put", "app.delete", "app.patch", "router.get", |
| 15 | "router.post", "router.put", "router.delete", "router.patch") |
| 16 | |
| 17 | |
| 18 | class ExpressEnricher(FrameworkEnricher): |
| 19 | """Enricher for Express.js codebases.""" |
| 20 | |
| @@ -28,60 +38,72 @@ | |
| 28 | |
| 29 | def enrich(self) -> EnrichmentResult: |
| 30 | result = EnrichmentResult() |
| 31 | |
| 32 | # ── Routes: app.<method> or router.<method> patterns ───────────────── |
| 33 | route_rows = self.store.query( |
| 34 | "MATCH (n) WHERE (n.name STARTS WITH 'app.get' " |
| 35 | "OR n.name STARTS WITH 'app.post' " |
| 36 | "OR n.name STARTS WITH 'app.put' " |
| 37 | "OR n.name STARTS WITH 'app.delete' " |
| 38 | "OR n.name STARTS WITH 'app.patch' " |
| 39 | "OR n.name STARTS WITH 'router.get' " |
| 40 | "OR n.name STARTS WITH 'router.post' " |
| 41 | "OR n.name STARTS WITH 'router.put' " |
| 42 | "OR n.name STARTS WITH 'router.delete' " |
| 43 | "OR n.name STARTS WITH 'router.patch') " |
| 44 | "AND n.file_path IS NOT NULL " |
| 45 | "RETURN n.name, n.file_path", |
| 46 | ).result_set or [] |
| 47 | for name, file_path in route_rows: |
| 48 | self._promote_node(name, file_path, "ExpressRoute") |
| 49 | result.promoted += 1 |
| 50 | result.patterns_found["routes"] = len(route_rows) |
| 51 | |
| 52 | # ── Middleware: app.use calls ───────────────────────────────────────── |
| 53 | middleware_rows = self.store.query( |
| 54 | "MATCH (n) WHERE (n.name STARTS WITH 'app.use' " |
| 55 | "OR n.name STARTS WITH 'router.use') " |
| 56 | "AND n.file_path IS NOT NULL " |
| 57 | "RETURN n.name, n.file_path", |
| 58 | ).result_set or [] |
| 59 | for name, file_path in middleware_rows: |
| 60 | self._promote_node(name, file_path, "ExpressMiddleware") |
| 61 | result.promoted += 1 |
| 62 | result.patterns_found["middleware"] = len(middleware_rows) |
| 63 | |
| 64 | # ── Controllers: nodes whose file_path contains /controllers/ ───────── |
| 65 | controller_rows = self.store.query( |
| 66 | "MATCH (n) WHERE n.file_path CONTAINS '/controllers/' " |
| 67 | "AND n.name IS NOT NULL " |
| 68 | "RETURN n.name, n.file_path", |
| 69 | ).result_set or [] |
| 70 | for name, file_path in controller_rows: |
| 71 | self._promote_node(name, file_path, "ExpressController") |
| 72 | result.promoted += 1 |
| 73 | result.patterns_found["controllers"] = len(controller_rows) |
| 74 | |
| 75 | # ── Routers: Router() / express.Router() instantiations ────────────── |
| 76 | router_rows = self.store.query( |
| 77 | "MATCH (n) WHERE (n.name = 'Router' OR n.name CONTAINS 'Router()' " |
| 78 | "OR n.name CONTAINS 'express.Router') " |
| 79 | "AND n.file_path IS NOT NULL " |
| 80 | "RETURN n.name, n.file_path", |
| 81 | ).result_set or [] |
| 82 | for name, file_path in router_rows: |
| 83 | self._promote_node(name, file_path, "ExpressRouter") |
| 84 | result.promoted += 1 |
| 85 | result.patterns_found["routers"] = len(router_rows) |
| 86 | |
| 87 | return result |
| 88 |
| --- navegador/enrichment/express.py | |
| +++ navegador/enrichment/express.py | |
| @@ -9,12 +9,22 @@ | |
| 9 | """ |
| 10 | |
| 11 | from navegador.enrichment.base import EnrichmentResult, FrameworkEnricher |
| 12 | |
| 13 | # HTTP method prefixes that indicate a route definition |
| 14 | _ROUTE_PREFIXES = ( |
| 15 | "app.get", |
| 16 | "app.post", |
| 17 | "app.put", |
| 18 | "app.delete", |
| 19 | "app.patch", |
| 20 | "router.get", |
| 21 | "router.post", |
| 22 | "router.put", |
| 23 | "router.delete", |
| 24 | "router.patch", |
| 25 | ) |
| 26 | |
| 27 | |
| 28 | class ExpressEnricher(FrameworkEnricher): |
| 29 | """Enricher for Express.js codebases.""" |
| 30 | |
| @@ -28,60 +38,72 @@ | |
| 38 | |
| 39 | def enrich(self) -> EnrichmentResult: |
| 40 | result = EnrichmentResult() |
| 41 | |
| 42 | # ── Routes: app.<method> or router.<method> patterns ───────────────── |
| 43 | route_rows = ( |
| 44 | self.store.query( |
| 45 | "MATCH (n) WHERE (n.name STARTS WITH 'app.get' " |
| 46 | "OR n.name STARTS WITH 'app.post' " |
| 47 | "OR n.name STARTS WITH 'app.put' " |
| 48 | "OR n.name STARTS WITH 'app.delete' " |
| 49 | "OR n.name STARTS WITH 'app.patch' " |
| 50 | "OR n.name STARTS WITH 'router.get' " |
| 51 | "OR n.name STARTS WITH 'router.post' " |
| 52 | "OR n.name STARTS WITH 'router.put' " |
| 53 | "OR n.name STARTS WITH 'router.delete' " |
| 54 | "OR n.name STARTS WITH 'router.patch') " |
| 55 | "AND n.file_path IS NOT NULL " |
| 56 | "RETURN n.name, n.file_path", |
| 57 | ).result_set |
| 58 | or [] |
| 59 | ) |
| 60 | for name, file_path in route_rows: |
| 61 | self._promote_node(name, file_path, "ExpressRoute") |
| 62 | result.promoted += 1 |
| 63 | result.patterns_found["routes"] = len(route_rows) |
| 64 | |
| 65 | # ── Middleware: app.use calls ───────────────────────────────────────── |
| 66 | middleware_rows = ( |
| 67 | self.store.query( |
| 68 | "MATCH (n) WHERE (n.name STARTS WITH 'app.use' " |
| 69 | "OR n.name STARTS WITH 'router.use') " |
| 70 | "AND n.file_path IS NOT NULL " |
| 71 | "RETURN n.name, n.file_path", |
| 72 | ).result_set |
| 73 | or [] |
| 74 | ) |
| 75 | for name, file_path in middleware_rows: |
| 76 | self._promote_node(name, file_path, "ExpressMiddleware") |
| 77 | result.promoted += 1 |
| 78 | result.patterns_found["middleware"] = len(middleware_rows) |
| 79 | |
| 80 | # ── Controllers: nodes whose file_path contains /controllers/ ───────── |
| 81 | controller_rows = ( |
| 82 | self.store.query( |
| 83 | "MATCH (n) WHERE n.file_path CONTAINS '/controllers/' " |
| 84 | "AND n.name IS NOT NULL " |
| 85 | "RETURN n.name, n.file_path", |
| 86 | ).result_set |
| 87 | or [] |
| 88 | ) |
| 89 | for name, file_path in controller_rows: |
| 90 | self._promote_node(name, file_path, "ExpressController") |
| 91 | result.promoted += 1 |
| 92 | result.patterns_found["controllers"] = len(controller_rows) |
| 93 | |
| 94 | # ── Routers: Router() / express.Router() instantiations ────────────── |
| 95 | router_rows = ( |
| 96 | self.store.query( |
| 97 | "MATCH (n) WHERE (n.name = 'Router' OR n.name CONTAINS 'Router()' " |
| 98 | "OR n.name CONTAINS 'express.Router') " |
| 99 | "AND n.file_path IS NOT NULL " |
| 100 | "RETURN n.name, n.file_path", |
| 101 | ).result_set |
| 102 | or [] |
| 103 | ) |
| 104 | for name, file_path in router_rows: |
| 105 | self._promote_node(name, file_path, "ExpressRouter") |
| 106 | result.promoted += 1 |
| 107 | result.patterns_found["routers"] = len(router_rows) |
| 108 | |
| 109 | return result |
| 110 |
+2
-4
| --- navegador/enrichment/fastapi.py | ||
| +++ navegador/enrichment/fastapi.py | ||
| @@ -76,12 +76,11 @@ | ||
| 76 | 76 | ) |
| 77 | 77 | rows = result.result_set or [] |
| 78 | 78 | for row in rows: |
| 79 | 79 | name, file_path = row[0], row[1] |
| 80 | 80 | if name and file_path: |
| 81 | - self._promote_node(name, file_path, "Route", | |
| 82 | - {"http_method": http_method}) | |
| 81 | + self._promote_node(name, file_path, "Route", {"http_method": http_method}) | |
| 83 | 82 | promoted += 1 |
| 84 | 83 | |
| 85 | 84 | # Strategy 2: signature / docstring heuristics (no Decorator nodes) |
| 86 | 85 | for http_method in _HTTP_METHODS: |
| 87 | 86 | for prop in ("signature", "docstring"): |
| @@ -94,12 +93,11 @@ | ||
| 94 | 93 | ) |
| 95 | 94 | rows = result.result_set or [] |
| 96 | 95 | for row in rows: |
| 97 | 96 | name, file_path = row[0], row[1] |
| 98 | 97 | if name and file_path: |
| 99 | - self._promote_node(name, file_path, "Route", | |
| 100 | - {"http_method": http_method}) | |
| 98 | + self._promote_node(name, file_path, "Route", {"http_method": http_method}) | |
| 101 | 99 | promoted += 1 |
| 102 | 100 | |
| 103 | 101 | return promoted |
| 104 | 102 | |
| 105 | 103 | def _enrich_dependencies(self) -> int: |
| 106 | 104 |
| --- navegador/enrichment/fastapi.py | |
| +++ navegador/enrichment/fastapi.py | |
| @@ -76,12 +76,11 @@ | |
| 76 | ) |
| 77 | rows = result.result_set or [] |
| 78 | for row in rows: |
| 79 | name, file_path = row[0], row[1] |
| 80 | if name and file_path: |
| 81 | self._promote_node(name, file_path, "Route", |
| 82 | {"http_method": http_method}) |
| 83 | promoted += 1 |
| 84 | |
| 85 | # Strategy 2: signature / docstring heuristics (no Decorator nodes) |
| 86 | for http_method in _HTTP_METHODS: |
| 87 | for prop in ("signature", "docstring"): |
| @@ -94,12 +93,11 @@ | |
| 94 | ) |
| 95 | rows = result.result_set or [] |
| 96 | for row in rows: |
| 97 | name, file_path = row[0], row[1] |
| 98 | if name and file_path: |
| 99 | self._promote_node(name, file_path, "Route", |
| 100 | {"http_method": http_method}) |
| 101 | promoted += 1 |
| 102 | |
| 103 | return promoted |
| 104 | |
| 105 | def _enrich_dependencies(self) -> int: |
| 106 |
| --- navegador/enrichment/fastapi.py | |
| +++ navegador/enrichment/fastapi.py | |
| @@ -76,12 +76,11 @@ | |
| 76 | ) |
| 77 | rows = result.result_set or [] |
| 78 | for row in rows: |
| 79 | name, file_path = row[0], row[1] |
| 80 | if name and file_path: |
| 81 | self._promote_node(name, file_path, "Route", {"http_method": http_method}) |
| 82 | promoted += 1 |
| 83 | |
| 84 | # Strategy 2: signature / docstring heuristics (no Decorator nodes) |
| 85 | for http_method in _HTTP_METHODS: |
| 86 | for prop in ("signature", "docstring"): |
| @@ -94,12 +93,11 @@ | |
| 93 | ) |
| 94 | rows = result.result_set or [] |
| 95 | for row in rows: |
| 96 | name, file_path = row[0], row[1] |
| 97 | if name and file_path: |
| 98 | self._promote_node(name, file_path, "Route", {"http_method": http_method}) |
| 99 | promoted += 1 |
| 100 | |
| 101 | return promoted |
| 102 | |
| 103 | def _enrich_dependencies(self) -> int: |
| 104 |
+43
-28
| --- navegador/enrichment/react.py | ||
| +++ navegador/enrichment/react.py | ||
| @@ -25,62 +25,77 @@ | ||
| 25 | 25 | |
| 26 | 26 | def enrich(self) -> EnrichmentResult: |
| 27 | 27 | result = EnrichmentResult() |
| 28 | 28 | |
| 29 | 29 | # ── Components: functions/classes defined in .jsx or .tsx files ────── |
| 30 | - component_rows = self.store.query( | |
| 31 | - "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') " | |
| 32 | - "AND n.name IS NOT NULL " | |
| 33 | - "RETURN n.name, n.file_path", | |
| 34 | - ).result_set or [] | |
| 30 | + component_rows = ( | |
| 31 | + self.store.query( | |
| 32 | + "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') " | |
| 33 | + "AND n.name IS NOT NULL " | |
| 34 | + "RETURN n.name, n.file_path", | |
| 35 | + ).result_set | |
| 36 | + or [] | |
| 37 | + ) | |
| 35 | 38 | for name, file_path in component_rows: |
| 36 | 39 | self._promote_node(name, file_path, "ReactComponent") |
| 37 | 40 | result.promoted += 1 |
| 38 | 41 | result.patterns_found["components"] = len(component_rows) |
| 39 | 42 | |
| 40 | 43 | # ── Pages: nodes whose file_path contains /pages/ ──────────────────── |
| 41 | - page_rows = self.store.query( | |
| 42 | - "MATCH (n) WHERE n.file_path CONTAINS '/pages/' " | |
| 43 | - "AND NOT n.file_path CONTAINS '/pages/api/' " | |
| 44 | - "AND n.name IS NOT NULL " | |
| 45 | - "RETURN n.name, n.file_path", | |
| 46 | - ).result_set or [] | |
| 44 | + page_rows = ( | |
| 45 | + self.store.query( | |
| 46 | + "MATCH (n) WHERE n.file_path CONTAINS '/pages/' " | |
| 47 | + "AND NOT n.file_path CONTAINS '/pages/api/' " | |
| 48 | + "AND n.name IS NOT NULL " | |
| 49 | + "RETURN n.name, n.file_path", | |
| 50 | + ).result_set | |
| 51 | + or [] | |
| 52 | + ) | |
| 47 | 53 | for name, file_path in page_rows: |
| 48 | 54 | self._promote_node(name, file_path, "NextPage") |
| 49 | 55 | result.promoted += 1 |
| 50 | 56 | result.patterns_found["pages"] = len(page_rows) |
| 51 | 57 | |
| 52 | 58 | # ── API Routes: nodes under pages/api/ or app/api/ ─────────────────── |
| 53 | - api_rows = self.store.query( | |
| 54 | - "MATCH (n) WHERE (n.file_path CONTAINS '/pages/api/' " | |
| 55 | - "OR n.file_path CONTAINS '/app/api/') " | |
| 56 | - "AND n.name IS NOT NULL " | |
| 57 | - "RETURN n.name, n.file_path", | |
| 58 | - ).result_set or [] | |
| 59 | + api_rows = ( | |
| 60 | + self.store.query( | |
| 61 | + "MATCH (n) WHERE (n.file_path CONTAINS '/pages/api/' " | |
| 62 | + "OR n.file_path CONTAINS '/app/api/') " | |
| 63 | + "AND n.name IS NOT NULL " | |
| 64 | + "RETURN n.name, n.file_path", | |
| 65 | + ).result_set | |
| 66 | + or [] | |
| 67 | + ) | |
| 59 | 68 | for name, file_path in api_rows: |
| 60 | 69 | self._promote_node(name, file_path, "NextApiRoute") |
| 61 | 70 | result.promoted += 1 |
| 62 | 71 | result.patterns_found["api_routes"] = len(api_rows) |
| 63 | 72 | |
| 64 | 73 | # ── Hooks: functions whose name starts with "use" ──────────────────── |
| 65 | - hook_rows = self.store.query( | |
| 66 | - "MATCH (n) WHERE n.name STARTS WITH 'use' " | |
| 67 | - "AND n.name <> 'use' " | |
| 68 | - "AND n.file_path IS NOT NULL " | |
| 69 | - "RETURN n.name, n.file_path", | |
| 70 | - ).result_set or [] | |
| 74 | + hook_rows = ( | |
| 75 | + self.store.query( | |
| 76 | + "MATCH (n) WHERE n.name STARTS WITH 'use' " | |
| 77 | + "AND n.name <> 'use' " | |
| 78 | + "AND n.file_path IS NOT NULL " | |
| 79 | + "RETURN n.name, n.file_path", | |
| 80 | + ).result_set | |
| 81 | + or [] | |
| 82 | + ) | |
| 71 | 83 | for name, file_path in hook_rows: |
| 72 | 84 | self._promote_node(name, file_path, "ReactHook") |
| 73 | 85 | result.promoted += 1 |
| 74 | 86 | result.patterns_found["hooks"] = len(hook_rows) |
| 75 | 87 | |
| 76 | 88 | # ── Stores: createStore / useStore patterns ─────────────────────────── |
| 77 | - store_rows = self.store.query( | |
| 78 | - "MATCH (n) WHERE (n.name CONTAINS 'createStore' OR n.name CONTAINS 'useStore') " | |
| 79 | - "AND n.file_path IS NOT NULL " | |
| 80 | - "RETURN n.name, n.file_path", | |
| 81 | - ).result_set or [] | |
| 89 | + store_rows = ( | |
| 90 | + self.store.query( | |
| 91 | + "MATCH (n) WHERE (n.name CONTAINS 'createStore' OR n.name CONTAINS 'useStore') " | |
| 92 | + "AND n.file_path IS NOT NULL " | |
| 93 | + "RETURN n.name, n.file_path", | |
| 94 | + ).result_set | |
| 95 | + or [] | |
| 96 | + ) | |
| 82 | 97 | for name, file_path in store_rows: |
| 83 | 98 | self._promote_node(name, file_path, "ReactStore") |
| 84 | 99 | result.promoted += 1 |
| 85 | 100 | result.patterns_found["stores"] = len(store_rows) |
| 86 | 101 | |
| 87 | 102 |
| --- navegador/enrichment/react.py | |
| +++ navegador/enrichment/react.py | |
| @@ -25,62 +25,77 @@ | |
| 25 | |
| 26 | def enrich(self) -> EnrichmentResult: |
| 27 | result = EnrichmentResult() |
| 28 | |
| 29 | # ── Components: functions/classes defined in .jsx or .tsx files ────── |
| 30 | component_rows = self.store.query( |
| 31 | "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') " |
| 32 | "AND n.name IS NOT NULL " |
| 33 | "RETURN n.name, n.file_path", |
| 34 | ).result_set or [] |
| 35 | for name, file_path in component_rows: |
| 36 | self._promote_node(name, file_path, "ReactComponent") |
| 37 | result.promoted += 1 |
| 38 | result.patterns_found["components"] = len(component_rows) |
| 39 | |
| 40 | # ── Pages: nodes whose file_path contains /pages/ ──────────────────── |
| 41 | page_rows = self.store.query( |
| 42 | "MATCH (n) WHERE n.file_path CONTAINS '/pages/' " |
| 43 | "AND NOT n.file_path CONTAINS '/pages/api/' " |
| 44 | "AND n.name IS NOT NULL " |
| 45 | "RETURN n.name, n.file_path", |
| 46 | ).result_set or [] |
| 47 | for name, file_path in page_rows: |
| 48 | self._promote_node(name, file_path, "NextPage") |
| 49 | result.promoted += 1 |
| 50 | result.patterns_found["pages"] = len(page_rows) |
| 51 | |
| 52 | # ── API Routes: nodes under pages/api/ or app/api/ ─────────────────── |
| 53 | api_rows = self.store.query( |
| 54 | "MATCH (n) WHERE (n.file_path CONTAINS '/pages/api/' " |
| 55 | "OR n.file_path CONTAINS '/app/api/') " |
| 56 | "AND n.name IS NOT NULL " |
| 57 | "RETURN n.name, n.file_path", |
| 58 | ).result_set or [] |
| 59 | for name, file_path in api_rows: |
| 60 | self._promote_node(name, file_path, "NextApiRoute") |
| 61 | result.promoted += 1 |
| 62 | result.patterns_found["api_routes"] = len(api_rows) |
| 63 | |
| 64 | # ── Hooks: functions whose name starts with "use" ──────────────────── |
| 65 | hook_rows = self.store.query( |
| 66 | "MATCH (n) WHERE n.name STARTS WITH 'use' " |
| 67 | "AND n.name <> 'use' " |
| 68 | "AND n.file_path IS NOT NULL " |
| 69 | "RETURN n.name, n.file_path", |
| 70 | ).result_set or [] |
| 71 | for name, file_path in hook_rows: |
| 72 | self._promote_node(name, file_path, "ReactHook") |
| 73 | result.promoted += 1 |
| 74 | result.patterns_found["hooks"] = len(hook_rows) |
| 75 | |
| 76 | # ── Stores: createStore / useStore patterns ─────────────────────────── |
| 77 | store_rows = self.store.query( |
| 78 | "MATCH (n) WHERE (n.name CONTAINS 'createStore' OR n.name CONTAINS 'useStore') " |
| 79 | "AND n.file_path IS NOT NULL " |
| 80 | "RETURN n.name, n.file_path", |
| 81 | ).result_set or [] |
| 82 | for name, file_path in store_rows: |
| 83 | self._promote_node(name, file_path, "ReactStore") |
| 84 | result.promoted += 1 |
| 85 | result.patterns_found["stores"] = len(store_rows) |
| 86 | |
| 87 |
| --- navegador/enrichment/react.py | |
| +++ navegador/enrichment/react.py | |
| @@ -25,62 +25,77 @@ | |
| 25 | |
| 26 | def enrich(self) -> EnrichmentResult: |
| 27 | result = EnrichmentResult() |
| 28 | |
| 29 | # ── Components: functions/classes defined in .jsx or .tsx files ────── |
| 30 | component_rows = ( |
| 31 | self.store.query( |
| 32 | "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') " |
| 33 | "AND n.name IS NOT NULL " |
| 34 | "RETURN n.name, n.file_path", |
| 35 | ).result_set |
| 36 | or [] |
| 37 | ) |
| 38 | for name, file_path in component_rows: |
| 39 | self._promote_node(name, file_path, "ReactComponent") |
| 40 | result.promoted += 1 |
| 41 | result.patterns_found["components"] = len(component_rows) |
| 42 | |
| 43 | # ── Pages: nodes whose file_path contains /pages/ ──────────────────── |
| 44 | page_rows = ( |
| 45 | self.store.query( |
| 46 | "MATCH (n) WHERE n.file_path CONTAINS '/pages/' " |
| 47 | "AND NOT n.file_path CONTAINS '/pages/api/' " |
| 48 | "AND n.name IS NOT NULL " |
| 49 | "RETURN n.name, n.file_path", |
| 50 | ).result_set |
| 51 | or [] |
| 52 | ) |
| 53 | for name, file_path in page_rows: |
| 54 | self._promote_node(name, file_path, "NextPage") |
| 55 | result.promoted += 1 |
| 56 | result.patterns_found["pages"] = len(page_rows) |
| 57 | |
| 58 | # ── API Routes: nodes under pages/api/ or app/api/ ─────────────────── |
| 59 | api_rows = ( |
| 60 | self.store.query( |
| 61 | "MATCH (n) WHERE (n.file_path CONTAINS '/pages/api/' " |
| 62 | "OR n.file_path CONTAINS '/app/api/') " |
| 63 | "AND n.name IS NOT NULL " |
| 64 | "RETURN n.name, n.file_path", |
| 65 | ).result_set |
| 66 | or [] |
| 67 | ) |
| 68 | for name, file_path in api_rows: |
| 69 | self._promote_node(name, file_path, "NextApiRoute") |
| 70 | result.promoted += 1 |
| 71 | result.patterns_found["api_routes"] = len(api_rows) |
| 72 | |
| 73 | # ── Hooks: functions whose name starts with "use" ──────────────────── |
| 74 | hook_rows = ( |
| 75 | self.store.query( |
| 76 | "MATCH (n) WHERE n.name STARTS WITH 'use' " |
| 77 | "AND n.name <> 'use' " |
| 78 | "AND n.file_path IS NOT NULL " |
| 79 | "RETURN n.name, n.file_path", |
| 80 | ).result_set |
| 81 | or [] |
| 82 | ) |
| 83 | for name, file_path in hook_rows: |
| 84 | self._promote_node(name, file_path, "ReactHook") |
| 85 | result.promoted += 1 |
| 86 | result.patterns_found["hooks"] = len(hook_rows) |
| 87 | |
| 88 | # ── Stores: createStore / useStore patterns ─────────────────────────── |
| 89 | store_rows = ( |
| 90 | self.store.query( |
| 91 | "MATCH (n) WHERE (n.name CONTAINS 'createStore' OR n.name CONTAINS 'useStore') " |
| 92 | "AND n.file_path IS NOT NULL " |
| 93 | "RETURN n.name, n.file_path", |
| 94 | ).result_set |
| 95 | or [] |
| 96 | ) |
| 97 | for name, file_path in store_rows: |
| 98 | self._promote_node(name, file_path, "ReactStore") |
| 99 | result.promoted += 1 |
| 100 | result.patterns_found["stores"] = len(store_rows) |
| 101 | |
| 102 |
+35
-25
| --- navegador/enrichment/react_native.py | ||
| +++ navegador/enrichment/react_native.py | ||
| @@ -34,53 +34,63 @@ | ||
| 34 | 34 | |
| 35 | 35 | def enrich(self) -> EnrichmentResult: |
| 36 | 36 | result = EnrichmentResult() |
| 37 | 37 | |
| 38 | 38 | # ── Components: functions/classes in .jsx or .tsx files ────────────── |
| 39 | - component_rows = self.store.query( | |
| 40 | - "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') " | |
| 41 | - "AND n.name IS NOT NULL " | |
| 42 | - "RETURN n.name, n.file_path", | |
| 43 | - ).result_set or [] | |
| 39 | + component_rows = ( | |
| 40 | + self.store.query( | |
| 41 | + "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') " | |
| 42 | + "AND n.name IS NOT NULL " | |
| 43 | + "RETURN n.name, n.file_path", | |
| 44 | + ).result_set | |
| 45 | + or [] | |
| 46 | + ) | |
| 44 | 47 | for name, file_path in component_rows: |
| 45 | 48 | self._promote_node(name, file_path, "RNComponent") |
| 46 | 49 | result.promoted += 1 |
| 47 | 50 | result.patterns_found["components"] = len(component_rows) |
| 48 | 51 | |
| 49 | 52 | # ── Screens: nodes under screens/ or whose names end with "Screen" ─── |
| 50 | - screen_rows = self.store.query( | |
| 51 | - "MATCH (n) WHERE (n.file_path CONTAINS '/screens/' " | |
| 52 | - "OR (n.name IS NOT NULL AND n.name ENDS WITH 'Screen')) " | |
| 53 | - "AND n.name IS NOT NULL " | |
| 54 | - "RETURN n.name, n.file_path", | |
| 55 | - ).result_set or [] | |
| 53 | + screen_rows = ( | |
| 54 | + self.store.query( | |
| 55 | + "MATCH (n) WHERE (n.file_path CONTAINS '/screens/' " | |
| 56 | + "OR (n.name IS NOT NULL AND n.name ENDS WITH 'Screen')) " | |
| 57 | + "AND n.name IS NOT NULL " | |
| 58 | + "RETURN n.name, n.file_path", | |
| 59 | + ).result_set | |
| 60 | + or [] | |
| 61 | + ) | |
| 56 | 62 | for name, file_path in screen_rows: |
| 57 | 63 | self._promote_node(name, file_path, "RNScreen") |
| 58 | 64 | result.promoted += 1 |
| 59 | 65 | result.patterns_found["screens"] = len(screen_rows) |
| 60 | 66 | |
| 61 | 67 | # ── Hooks: functions whose name starts with "use" ───────────────────── |
| 62 | - hook_rows = self.store.query( | |
| 63 | - "MATCH (n) WHERE n.name STARTS WITH 'use' " | |
| 64 | - "AND n.name <> 'use' " | |
| 65 | - "AND n.file_path IS NOT NULL " | |
| 66 | - "RETURN n.name, n.file_path", | |
| 67 | - ).result_set or [] | |
| 68 | + hook_rows = ( | |
| 69 | + self.store.query( | |
| 70 | + "MATCH (n) WHERE n.name STARTS WITH 'use' " | |
| 71 | + "AND n.name <> 'use' " | |
| 72 | + "AND n.file_path IS NOT NULL " | |
| 73 | + "RETURN n.name, n.file_path", | |
| 74 | + ).result_set | |
| 75 | + or [] | |
| 76 | + ) | |
| 68 | 77 | for name, file_path in hook_rows: |
| 69 | 78 | self._promote_node(name, file_path, "RNHook") |
| 70 | 79 | result.promoted += 1 |
| 71 | 80 | result.patterns_found["hooks"] = len(hook_rows) |
| 72 | 81 | |
| 73 | 82 | # ── Navigation: navigator factory / container patterns ──────────────── |
| 74 | - nav_conditions = " OR ".join( | |
| 75 | - f"n.name CONTAINS '{pat}'" for pat in _NAVIGATION_PATTERNS | |
| 76 | - ) | |
| 77 | - nav_rows = self.store.query( | |
| 78 | - f"MATCH (n) WHERE ({nav_conditions}) " | |
| 79 | - "AND n.file_path IS NOT NULL " | |
| 80 | - "RETURN n.name, n.file_path", | |
| 81 | - ).result_set or [] | |
| 83 | + nav_conditions = " OR ".join(f"n.name CONTAINS '{pat}'" for pat in _NAVIGATION_PATTERNS) | |
| 84 | + nav_rows = ( | |
| 85 | + self.store.query( | |
| 86 | + f"MATCH (n) WHERE ({nav_conditions}) " | |
| 87 | + "AND n.file_path IS NOT NULL " | |
| 88 | + "RETURN n.name, n.file_path", | |
| 89 | + ).result_set | |
| 90 | + or [] | |
| 91 | + ) | |
| 82 | 92 | for name, file_path in nav_rows: |
| 83 | 93 | self._promote_node(name, file_path, "RNNavigation") |
| 84 | 94 | result.promoted += 1 |
| 85 | 95 | result.patterns_found["navigation"] = len(nav_rows) |
| 86 | 96 | |
| 87 | 97 |
| --- navegador/enrichment/react_native.py | |
| +++ navegador/enrichment/react_native.py | |
| @@ -34,53 +34,63 @@ | |
| 34 | |
| 35 | def enrich(self) -> EnrichmentResult: |
| 36 | result = EnrichmentResult() |
| 37 | |
| 38 | # ── Components: functions/classes in .jsx or .tsx files ────────────── |
| 39 | component_rows = self.store.query( |
| 40 | "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') " |
| 41 | "AND n.name IS NOT NULL " |
| 42 | "RETURN n.name, n.file_path", |
| 43 | ).result_set or [] |
| 44 | for name, file_path in component_rows: |
| 45 | self._promote_node(name, file_path, "RNComponent") |
| 46 | result.promoted += 1 |
| 47 | result.patterns_found["components"] = len(component_rows) |
| 48 | |
| 49 | # ── Screens: nodes under screens/ or whose names end with "Screen" ─── |
| 50 | screen_rows = self.store.query( |
| 51 | "MATCH (n) WHERE (n.file_path CONTAINS '/screens/' " |
| 52 | "OR (n.name IS NOT NULL AND n.name ENDS WITH 'Screen')) " |
| 53 | "AND n.name IS NOT NULL " |
| 54 | "RETURN n.name, n.file_path", |
| 55 | ).result_set or [] |
| 56 | for name, file_path in screen_rows: |
| 57 | self._promote_node(name, file_path, "RNScreen") |
| 58 | result.promoted += 1 |
| 59 | result.patterns_found["screens"] = len(screen_rows) |
| 60 | |
| 61 | # ── Hooks: functions whose name starts with "use" ───────────────────── |
| 62 | hook_rows = self.store.query( |
| 63 | "MATCH (n) WHERE n.name STARTS WITH 'use' " |
| 64 | "AND n.name <> 'use' " |
| 65 | "AND n.file_path IS NOT NULL " |
| 66 | "RETURN n.name, n.file_path", |
| 67 | ).result_set or [] |
| 68 | for name, file_path in hook_rows: |
| 69 | self._promote_node(name, file_path, "RNHook") |
| 70 | result.promoted += 1 |
| 71 | result.patterns_found["hooks"] = len(hook_rows) |
| 72 | |
| 73 | # ── Navigation: navigator factory / container patterns ──────────────── |
| 74 | nav_conditions = " OR ".join( |
| 75 | f"n.name CONTAINS '{pat}'" for pat in _NAVIGATION_PATTERNS |
| 76 | ) |
| 77 | nav_rows = self.store.query( |
| 78 | f"MATCH (n) WHERE ({nav_conditions}) " |
| 79 | "AND n.file_path IS NOT NULL " |
| 80 | "RETURN n.name, n.file_path", |
| 81 | ).result_set or [] |
| 82 | for name, file_path in nav_rows: |
| 83 | self._promote_node(name, file_path, "RNNavigation") |
| 84 | result.promoted += 1 |
| 85 | result.patterns_found["navigation"] = len(nav_rows) |
| 86 | |
| 87 |
| --- navegador/enrichment/react_native.py | |
| +++ navegador/enrichment/react_native.py | |
| @@ -34,53 +34,63 @@ | |
| 34 | |
| 35 | def enrich(self) -> EnrichmentResult: |
| 36 | result = EnrichmentResult() |
| 37 | |
| 38 | # ── Components: functions/classes in .jsx or .tsx files ────────────── |
| 39 | component_rows = ( |
| 40 | self.store.query( |
| 41 | "MATCH (n) WHERE (n.file_path CONTAINS '.jsx' OR n.file_path CONTAINS '.tsx') " |
| 42 | "AND n.name IS NOT NULL " |
| 43 | "RETURN n.name, n.file_path", |
| 44 | ).result_set |
| 45 | or [] |
| 46 | ) |
| 47 | for name, file_path in component_rows: |
| 48 | self._promote_node(name, file_path, "RNComponent") |
| 49 | result.promoted += 1 |
| 50 | result.patterns_found["components"] = len(component_rows) |
| 51 | |
| 52 | # ── Screens: nodes under screens/ or whose names end with "Screen" ─── |
| 53 | screen_rows = ( |
| 54 | self.store.query( |
| 55 | "MATCH (n) WHERE (n.file_path CONTAINS '/screens/' " |
| 56 | "OR (n.name IS NOT NULL AND n.name ENDS WITH 'Screen')) " |
| 57 | "AND n.name IS NOT NULL " |
| 58 | "RETURN n.name, n.file_path", |
| 59 | ).result_set |
| 60 | or [] |
| 61 | ) |
| 62 | for name, file_path in screen_rows: |
| 63 | self._promote_node(name, file_path, "RNScreen") |
| 64 | result.promoted += 1 |
| 65 | result.patterns_found["screens"] = len(screen_rows) |
| 66 | |
| 67 | # ── Hooks: functions whose name starts with "use" ───────────────────── |
| 68 | hook_rows = ( |
| 69 | self.store.query( |
| 70 | "MATCH (n) WHERE n.name STARTS WITH 'use' " |
| 71 | "AND n.name <> 'use' " |
| 72 | "AND n.file_path IS NOT NULL " |
| 73 | "RETURN n.name, n.file_path", |
| 74 | ).result_set |
| 75 | or [] |
| 76 | ) |
| 77 | for name, file_path in hook_rows: |
| 78 | self._promote_node(name, file_path, "RNHook") |
| 79 | result.promoted += 1 |
| 80 | result.patterns_found["hooks"] = len(hook_rows) |
| 81 | |
| 82 | # ── Navigation: navigator factory / container patterns ──────────────── |
| 83 | nav_conditions = " OR ".join(f"n.name CONTAINS '{pat}'" for pat in _NAVIGATION_PATTERNS) |
| 84 | nav_rows = ( |
| 85 | self.store.query( |
| 86 | f"MATCH (n) WHERE ({nav_conditions}) " |
| 87 | "AND n.file_path IS NOT NULL " |
| 88 | "RETURN n.name, n.file_path", |
| 89 | ).result_set |
| 90 | or [] |
| 91 | ) |
| 92 | for name, file_path in nav_rows: |
| 93 | self._promote_node(name, file_path, "RNNavigation") |
| 94 | result.promoted += 1 |
| 95 | result.patterns_found["navigation"] = len(nav_rows) |
| 96 | |
| 97 |
+17
-13
| --- navegador/explorer/server.py | ||
| +++ navegador/explorer/server.py | ||
| @@ -51,16 +51,18 @@ | ||
| 51 | 51 | ) |
| 52 | 52 | result = [] |
| 53 | 53 | for row in rows: |
| 54 | 54 | nid, label, name, props = row[0], row[1], row[2], row[3] |
| 55 | 55 | node_props = dict(props) if isinstance(props, dict) else {} |
| 56 | - result.append({ | |
| 57 | - "id": str(nid), | |
| 58 | - "label": label or "default", | |
| 59 | - "name": name or str(nid), | |
| 60 | - "props": node_props, | |
| 61 | - }) | |
| 56 | + result.append( | |
| 57 | + { | |
| 58 | + "id": str(nid), | |
| 59 | + "label": label or "default", | |
| 60 | + "name": name or str(nid), | |
| 61 | + "props": node_props, | |
| 62 | + } | |
| 63 | + ) | |
| 62 | 64 | return result |
| 63 | 65 | |
| 64 | 66 | |
| 65 | 67 | def _get_all_edges(store: "GraphStore") -> list[dict]: |
| 66 | 68 | rows = _query( |
| @@ -85,16 +87,18 @@ | ||
| 85 | 87 | "LIMIT $limit", |
| 86 | 88 | {"q": q, "limit": limit}, |
| 87 | 89 | ) |
| 88 | 90 | result = [] |
| 89 | 91 | for row in rows: |
| 90 | - result.append({ | |
| 91 | - "label": row[0] or "", | |
| 92 | - "name": row[1] or "", | |
| 93 | - "file_path": row[2] or "", | |
| 94 | - "domain": row[3] or "", | |
| 95 | - }) | |
| 92 | + result.append( | |
| 93 | + { | |
| 94 | + "label": row[0] or "", | |
| 95 | + "name": row[1] or "", | |
| 96 | + "file_path": row[2] or "", | |
| 97 | + "domain": row[3] or "", | |
| 98 | + } | |
| 99 | + ) | |
| 96 | 100 | return result |
| 97 | 101 | |
| 98 | 102 | |
| 99 | 103 | def _get_node_detail(store: "GraphStore", name: str) -> dict: |
| 100 | 104 | # Node properties |
| @@ -210,11 +214,11 @@ | ||
| 210 | 214 | results = _search_nodes(self._store, q) if q else [] |
| 211 | 215 | self._send_json({"nodes": results}) |
| 212 | 216 | |
| 213 | 217 | # ── Node detail — /api/node/<name> |
| 214 | 218 | elif path.startswith("/api/node/"): |
| 215 | - raw_name = path[len("/api/node/"):] | |
| 219 | + raw_name = path[len("/api/node/") :] | |
| 216 | 220 | name = unquote(raw_name) |
| 217 | 221 | detail = _get_node_detail(self._store, name) |
| 218 | 222 | self._send_json(detail) |
| 219 | 223 | |
| 220 | 224 | # ── Stats |
| 221 | 225 |
| --- navegador/explorer/server.py | |
| +++ navegador/explorer/server.py | |
| @@ -51,16 +51,18 @@ | |
| 51 | ) |
| 52 | result = [] |
| 53 | for row in rows: |
| 54 | nid, label, name, props = row[0], row[1], row[2], row[3] |
| 55 | node_props = dict(props) if isinstance(props, dict) else {} |
| 56 | result.append({ |
| 57 | "id": str(nid), |
| 58 | "label": label or "default", |
| 59 | "name": name or str(nid), |
| 60 | "props": node_props, |
| 61 | }) |
| 62 | return result |
| 63 | |
| 64 | |
| 65 | def _get_all_edges(store: "GraphStore") -> list[dict]: |
| 66 | rows = _query( |
| @@ -85,16 +87,18 @@ | |
| 85 | "LIMIT $limit", |
| 86 | {"q": q, "limit": limit}, |
| 87 | ) |
| 88 | result = [] |
| 89 | for row in rows: |
| 90 | result.append({ |
| 91 | "label": row[0] or "", |
| 92 | "name": row[1] or "", |
| 93 | "file_path": row[2] or "", |
| 94 | "domain": row[3] or "", |
| 95 | }) |
| 96 | return result |
| 97 | |
| 98 | |
| 99 | def _get_node_detail(store: "GraphStore", name: str) -> dict: |
| 100 | # Node properties |
| @@ -210,11 +214,11 @@ | |
| 210 | results = _search_nodes(self._store, q) if q else [] |
| 211 | self._send_json({"nodes": results}) |
| 212 | |
| 213 | # ── Node detail — /api/node/<name> |
| 214 | elif path.startswith("/api/node/"): |
| 215 | raw_name = path[len("/api/node/"):] |
| 216 | name = unquote(raw_name) |
| 217 | detail = _get_node_detail(self._store, name) |
| 218 | self._send_json(detail) |
| 219 | |
| 220 | # ── Stats |
| 221 |
| --- navegador/explorer/server.py | |
| +++ navegador/explorer/server.py | |
| @@ -51,16 +51,18 @@ | |
| 51 | ) |
| 52 | result = [] |
| 53 | for row in rows: |
| 54 | nid, label, name, props = row[0], row[1], row[2], row[3] |
| 55 | node_props = dict(props) if isinstance(props, dict) else {} |
| 56 | result.append( |
| 57 | { |
| 58 | "id": str(nid), |
| 59 | "label": label or "default", |
| 60 | "name": name or str(nid), |
| 61 | "props": node_props, |
| 62 | } |
| 63 | ) |
| 64 | return result |
| 65 | |
| 66 | |
| 67 | def _get_all_edges(store: "GraphStore") -> list[dict]: |
| 68 | rows = _query( |
| @@ -85,16 +87,18 @@ | |
| 87 | "LIMIT $limit", |
| 88 | {"q": q, "limit": limit}, |
| 89 | ) |
| 90 | result = [] |
| 91 | for row in rows: |
| 92 | result.append( |
| 93 | { |
| 94 | "label": row[0] or "", |
| 95 | "name": row[1] or "", |
| 96 | "file_path": row[2] or "", |
| 97 | "domain": row[3] or "", |
| 98 | } |
| 99 | ) |
| 100 | return result |
| 101 | |
| 102 | |
| 103 | def _get_node_detail(store: "GraphStore", name: str) -> dict: |
| 104 | # Node properties |
| @@ -210,11 +214,11 @@ | |
| 214 | results = _search_nodes(self._store, q) if q else [] |
| 215 | self._send_json({"nodes": results}) |
| 216 | |
| 217 | # ── Node detail — /api/node/<name> |
| 218 | elif path.startswith("/api/node/"): |
| 219 | raw_name = path[len("/api/node/") :] |
| 220 | name = unquote(raw_name) |
| 221 | detail = _get_node_detail(self._store, name) |
| 222 | self._send_json(detail) |
| 223 | |
| 224 | # ── Stats |
| 225 |
+1
-3
| --- navegador/explorer/templates.py | ||
| +++ navegador/explorer/templates.py | ||
| @@ -28,13 +28,11 @@ | ||
| 28 | 28 | "WikiPage": "#d2b4de", |
| 29 | 29 | "Person": "#fadbd8", |
| 30 | 30 | "default": "#aaaaaa", |
| 31 | 31 | } |
| 32 | 32 | |
| 33 | -_COLORS_JS = "\n".join( | |
| 34 | - f" '{label}': '{color}'," for label, color in NODE_COLORS.items() | |
| 35 | -) | |
| 33 | +_COLORS_JS = "\n".join(f" '{label}': '{color}'," for label, color in NODE_COLORS.items()) | |
| 36 | 34 | |
| 37 | 35 | HTML_TEMPLATE = """<!DOCTYPE html> |
| 38 | 36 | <html lang="en"> |
| 39 | 37 | <head> |
| 40 | 38 | <meta charset="UTF-8"> |
| 41 | 39 |
| --- navegador/explorer/templates.py | |
| +++ navegador/explorer/templates.py | |
| @@ -28,13 +28,11 @@ | |
| 28 | "WikiPage": "#d2b4de", |
| 29 | "Person": "#fadbd8", |
| 30 | "default": "#aaaaaa", |
| 31 | } |
| 32 | |
| 33 | _COLORS_JS = "\n".join( |
| 34 | f" '{label}': '{color}'," for label, color in NODE_COLORS.items() |
| 35 | ) |
| 36 | |
| 37 | HTML_TEMPLATE = """<!DOCTYPE html> |
| 38 | <html lang="en"> |
| 39 | <head> |
| 40 | <meta charset="UTF-8"> |
| 41 |
| --- navegador/explorer/templates.py | |
| +++ navegador/explorer/templates.py | |
| @@ -28,13 +28,11 @@ | |
| 28 | "WikiPage": "#d2b4de", |
| 29 | "Person": "#fadbd8", |
| 30 | "default": "#aaaaaa", |
| 31 | } |
| 32 | |
| 33 | _COLORS_JS = "\n".join(f" '{label}': '{color}'," for label, color in NODE_COLORS.items()) |
| 34 | |
| 35 | HTML_TEMPLATE = """<!DOCTYPE html> |
| 36 | <html lang="en"> |
| 37 | <head> |
| 38 | <meta charset="UTF-8"> |
| 39 |
+18
-13
| --- navegador/graph/export.py | ||
| +++ navegador/graph/export.py | ||
| @@ -32,12 +32,17 @@ | ||
| 32 | 32 | nodes = _export_nodes(store) |
| 33 | 33 | edges = _export_edges(store) |
| 34 | 34 | |
| 35 | 35 | # Sort for deterministic output |
| 36 | 36 | nodes.sort(key=lambda n: (n["label"], json.dumps(n["props"], sort_keys=True))) |
| 37 | - edges.sort(key=lambda e: (e["type"], json.dumps(e["from"], sort_keys=True), | |
| 38 | - json.dumps(e["to"], sort_keys=True))) | |
| 37 | + edges.sort( | |
| 38 | + key=lambda e: ( | |
| 39 | + e["type"], | |
| 40 | + json.dumps(e["from"], sort_keys=True), | |
| 41 | + json.dumps(e["to"], sort_keys=True), | |
| 42 | + ) | |
| 43 | + ) | |
| 39 | 44 | |
| 40 | 45 | with output_path.open("w", encoding="utf-8") as f: |
| 41 | 46 | for node in nodes: |
| 42 | 47 | f.write(json.dumps(node, sort_keys=True) + "\n") |
| 43 | 48 | for edge in edges: |
| @@ -87,13 +92,11 @@ | ||
| 87 | 92 | return {"nodes": node_count, "edges": edge_count} |
| 88 | 93 | |
| 89 | 94 | |
| 90 | 95 | def _export_nodes(store: GraphStore) -> list[dict]: |
| 91 | 96 | """Export all nodes with their labels and properties.""" |
| 92 | - result = store.query( | |
| 93 | - "MATCH (n) RETURN labels(n)[0] AS label, properties(n) AS props" | |
| 94 | - ) | |
| 97 | + result = store.query("MATCH (n) RETURN labels(n)[0] AS label, properties(n) AS props") | |
| 95 | 98 | nodes = [] |
| 96 | 99 | for row in result.result_set or []: |
| 97 | 100 | label = row[0] |
| 98 | 101 | props = row[1] if isinstance(row[1], dict) else {} |
| 99 | 102 | nodes.append({"kind": "node", "label": label, "props": props}) |
| @@ -109,16 +112,18 @@ | ||
| 109 | 112 | "labels(b)[0] AS to_label, b.name AS to_name, " |
| 110 | 113 | "coalesce(b.file_path, b.path, '') AS to_path" |
| 111 | 114 | ) |
| 112 | 115 | edges = [] |
| 113 | 116 | for row in result.result_set or []: |
| 114 | - edges.append({ | |
| 115 | - "kind": "edge", | |
| 116 | - "type": row[0], | |
| 117 | - "from": {"label": row[1], "name": row[2], "path": row[3]}, | |
| 118 | - "to": {"label": row[4], "name": row[5], "path": row[6]}, | |
| 119 | - }) | |
| 117 | + edges.append( | |
| 118 | + { | |
| 119 | + "kind": "edge", | |
| 120 | + "type": row[0], | |
| 121 | + "from": {"label": row[1], "name": row[2], "path": row[3]}, | |
| 122 | + "to": {"label": row[4], "name": row[5], "path": row[6]}, | |
| 123 | + } | |
| 124 | + ) | |
| 120 | 125 | return edges |
| 121 | 126 | |
| 122 | 127 | |
| 123 | 128 | def _import_node(store: GraphStore, record: dict) -> None: |
| 124 | 129 | """Create a node from an export record.""" |
| @@ -143,12 +148,12 @@ | ||
| 143 | 148 | """Create an edge from an export record.""" |
| 144 | 149 | edge_type = record["type"] |
| 145 | 150 | from_info = record["from"] |
| 146 | 151 | to_info = record["to"] |
| 147 | 152 | |
| 148 | - from_key = f"name: $from_name" | |
| 149 | - to_key = f"name: $to_name" | |
| 153 | + from_key = "name: $from_name" | |
| 154 | + to_key = "name: $to_name" | |
| 150 | 155 | |
| 151 | 156 | params = { |
| 152 | 157 | "from_name": from_info["name"], |
| 153 | 158 | "to_name": to_info["name"], |
| 154 | 159 | } |
| 155 | 160 |
| --- navegador/graph/export.py | |
| +++ navegador/graph/export.py | |
| @@ -32,12 +32,17 @@ | |
| 32 | nodes = _export_nodes(store) |
| 33 | edges = _export_edges(store) |
| 34 | |
| 35 | # Sort for deterministic output |
| 36 | nodes.sort(key=lambda n: (n["label"], json.dumps(n["props"], sort_keys=True))) |
| 37 | edges.sort(key=lambda e: (e["type"], json.dumps(e["from"], sort_keys=True), |
| 38 | json.dumps(e["to"], sort_keys=True))) |
| 39 | |
| 40 | with output_path.open("w", encoding="utf-8") as f: |
| 41 | for node in nodes: |
| 42 | f.write(json.dumps(node, sort_keys=True) + "\n") |
| 43 | for edge in edges: |
| @@ -87,13 +92,11 @@ | |
| 87 | return {"nodes": node_count, "edges": edge_count} |
| 88 | |
| 89 | |
| 90 | def _export_nodes(store: GraphStore) -> list[dict]: |
| 91 | """Export all nodes with their labels and properties.""" |
| 92 | result = store.query( |
| 93 | "MATCH (n) RETURN labels(n)[0] AS label, properties(n) AS props" |
| 94 | ) |
| 95 | nodes = [] |
| 96 | for row in result.result_set or []: |
| 97 | label = row[0] |
| 98 | props = row[1] if isinstance(row[1], dict) else {} |
| 99 | nodes.append({"kind": "node", "label": label, "props": props}) |
| @@ -109,16 +112,18 @@ | |
| 109 | "labels(b)[0] AS to_label, b.name AS to_name, " |
| 110 | "coalesce(b.file_path, b.path, '') AS to_path" |
| 111 | ) |
| 112 | edges = [] |
| 113 | for row in result.result_set or []: |
| 114 | edges.append({ |
| 115 | "kind": "edge", |
| 116 | "type": row[0], |
| 117 | "from": {"label": row[1], "name": row[2], "path": row[3]}, |
| 118 | "to": {"label": row[4], "name": row[5], "path": row[6]}, |
| 119 | }) |
| 120 | return edges |
| 121 | |
| 122 | |
| 123 | def _import_node(store: GraphStore, record: dict) -> None: |
| 124 | """Create a node from an export record.""" |
| @@ -143,12 +148,12 @@ | |
| 143 | """Create an edge from an export record.""" |
| 144 | edge_type = record["type"] |
| 145 | from_info = record["from"] |
| 146 | to_info = record["to"] |
| 147 | |
| 148 | from_key = f"name: $from_name" |
| 149 | to_key = f"name: $to_name" |
| 150 | |
| 151 | params = { |
| 152 | "from_name": from_info["name"], |
| 153 | "to_name": to_info["name"], |
| 154 | } |
| 155 |
| --- navegador/graph/export.py | |
| +++ navegador/graph/export.py | |
| @@ -32,12 +32,17 @@ | |
| 32 | nodes = _export_nodes(store) |
| 33 | edges = _export_edges(store) |
| 34 | |
| 35 | # Sort for deterministic output |
| 36 | nodes.sort(key=lambda n: (n["label"], json.dumps(n["props"], sort_keys=True))) |
| 37 | edges.sort( |
| 38 | key=lambda e: ( |
| 39 | e["type"], |
| 40 | json.dumps(e["from"], sort_keys=True), |
| 41 | json.dumps(e["to"], sort_keys=True), |
| 42 | ) |
| 43 | ) |
| 44 | |
| 45 | with output_path.open("w", encoding="utf-8") as f: |
| 46 | for node in nodes: |
| 47 | f.write(json.dumps(node, sort_keys=True) + "\n") |
| 48 | for edge in edges: |
| @@ -87,13 +92,11 @@ | |
| 92 | return {"nodes": node_count, "edges": edge_count} |
| 93 | |
| 94 | |
| 95 | def _export_nodes(store: GraphStore) -> list[dict]: |
| 96 | """Export all nodes with their labels and properties.""" |
| 97 | result = store.query("MATCH (n) RETURN labels(n)[0] AS label, properties(n) AS props") |
| 98 | nodes = [] |
| 99 | for row in result.result_set or []: |
| 100 | label = row[0] |
| 101 | props = row[1] if isinstance(row[1], dict) else {} |
| 102 | nodes.append({"kind": "node", "label": label, "props": props}) |
| @@ -109,16 +112,18 @@ | |
| 112 | "labels(b)[0] AS to_label, b.name AS to_name, " |
| 113 | "coalesce(b.file_path, b.path, '') AS to_path" |
| 114 | ) |
| 115 | edges = [] |
| 116 | for row in result.result_set or []: |
| 117 | edges.append( |
| 118 | { |
| 119 | "kind": "edge", |
| 120 | "type": row[0], |
| 121 | "from": {"label": row[1], "name": row[2], "path": row[3]}, |
| 122 | "to": {"label": row[4], "name": row[5], "path": row[6]}, |
| 123 | } |
| 124 | ) |
| 125 | return edges |
| 126 | |
| 127 | |
| 128 | def _import_node(store: GraphStore, record: dict) -> None: |
| 129 | """Create a node from an export record.""" |
| @@ -143,12 +148,12 @@ | |
| 148 | """Create an edge from an export record.""" |
| 149 | edge_type = record["type"] |
| 150 | from_info = record["from"] |
| 151 | to_info = record["to"] |
| 152 | |
| 153 | from_key = "name: $from_name" |
| 154 | to_key = "name: $to_name" |
| 155 | |
| 156 | params = { |
| 157 | "from_name": from_info["name"], |
| 158 | "to_name": to_info["name"], |
| 159 | } |
| 160 |
+2
-6
| --- navegador/graph/migrations.py | ||
| +++ navegador/graph/migrations.py | ||
| @@ -62,13 +62,11 @@ | ||
| 62 | 62 | applied: list[int] = [] |
| 63 | 63 | |
| 64 | 64 | while current < CURRENT_SCHEMA_VERSION: |
| 65 | 65 | fn = _migrations.get(current) |
| 66 | 66 | if fn is None: |
| 67 | - raise RuntimeError( | |
| 68 | - f"No migration registered for version {current} -> {current + 1}" | |
| 69 | - ) | |
| 67 | + raise RuntimeError(f"No migration registered for version {current} -> {current + 1}") | |
| 70 | 68 | logger.info("Applying migration %d -> %d", current, current + 1) |
| 71 | 69 | fn(store) |
| 72 | 70 | current += 1 |
| 73 | 71 | set_schema_version(store, current) |
| 74 | 72 | applied.append(current) |
| @@ -93,9 +91,7 @@ | ||
| 93 | 91 | |
| 94 | 92 | @migration(1) |
| 95 | 93 | def _migrate_1_to_2(store: GraphStore) -> None: |
| 96 | 94 | """Add content_hash property to File nodes for incremental ingestion.""" |
| 97 | 95 | # Set content_hash to empty string on existing File nodes that lack it. |
| 98 | - store.query( | |
| 99 | - "MATCH (f:File) WHERE f.content_hash IS NULL SET f.content_hash = ''" | |
| 100 | - ) | |
| 96 | + store.query("MATCH (f:File) WHERE f.content_hash IS NULL SET f.content_hash = ''") | |
| 101 | 97 | logger.info("Added content_hash to File nodes") |
| 102 | 98 |
| --- navegador/graph/migrations.py | |
| +++ navegador/graph/migrations.py | |
| @@ -62,13 +62,11 @@ | |
| 62 | applied: list[int] = [] |
| 63 | |
| 64 | while current < CURRENT_SCHEMA_VERSION: |
| 65 | fn = _migrations.get(current) |
| 66 | if fn is None: |
| 67 | raise RuntimeError( |
| 68 | f"No migration registered for version {current} -> {current + 1}" |
| 69 | ) |
| 70 | logger.info("Applying migration %d -> %d", current, current + 1) |
| 71 | fn(store) |
| 72 | current += 1 |
| 73 | set_schema_version(store, current) |
| 74 | applied.append(current) |
| @@ -93,9 +91,7 @@ | |
| 93 | |
| 94 | @migration(1) |
| 95 | def _migrate_1_to_2(store: GraphStore) -> None: |
| 96 | """Add content_hash property to File nodes for incremental ingestion.""" |
| 97 | # Set content_hash to empty string on existing File nodes that lack it. |
| 98 | store.query( |
| 99 | "MATCH (f:File) WHERE f.content_hash IS NULL SET f.content_hash = ''" |
| 100 | ) |
| 101 | logger.info("Added content_hash to File nodes") |
| 102 |
| --- navegador/graph/migrations.py | |
| +++ navegador/graph/migrations.py | |
| @@ -62,13 +62,11 @@ | |
| 62 | applied: list[int] = [] |
| 63 | |
| 64 | while current < CURRENT_SCHEMA_VERSION: |
| 65 | fn = _migrations.get(current) |
| 66 | if fn is None: |
| 67 | raise RuntimeError(f"No migration registered for version {current} -> {current + 1}") |
| 68 | logger.info("Applying migration %d -> %d", current, current + 1) |
| 69 | fn(store) |
| 70 | current += 1 |
| 71 | set_schema_version(store, current) |
| 72 | applied.append(current) |
| @@ -93,9 +91,7 @@ | |
| 91 | |
| 92 | @migration(1) |
| 93 | def _migrate_1_to_2(store: GraphStore) -> None: |
| 94 | """Add content_hash property to File nodes for incremental ingestion.""" |
| 95 | # Set content_hash to empty string on existing File nodes that lack it. |
| 96 | store.query("MATCH (f:File) WHERE f.content_hash IS NULL SET f.content_hash = ''") |
| 97 | logger.info("Added content_hash to File nodes") |
| 98 |
+6
-8
| --- navegador/ingestion/c.py | ||
| +++ navegador/ingestion/c.py | ||
| @@ -18,13 +18,11 @@ | ||
| 18 | 18 | import tree_sitter_c as tsc # type: ignore[import] |
| 19 | 19 | from tree_sitter import Language |
| 20 | 20 | |
| 21 | 21 | return Language(tsc.language()) |
| 22 | 22 | except ImportError as e: |
| 23 | - raise ImportError( | |
| 24 | - "Install tree-sitter-c: pip install tree-sitter-c" | |
| 25 | - ) from e | |
| 23 | + raise ImportError("Install tree-sitter-c: pip install tree-sitter-c") from e | |
| 26 | 24 | |
| 27 | 25 | |
| 28 | 26 | def _node_text(node, source: bytes) -> str: |
| 29 | 27 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 30 | 28 | |
| @@ -139,12 +137,14 @@ | ||
| 139 | 137 | name_node = next((c for c in node.children if c.type == "type_identifier"), None) |
| 140 | 138 | if not name_node: |
| 141 | 139 | return |
| 142 | 140 | name = _node_text(name_node, source) |
| 143 | 141 | |
| 144 | - kind = "struct" if node.type == "struct_specifier" else ( | |
| 145 | - "union" if node.type == "union_specifier" else "enum" | |
| 142 | + kind = ( | |
| 143 | + "struct" | |
| 144 | + if node.type == "struct_specifier" | |
| 145 | + else ("union" if node.type == "union_specifier" else "enum") | |
| 146 | 146 | ) |
| 147 | 147 | store.create_node( |
| 148 | 148 | NodeLabel.Class, |
| 149 | 149 | { |
| 150 | 150 | "name": name, |
| @@ -205,13 +205,11 @@ | ||
| 205 | 205 | ) -> None: |
| 206 | 206 | def walk(node): |
| 207 | 207 | if node.type == "call_expression": |
| 208 | 208 | func = node.child_by_field_name("function") |
| 209 | 209 | if not func: |
| 210 | - func = next( | |
| 211 | - (c for c in node.children if c.type == "identifier"), None | |
| 212 | - ) | |
| 210 | + func = next((c for c in node.children if c.type == "identifier"), None) | |
| 213 | 211 | if func: |
| 214 | 212 | callee = _node_text(func, source) |
| 215 | 213 | store.create_edge( |
| 216 | 214 | NodeLabel.Function, |
| 217 | 215 | {"name": fn_name, "file_path": file_path}, |
| 218 | 216 |
| --- navegador/ingestion/c.py | |
| +++ navegador/ingestion/c.py | |
| @@ -18,13 +18,11 @@ | |
| 18 | import tree_sitter_c as tsc # type: ignore[import] |
| 19 | from tree_sitter import Language |
| 20 | |
| 21 | return Language(tsc.language()) |
| 22 | except ImportError as e: |
| 23 | raise ImportError( |
| 24 | "Install tree-sitter-c: pip install tree-sitter-c" |
| 25 | ) from e |
| 26 | |
| 27 | |
| 28 | def _node_text(node, source: bytes) -> str: |
| 29 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 30 | |
| @@ -139,12 +137,14 @@ | |
| 139 | name_node = next((c for c in node.children if c.type == "type_identifier"), None) |
| 140 | if not name_node: |
| 141 | return |
| 142 | name = _node_text(name_node, source) |
| 143 | |
| 144 | kind = "struct" if node.type == "struct_specifier" else ( |
| 145 | "union" if node.type == "union_specifier" else "enum" |
| 146 | ) |
| 147 | store.create_node( |
| 148 | NodeLabel.Class, |
| 149 | { |
| 150 | "name": name, |
| @@ -205,13 +205,11 @@ | |
| 205 | ) -> None: |
| 206 | def walk(node): |
| 207 | if node.type == "call_expression": |
| 208 | func = node.child_by_field_name("function") |
| 209 | if not func: |
| 210 | func = next( |
| 211 | (c for c in node.children if c.type == "identifier"), None |
| 212 | ) |
| 213 | if func: |
| 214 | callee = _node_text(func, source) |
| 215 | store.create_edge( |
| 216 | NodeLabel.Function, |
| 217 | {"name": fn_name, "file_path": file_path}, |
| 218 |
| --- navegador/ingestion/c.py | |
| +++ navegador/ingestion/c.py | |
| @@ -18,13 +18,11 @@ | |
| 18 | import tree_sitter_c as tsc # type: ignore[import] |
| 19 | from tree_sitter import Language |
| 20 | |
| 21 | return Language(tsc.language()) |
| 22 | except ImportError as e: |
| 23 | raise ImportError("Install tree-sitter-c: pip install tree-sitter-c") from e |
| 24 | |
| 25 | |
| 26 | def _node_text(node, source: bytes) -> str: |
| 27 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 28 | |
| @@ -139,12 +137,14 @@ | |
| 137 | name_node = next((c for c in node.children if c.type == "type_identifier"), None) |
| 138 | if not name_node: |
| 139 | return |
| 140 | name = _node_text(name_node, source) |
| 141 | |
| 142 | kind = ( |
| 143 | "struct" |
| 144 | if node.type == "struct_specifier" |
| 145 | else ("union" if node.type == "union_specifier" else "enum") |
| 146 | ) |
| 147 | store.create_node( |
| 148 | NodeLabel.Class, |
| 149 | { |
| 150 | "name": name, |
| @@ -205,13 +205,11 @@ | |
| 205 | ) -> None: |
| 206 | def walk(node): |
| 207 | if node.type == "call_expression": |
| 208 | func = node.child_by_field_name("function") |
| 209 | if not func: |
| 210 | func = next((c for c in node.children if c.type == "identifier"), None) |
| 211 | if func: |
| 212 | callee = _node_text(func, source) |
| 213 | store.create_edge( |
| 214 | NodeLabel.Function, |
| 215 | {"name": fn_name, "file_path": file_path}, |
| 216 |
+9
-13
| --- navegador/ingestion/cpp.py | ||
| +++ navegador/ingestion/cpp.py | ||
| @@ -18,13 +18,11 @@ | ||
| 18 | 18 | import tree_sitter_cpp as tscpp # type: ignore[import] |
| 19 | 19 | from tree_sitter import Language |
| 20 | 20 | |
| 21 | 21 | return Language(tscpp.language()) |
| 22 | 22 | except ImportError as e: |
| 23 | - raise ImportError( | |
| 24 | - "Install tree-sitter-cpp: pip install tree-sitter-cpp" | |
| 25 | - ) from e | |
| 23 | + raise ImportError("Install tree-sitter-cpp: pip install tree-sitter-cpp") from e | |
| 26 | 24 | |
| 27 | 25 | |
| 28 | 26 | def _node_text(node, source: bytes) -> str: |
| 29 | 27 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 30 | 28 | |
| @@ -78,13 +76,11 @@ | ||
| 78 | 76 | return |
| 79 | 77 | if node.type == "namespace_definition": |
| 80 | 78 | # Recurse into namespace body |
| 81 | 79 | body = node.child_by_field_name("body") |
| 82 | 80 | if not body: |
| 83 | - body = next( | |
| 84 | - (c for c in node.children if c.type == "declaration_list"), None | |
| 85 | - ) | |
| 81 | + body = next((c for c in node.children if c.type == "declaration_list"), None) | |
| 86 | 82 | if body: |
| 87 | 83 | for child in body.children: |
| 88 | 84 | self._walk(child, source, file_path, store, stats, class_name) |
| 89 | 85 | return |
| 90 | 86 | for child in node.children: |
| @@ -95,13 +91,11 @@ | ||
| 95 | 91 | def _handle_class( |
| 96 | 92 | self, node, source: bytes, file_path: str, store: GraphStore, stats: dict |
| 97 | 93 | ) -> None: |
| 98 | 94 | name_node = node.child_by_field_name("name") |
| 99 | 95 | if not name_node: |
| 100 | - name_node = next( | |
| 101 | - (c for c in node.children if c.type == "type_identifier"), None | |
| 102 | - ) | |
| 96 | + name_node = next((c for c in node.children if c.type == "type_identifier"), None) | |
| 103 | 97 | if not name_node: |
| 104 | 98 | return |
| 105 | 99 | name = _node_text(name_node, source) |
| 106 | 100 | |
| 107 | 101 | store.create_node( |
| @@ -125,13 +119,11 @@ | ||
| 125 | 119 | stats["edges"] += 1 |
| 126 | 120 | |
| 127 | 121 | # Base classes |
| 128 | 122 | base_clause = node.child_by_field_name("base_clause") |
| 129 | 123 | if not base_clause: |
| 130 | - base_clause = next( | |
| 131 | - (c for c in node.children if c.type == "base_class_clause"), None | |
| 132 | - ) | |
| 124 | + base_clause = next((c for c in node.children if c.type == "base_class_clause"), None) | |
| 133 | 125 | if base_clause: |
| 134 | 126 | for child in base_clause.children: |
| 135 | 127 | if child.type == "type_identifier": |
| 136 | 128 | parent_name = _node_text(child, source) |
| 137 | 129 | store.create_edge( |
| @@ -262,11 +254,15 @@ | ||
| 262 | 254 | def walk(node): |
| 263 | 255 | if node.type == "call_expression": |
| 264 | 256 | func = node.child_by_field_name("function") |
| 265 | 257 | if not func: |
| 266 | 258 | func = next( |
| 267 | - (c for c in node.children if c.type in ("identifier", "qualified_identifier", "field_expression")), | |
| 259 | + ( | |
| 260 | + c | |
| 261 | + for c in node.children | |
| 262 | + if c.type in ("identifier", "qualified_identifier", "field_expression") | |
| 263 | + ), | |
| 268 | 264 | None, |
| 269 | 265 | ) |
| 270 | 266 | if func: |
| 271 | 267 | callee = _node_text(func, source).split("::")[-1].split(".")[-1].split("->")[-1] |
| 272 | 268 | store.create_edge( |
| 273 | 269 |
| --- navegador/ingestion/cpp.py | |
| +++ navegador/ingestion/cpp.py | |
| @@ -18,13 +18,11 @@ | |
| 18 | import tree_sitter_cpp as tscpp # type: ignore[import] |
| 19 | from tree_sitter import Language |
| 20 | |
| 21 | return Language(tscpp.language()) |
| 22 | except ImportError as e: |
| 23 | raise ImportError( |
| 24 | "Install tree-sitter-cpp: pip install tree-sitter-cpp" |
| 25 | ) from e |
| 26 | |
| 27 | |
| 28 | def _node_text(node, source: bytes) -> str: |
| 29 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 30 | |
| @@ -78,13 +76,11 @@ | |
| 78 | return |
| 79 | if node.type == "namespace_definition": |
| 80 | # Recurse into namespace body |
| 81 | body = node.child_by_field_name("body") |
| 82 | if not body: |
| 83 | body = next( |
| 84 | (c for c in node.children if c.type == "declaration_list"), None |
| 85 | ) |
| 86 | if body: |
| 87 | for child in body.children: |
| 88 | self._walk(child, source, file_path, store, stats, class_name) |
| 89 | return |
| 90 | for child in node.children: |
| @@ -95,13 +91,11 @@ | |
| 95 | def _handle_class( |
| 96 | self, node, source: bytes, file_path: str, store: GraphStore, stats: dict |
| 97 | ) -> None: |
| 98 | name_node = node.child_by_field_name("name") |
| 99 | if not name_node: |
| 100 | name_node = next( |
| 101 | (c for c in node.children if c.type == "type_identifier"), None |
| 102 | ) |
| 103 | if not name_node: |
| 104 | return |
| 105 | name = _node_text(name_node, source) |
| 106 | |
| 107 | store.create_node( |
| @@ -125,13 +119,11 @@ | |
| 125 | stats["edges"] += 1 |
| 126 | |
| 127 | # Base classes |
| 128 | base_clause = node.child_by_field_name("base_clause") |
| 129 | if not base_clause: |
| 130 | base_clause = next( |
| 131 | (c for c in node.children if c.type == "base_class_clause"), None |
| 132 | ) |
| 133 | if base_clause: |
| 134 | for child in base_clause.children: |
| 135 | if child.type == "type_identifier": |
| 136 | parent_name = _node_text(child, source) |
| 137 | store.create_edge( |
| @@ -262,11 +254,15 @@ | |
| 262 | def walk(node): |
| 263 | if node.type == "call_expression": |
| 264 | func = node.child_by_field_name("function") |
| 265 | if not func: |
| 266 | func = next( |
| 267 | (c for c in node.children if c.type in ("identifier", "qualified_identifier", "field_expression")), |
| 268 | None, |
| 269 | ) |
| 270 | if func: |
| 271 | callee = _node_text(func, source).split("::")[-1].split(".")[-1].split("->")[-1] |
| 272 | store.create_edge( |
| 273 |
| --- navegador/ingestion/cpp.py | |
| +++ navegador/ingestion/cpp.py | |
| @@ -18,13 +18,11 @@ | |
| 18 | import tree_sitter_cpp as tscpp # type: ignore[import] |
| 19 | from tree_sitter import Language |
| 20 | |
| 21 | return Language(tscpp.language()) |
| 22 | except ImportError as e: |
| 23 | raise ImportError("Install tree-sitter-cpp: pip install tree-sitter-cpp") from e |
| 24 | |
| 25 | |
| 26 | def _node_text(node, source: bytes) -> str: |
| 27 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 28 | |
| @@ -78,13 +76,11 @@ | |
| 76 | return |
| 77 | if node.type == "namespace_definition": |
| 78 | # Recurse into namespace body |
| 79 | body = node.child_by_field_name("body") |
| 80 | if not body: |
| 81 | body = next((c for c in node.children if c.type == "declaration_list"), None) |
| 82 | if body: |
| 83 | for child in body.children: |
| 84 | self._walk(child, source, file_path, store, stats, class_name) |
| 85 | return |
| 86 | for child in node.children: |
| @@ -95,13 +91,11 @@ | |
| 91 | def _handle_class( |
| 92 | self, node, source: bytes, file_path: str, store: GraphStore, stats: dict |
| 93 | ) -> None: |
| 94 | name_node = node.child_by_field_name("name") |
| 95 | if not name_node: |
| 96 | name_node = next((c for c in node.children if c.type == "type_identifier"), None) |
| 97 | if not name_node: |
| 98 | return |
| 99 | name = _node_text(name_node, source) |
| 100 | |
| 101 | store.create_node( |
| @@ -125,13 +119,11 @@ | |
| 119 | stats["edges"] += 1 |
| 120 | |
| 121 | # Base classes |
| 122 | base_clause = node.child_by_field_name("base_clause") |
| 123 | if not base_clause: |
| 124 | base_clause = next((c for c in node.children if c.type == "base_class_clause"), None) |
| 125 | if base_clause: |
| 126 | for child in base_clause.children: |
| 127 | if child.type == "type_identifier": |
| 128 | parent_name = _node_text(child, source) |
| 129 | store.create_edge( |
| @@ -262,11 +254,15 @@ | |
| 254 | def walk(node): |
| 255 | if node.type == "call_expression": |
| 256 | func = node.child_by_field_name("function") |
| 257 | if not func: |
| 258 | func = next( |
| 259 | ( |
| 260 | c |
| 261 | for c in node.children |
| 262 | if c.type in ("identifier", "qualified_identifier", "field_expression") |
| 263 | ), |
| 264 | None, |
| 265 | ) |
| 266 | if func: |
| 267 | callee = _node_text(func, source).split("::")[-1].split(".")[-1].split("->")[-1] |
| 268 | store.create_edge( |
| 269 |
+8
-13
| --- navegador/ingestion/csharp.py | ||
| +++ navegador/ingestion/csharp.py | ||
| @@ -18,13 +18,11 @@ | ||
| 18 | 18 | import tree_sitter_c_sharp as tscsharp # type: ignore[import] |
| 19 | 19 | from tree_sitter import Language |
| 20 | 20 | |
| 21 | 21 | return Language(tscsharp.language()) |
| 22 | 22 | except ImportError as e: |
| 23 | - raise ImportError( | |
| 24 | - "Install tree-sitter-c-sharp: pip install tree-sitter-c-sharp" | |
| 25 | - ) from e | |
| 23 | + raise ImportError("Install tree-sitter-c-sharp: pip install tree-sitter-c-sharp") from e | |
| 26 | 24 | |
| 27 | 25 | |
| 28 | 26 | def _node_text(node, source: bytes) -> str: |
| 29 | 27 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 30 | 28 | |
| @@ -132,13 +130,11 @@ | ||
| 132 | 130 | stats["edges"] += 1 |
| 133 | 131 | |
| 134 | 132 | # Walk class body for methods |
| 135 | 133 | body = node.child_by_field_name("body") |
| 136 | 134 | if not body: |
| 137 | - body = next( | |
| 138 | - (c for c in node.children if c.type == "declaration_list"), None | |
| 139 | - ) | |
| 135 | + body = next((c for c in node.children if c.type == "declaration_list"), None) | |
| 140 | 136 | if body: |
| 141 | 137 | for child in body.children: |
| 142 | 138 | if child.type in ("method_declaration", "constructor_declaration"): |
| 143 | 139 | self._handle_method(child, source, file_path, store, stats, class_name=name) |
| 144 | 140 | |
| @@ -190,16 +186,11 @@ | ||
| 190 | 186 | def _handle_using( |
| 191 | 187 | self, node, source: bytes, file_path: str, store: GraphStore, stats: dict |
| 192 | 188 | ) -> None: |
| 193 | 189 | raw = _node_text(node, source).strip() |
| 194 | 190 | # "using System.Collections.Generic;" or "using static ..." |
| 195 | - module = ( | |
| 196 | - raw.removeprefix("using") | |
| 197 | - .removeprefix(" static") | |
| 198 | - .removesuffix(";") | |
| 199 | - .strip() | |
| 200 | - ) | |
| 191 | + module = raw.removeprefix("using").removeprefix(" static").removesuffix(";").strip() | |
| 201 | 192 | if not module: |
| 202 | 193 | return |
| 203 | 194 | store.create_node( |
| 204 | 195 | NodeLabel.Import, |
| 205 | 196 | { |
| @@ -231,11 +222,15 @@ | ||
| 231 | 222 | def walk(node): |
| 232 | 223 | if node.type == "invocation_expression": |
| 233 | 224 | func = node.child_by_field_name("function") |
| 234 | 225 | if not func: |
| 235 | 226 | func = next( |
| 236 | - (c for c in node.children if c.type in ("identifier", "member_access_expression")), | |
| 227 | + ( | |
| 228 | + c | |
| 229 | + for c in node.children | |
| 230 | + if c.type in ("identifier", "member_access_expression") | |
| 231 | + ), | |
| 237 | 232 | None, |
| 238 | 233 | ) |
| 239 | 234 | if func: |
| 240 | 235 | callee = _node_text(func, source).split(".")[-1] |
| 241 | 236 | store.create_edge( |
| 242 | 237 |
| --- navegador/ingestion/csharp.py | |
| +++ navegador/ingestion/csharp.py | |
| @@ -18,13 +18,11 @@ | |
| 18 | import tree_sitter_c_sharp as tscsharp # type: ignore[import] |
| 19 | from tree_sitter import Language |
| 20 | |
| 21 | return Language(tscsharp.language()) |
| 22 | except ImportError as e: |
| 23 | raise ImportError( |
| 24 | "Install tree-sitter-c-sharp: pip install tree-sitter-c-sharp" |
| 25 | ) from e |
| 26 | |
| 27 | |
| 28 | def _node_text(node, source: bytes) -> str: |
| 29 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 30 | |
| @@ -132,13 +130,11 @@ | |
| 132 | stats["edges"] += 1 |
| 133 | |
| 134 | # Walk class body for methods |
| 135 | body = node.child_by_field_name("body") |
| 136 | if not body: |
| 137 | body = next( |
| 138 | (c for c in node.children if c.type == "declaration_list"), None |
| 139 | ) |
| 140 | if body: |
| 141 | for child in body.children: |
| 142 | if child.type in ("method_declaration", "constructor_declaration"): |
| 143 | self._handle_method(child, source, file_path, store, stats, class_name=name) |
| 144 | |
| @@ -190,16 +186,11 @@ | |
| 190 | def _handle_using( |
| 191 | self, node, source: bytes, file_path: str, store: GraphStore, stats: dict |
| 192 | ) -> None: |
| 193 | raw = _node_text(node, source).strip() |
| 194 | # "using System.Collections.Generic;" or "using static ..." |
| 195 | module = ( |
| 196 | raw.removeprefix("using") |
| 197 | .removeprefix(" static") |
| 198 | .removesuffix(";") |
| 199 | .strip() |
| 200 | ) |
| 201 | if not module: |
| 202 | return |
| 203 | store.create_node( |
| 204 | NodeLabel.Import, |
| 205 | { |
| @@ -231,11 +222,15 @@ | |
| 231 | def walk(node): |
| 232 | if node.type == "invocation_expression": |
| 233 | func = node.child_by_field_name("function") |
| 234 | if not func: |
| 235 | func = next( |
| 236 | (c for c in node.children if c.type in ("identifier", "member_access_expression")), |
| 237 | None, |
| 238 | ) |
| 239 | if func: |
| 240 | callee = _node_text(func, source).split(".")[-1] |
| 241 | store.create_edge( |
| 242 |
| --- navegador/ingestion/csharp.py | |
| +++ navegador/ingestion/csharp.py | |
| @@ -18,13 +18,11 @@ | |
| 18 | import tree_sitter_c_sharp as tscsharp # type: ignore[import] |
| 19 | from tree_sitter import Language |
| 20 | |
| 21 | return Language(tscsharp.language()) |
| 22 | except ImportError as e: |
| 23 | raise ImportError("Install tree-sitter-c-sharp: pip install tree-sitter-c-sharp") from e |
| 24 | |
| 25 | |
| 26 | def _node_text(node, source: bytes) -> str: |
| 27 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 28 | |
| @@ -132,13 +130,11 @@ | |
| 130 | stats["edges"] += 1 |
| 131 | |
| 132 | # Walk class body for methods |
| 133 | body = node.child_by_field_name("body") |
| 134 | if not body: |
| 135 | body = next((c for c in node.children if c.type == "declaration_list"), None) |
| 136 | if body: |
| 137 | for child in body.children: |
| 138 | if child.type in ("method_declaration", "constructor_declaration"): |
| 139 | self._handle_method(child, source, file_path, store, stats, class_name=name) |
| 140 | |
| @@ -190,16 +186,11 @@ | |
| 186 | def _handle_using( |
| 187 | self, node, source: bytes, file_path: str, store: GraphStore, stats: dict |
| 188 | ) -> None: |
| 189 | raw = _node_text(node, source).strip() |
| 190 | # "using System.Collections.Generic;" or "using static ..." |
| 191 | module = raw.removeprefix("using").removeprefix(" static").removesuffix(";").strip() |
| 192 | if not module: |
| 193 | return |
| 194 | store.create_node( |
| 195 | NodeLabel.Import, |
| 196 | { |
| @@ -231,11 +222,15 @@ | |
| 222 | def walk(node): |
| 223 | if node.type == "invocation_expression": |
| 224 | func = node.child_by_field_name("function") |
| 225 | if not func: |
| 226 | func = next( |
| 227 | ( |
| 228 | c |
| 229 | for c in node.children |
| 230 | if c.type in ("identifier", "member_access_expression") |
| 231 | ), |
| 232 | None, |
| 233 | ) |
| 234 | if func: |
| 235 | callee = _node_text(func, source).split(".")[-1] |
| 236 | store.create_edge( |
| 237 |
+10
-16
| --- navegador/ingestion/kotlin.py | ||
| +++ navegador/ingestion/kotlin.py | ||
| @@ -18,13 +18,11 @@ | ||
| 18 | 18 | import tree_sitter_kotlin as tskotlin # type: ignore[import] |
| 19 | 19 | from tree_sitter import Language |
| 20 | 20 | |
| 21 | 21 | return Language(tskotlin.language()) |
| 22 | 22 | except ImportError as e: |
| 23 | - raise ImportError( | |
| 24 | - "Install tree-sitter-kotlin: pip install tree-sitter-kotlin" | |
| 25 | - ) from e | |
| 23 | + raise ImportError("Install tree-sitter-kotlin: pip install tree-sitter-kotlin") from e | |
| 26 | 24 | |
| 27 | 25 | |
| 28 | 26 | def _node_text(node, source: bytes) -> str: |
| 29 | 27 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 30 | 28 | |
| @@ -85,13 +83,11 @@ | ||
| 85 | 83 | self, node, source: bytes, file_path: str, store: GraphStore, stats: dict |
| 86 | 84 | ) -> None: |
| 87 | 85 | name_node = node.child_by_field_name("name") |
| 88 | 86 | if not name_node: |
| 89 | 87 | # fallback: first simple_identifier child |
| 90 | - name_node = next( | |
| 91 | - (c for c in node.children if c.type == "simple_identifier"), None | |
| 92 | - ) | |
| 88 | + name_node = next((c for c in node.children if c.type == "simple_identifier"), None) | |
| 93 | 89 | if not name_node: |
| 94 | 90 | return |
| 95 | 91 | name = _node_text(name_node, source) |
| 96 | 92 | |
| 97 | 93 | store.create_node( |
| @@ -115,13 +111,11 @@ | ||
| 115 | 111 | stats["edges"] += 1 |
| 116 | 112 | |
| 117 | 113 | # Walk class body for member functions |
| 118 | 114 | body = node.child_by_field_name("body") |
| 119 | 115 | if not body: |
| 120 | - body = next( | |
| 121 | - (c for c in node.children if c.type in ("class_body", "object_body")), None | |
| 122 | - ) | |
| 116 | + body = next((c for c in node.children if c.type in ("class_body", "object_body")), None) | |
| 123 | 117 | if body: |
| 124 | 118 | for child in body.children: |
| 125 | 119 | if child.type == "function_declaration": |
| 126 | 120 | self._handle_function(child, source, file_path, store, stats, class_name=name) |
| 127 | 121 | |
| @@ -134,13 +128,11 @@ | ||
| 134 | 128 | stats: dict, |
| 135 | 129 | class_name: str | None, |
| 136 | 130 | ) -> None: |
| 137 | 131 | name_node = node.child_by_field_name("name") |
| 138 | 132 | if not name_node: |
| 139 | - name_node = next( | |
| 140 | - (c for c in node.children if c.type == "simple_identifier"), None | |
| 141 | - ) | |
| 133 | + name_node = next((c for c in node.children if c.type == "simple_identifier"), None) | |
| 142 | 134 | if not name_node: |
| 143 | 135 | return |
| 144 | 136 | name = _node_text(name_node, source) |
| 145 | 137 | |
| 146 | 138 | label = NodeLabel.Method if class_name else NodeLabel.Function |
| @@ -211,11 +203,15 @@ | ||
| 211 | 203 | def walk(node): |
| 212 | 204 | if node.type == "call_expression": |
| 213 | 205 | func = node.child_by_field_name("calleeExpression") |
| 214 | 206 | if not func: |
| 215 | 207 | func = next( |
| 216 | - (c for c in node.children if c.type in ("simple_identifier", "navigation_expression")), | |
| 208 | + ( | |
| 209 | + c | |
| 210 | + for c in node.children | |
| 211 | + if c.type in ("simple_identifier", "navigation_expression") | |
| 212 | + ), | |
| 217 | 213 | None, |
| 218 | 214 | ) |
| 219 | 215 | if func: |
| 220 | 216 | callee = _node_text(func, source).split(".")[-1] |
| 221 | 217 | store.create_edge( |
| @@ -229,10 +225,8 @@ | ||
| 229 | 225 | for child in node.children: |
| 230 | 226 | walk(child) |
| 231 | 227 | |
| 232 | 228 | body = fn_node.child_by_field_name("body") |
| 233 | 229 | if not body: |
| 234 | - body = next( | |
| 235 | - (c for c in fn_node.children if c.type in ("function_body", "block")), None | |
| 236 | - ) | |
| 230 | + body = next((c for c in fn_node.children if c.type in ("function_body", "block")), None) | |
| 237 | 231 | if body: |
| 238 | 232 | walk(body) |
| 239 | 233 |
| --- navegador/ingestion/kotlin.py | |
| +++ navegador/ingestion/kotlin.py | |
| @@ -18,13 +18,11 @@ | |
| 18 | import tree_sitter_kotlin as tskotlin # type: ignore[import] |
| 19 | from tree_sitter import Language |
| 20 | |
| 21 | return Language(tskotlin.language()) |
| 22 | except ImportError as e: |
| 23 | raise ImportError( |
| 24 | "Install tree-sitter-kotlin: pip install tree-sitter-kotlin" |
| 25 | ) from e |
| 26 | |
| 27 | |
| 28 | def _node_text(node, source: bytes) -> str: |
| 29 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 30 | |
| @@ -85,13 +83,11 @@ | |
| 85 | self, node, source: bytes, file_path: str, store: GraphStore, stats: dict |
| 86 | ) -> None: |
| 87 | name_node = node.child_by_field_name("name") |
| 88 | if not name_node: |
| 89 | # fallback: first simple_identifier child |
| 90 | name_node = next( |
| 91 | (c for c in node.children if c.type == "simple_identifier"), None |
| 92 | ) |
| 93 | if not name_node: |
| 94 | return |
| 95 | name = _node_text(name_node, source) |
| 96 | |
| 97 | store.create_node( |
| @@ -115,13 +111,11 @@ | |
| 115 | stats["edges"] += 1 |
| 116 | |
| 117 | # Walk class body for member functions |
| 118 | body = node.child_by_field_name("body") |
| 119 | if not body: |
| 120 | body = next( |
| 121 | (c for c in node.children if c.type in ("class_body", "object_body")), None |
| 122 | ) |
| 123 | if body: |
| 124 | for child in body.children: |
| 125 | if child.type == "function_declaration": |
| 126 | self._handle_function(child, source, file_path, store, stats, class_name=name) |
| 127 | |
| @@ -134,13 +128,11 @@ | |
| 134 | stats: dict, |
| 135 | class_name: str | None, |
| 136 | ) -> None: |
| 137 | name_node = node.child_by_field_name("name") |
| 138 | if not name_node: |
| 139 | name_node = next( |
| 140 | (c for c in node.children if c.type == "simple_identifier"), None |
| 141 | ) |
| 142 | if not name_node: |
| 143 | return |
| 144 | name = _node_text(name_node, source) |
| 145 | |
| 146 | label = NodeLabel.Method if class_name else NodeLabel.Function |
| @@ -211,11 +203,15 @@ | |
| 211 | def walk(node): |
| 212 | if node.type == "call_expression": |
| 213 | func = node.child_by_field_name("calleeExpression") |
| 214 | if not func: |
| 215 | func = next( |
| 216 | (c for c in node.children if c.type in ("simple_identifier", "navigation_expression")), |
| 217 | None, |
| 218 | ) |
| 219 | if func: |
| 220 | callee = _node_text(func, source).split(".")[-1] |
| 221 | store.create_edge( |
| @@ -229,10 +225,8 @@ | |
| 229 | for child in node.children: |
| 230 | walk(child) |
| 231 | |
| 232 | body = fn_node.child_by_field_name("body") |
| 233 | if not body: |
| 234 | body = next( |
| 235 | (c for c in fn_node.children if c.type in ("function_body", "block")), None |
| 236 | ) |
| 237 | if body: |
| 238 | walk(body) |
| 239 |
| --- navegador/ingestion/kotlin.py | |
| +++ navegador/ingestion/kotlin.py | |
| @@ -18,13 +18,11 @@ | |
| 18 | import tree_sitter_kotlin as tskotlin # type: ignore[import] |
| 19 | from tree_sitter import Language |
| 20 | |
| 21 | return Language(tskotlin.language()) |
| 22 | except ImportError as e: |
| 23 | raise ImportError("Install tree-sitter-kotlin: pip install tree-sitter-kotlin") from e |
| 24 | |
| 25 | |
| 26 | def _node_text(node, source: bytes) -> str: |
| 27 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 28 | |
| @@ -85,13 +83,11 @@ | |
| 83 | self, node, source: bytes, file_path: str, store: GraphStore, stats: dict |
| 84 | ) -> None: |
| 85 | name_node = node.child_by_field_name("name") |
| 86 | if not name_node: |
| 87 | # fallback: first simple_identifier child |
| 88 | name_node = next((c for c in node.children if c.type == "simple_identifier"), None) |
| 89 | if not name_node: |
| 90 | return |
| 91 | name = _node_text(name_node, source) |
| 92 | |
| 93 | store.create_node( |
| @@ -115,13 +111,11 @@ | |
| 111 | stats["edges"] += 1 |
| 112 | |
| 113 | # Walk class body for member functions |
| 114 | body = node.child_by_field_name("body") |
| 115 | if not body: |
| 116 | body = next((c for c in node.children if c.type in ("class_body", "object_body")), None) |
| 117 | if body: |
| 118 | for child in body.children: |
| 119 | if child.type == "function_declaration": |
| 120 | self._handle_function(child, source, file_path, store, stats, class_name=name) |
| 121 | |
| @@ -134,13 +128,11 @@ | |
| 128 | stats: dict, |
| 129 | class_name: str | None, |
| 130 | ) -> None: |
| 131 | name_node = node.child_by_field_name("name") |
| 132 | if not name_node: |
| 133 | name_node = next((c for c in node.children if c.type == "simple_identifier"), None) |
| 134 | if not name_node: |
| 135 | return |
| 136 | name = _node_text(name_node, source) |
| 137 | |
| 138 | label = NodeLabel.Method if class_name else NodeLabel.Function |
| @@ -211,11 +203,15 @@ | |
| 203 | def walk(node): |
| 204 | if node.type == "call_expression": |
| 205 | func = node.child_by_field_name("calleeExpression") |
| 206 | if not func: |
| 207 | func = next( |
| 208 | ( |
| 209 | c |
| 210 | for c in node.children |
| 211 | if c.type in ("simple_identifier", "navigation_expression") |
| 212 | ), |
| 213 | None, |
| 214 | ) |
| 215 | if func: |
| 216 | callee = _node_text(func, source).split(".")[-1] |
| 217 | store.create_edge( |
| @@ -229,10 +225,8 @@ | |
| 225 | for child in node.children: |
| 226 | walk(child) |
| 227 | |
| 228 | body = fn_node.child_by_field_name("body") |
| 229 | if not body: |
| 230 | body = next((c for c in fn_node.children if c.type in ("function_body", "block")), None) |
| 231 | if body: |
| 232 | walk(body) |
| 233 |
| --- navegador/ingestion/optimization.py | ||
| +++ navegador/ingestion/optimization.py | ||
| @@ -296,11 +296,13 @@ | ||
| 296 | 296 | nodes: list[NodeDescriptor] = [] |
| 297 | 297 | for row in rows: |
| 298 | 298 | label, name, line_start = row[0], row[1], row[2] |
| 299 | 299 | if name is None or line_start is None: |
| 300 | 300 | continue |
| 301 | - nodes.append(NodeDescriptor(label=str(label), name=str(name), line_start=int(line_start))) | |
| 301 | + nodes.append( | |
| 302 | + NodeDescriptor(label=str(label), name=str(name), line_start=int(line_start)) | |
| 303 | + ) | |
| 302 | 304 | return nodes |
| 303 | 305 | |
| 304 | 306 | |
| 305 | 307 | # ── #45 — Parallel ingestion with worker pool ───────────────────────────────── |
| 306 | 308 | |
| @@ -368,13 +370,11 @@ | ||
| 368 | 370 | }, |
| 369 | 371 | ) |
| 370 | 372 | |
| 371 | 373 | # Collect all candidate files up-front (fast, single-threaded). |
| 372 | 374 | candidate_files = [ |
| 373 | - f | |
| 374 | - for f in self._ingester._iter_source_files(repo_path) | |
| 375 | - if LANGUAGE_MAP.get(f.suffix) | |
| 375 | + f for f in self._ingester._iter_source_files(repo_path) if LANGUAGE_MAP.get(f.suffix) | |
| 376 | 376 | ] |
| 377 | 377 | |
| 378 | 378 | aggregated: dict[str, int] = { |
| 379 | 379 | "files": 0, |
| 380 | 380 | "functions": 0, |
| @@ -396,13 +396,11 @@ | ||
| 396 | 396 | return |
| 397 | 397 | |
| 398 | 398 | if incremental: |
| 399 | 399 | self._ingester._clear_file_subgraph(rel_path) |
| 400 | 400 | |
| 401 | - parse_path, effective_root = self._ingester._maybe_redact_to_tmp( | |
| 402 | - source_file, repo_path | |
| 403 | - ) | |
| 401 | + parse_path, effective_root = self._ingester._maybe_redact_to_tmp(source_file, repo_path) | |
| 404 | 402 | try: |
| 405 | 403 | parser = self._ingester._get_parser(language) |
| 406 | 404 | file_stats = parser.parse_file(parse_path, effective_root, self._store) |
| 407 | 405 | self._ingester._store_file_hash(rel_path, content_hash) |
| 408 | 406 | with lock: |
| 409 | 407 |
| --- navegador/ingestion/optimization.py | |
| +++ navegador/ingestion/optimization.py | |
| @@ -296,11 +296,13 @@ | |
| 296 | nodes: list[NodeDescriptor] = [] |
| 297 | for row in rows: |
| 298 | label, name, line_start = row[0], row[1], row[2] |
| 299 | if name is None or line_start is None: |
| 300 | continue |
| 301 | nodes.append(NodeDescriptor(label=str(label), name=str(name), line_start=int(line_start))) |
| 302 | return nodes |
| 303 | |
| 304 | |
| 305 | # ── #45 — Parallel ingestion with worker pool ───────────────────────────────── |
| 306 | |
| @@ -368,13 +370,11 @@ | |
| 368 | }, |
| 369 | ) |
| 370 | |
| 371 | # Collect all candidate files up-front (fast, single-threaded). |
| 372 | candidate_files = [ |
| 373 | f |
| 374 | for f in self._ingester._iter_source_files(repo_path) |
| 375 | if LANGUAGE_MAP.get(f.suffix) |
| 376 | ] |
| 377 | |
| 378 | aggregated: dict[str, int] = { |
| 379 | "files": 0, |
| 380 | "functions": 0, |
| @@ -396,13 +396,11 @@ | |
| 396 | return |
| 397 | |
| 398 | if incremental: |
| 399 | self._ingester._clear_file_subgraph(rel_path) |
| 400 | |
| 401 | parse_path, effective_root = self._ingester._maybe_redact_to_tmp( |
| 402 | source_file, repo_path |
| 403 | ) |
| 404 | try: |
| 405 | parser = self._ingester._get_parser(language) |
| 406 | file_stats = parser.parse_file(parse_path, effective_root, self._store) |
| 407 | self._ingester._store_file_hash(rel_path, content_hash) |
| 408 | with lock: |
| 409 |
| --- navegador/ingestion/optimization.py | |
| +++ navegador/ingestion/optimization.py | |
| @@ -296,11 +296,13 @@ | |
| 296 | nodes: list[NodeDescriptor] = [] |
| 297 | for row in rows: |
| 298 | label, name, line_start = row[0], row[1], row[2] |
| 299 | if name is None or line_start is None: |
| 300 | continue |
| 301 | nodes.append( |
| 302 | NodeDescriptor(label=str(label), name=str(name), line_start=int(line_start)) |
| 303 | ) |
| 304 | return nodes |
| 305 | |
| 306 | |
| 307 | # ── #45 — Parallel ingestion with worker pool ───────────────────────────────── |
| 308 | |
| @@ -368,13 +370,11 @@ | |
| 370 | }, |
| 371 | ) |
| 372 | |
| 373 | # Collect all candidate files up-front (fast, single-threaded). |
| 374 | candidate_files = [ |
| 375 | f for f in self._ingester._iter_source_files(repo_path) if LANGUAGE_MAP.get(f.suffix) |
| 376 | ] |
| 377 | |
| 378 | aggregated: dict[str, int] = { |
| 379 | "files": 0, |
| 380 | "functions": 0, |
| @@ -396,13 +396,11 @@ | |
| 396 | return |
| 397 | |
| 398 | if incremental: |
| 399 | self._ingester._clear_file_subgraph(rel_path) |
| 400 | |
| 401 | parse_path, effective_root = self._ingester._maybe_redact_to_tmp(source_file, repo_path) |
| 402 | try: |
| 403 | parser = self._ingester._get_parser(language) |
| 404 | file_stats = parser.parse_file(parse_path, effective_root, self._store) |
| 405 | self._ingester._store_file_hash(rel_path, content_hash) |
| 406 | with lock: |
| 407 |
| --- navegador/ingestion/parser.py | ||
| +++ navegador/ingestion/parser.py | ||
| @@ -151,10 +151,11 @@ | ||
| 151 | 151 | logger.exception("Failed to parse %s", source_file) |
| 152 | 152 | finally: |
| 153 | 153 | # Remove the temporary redacted directory if one was created |
| 154 | 154 | if effective_root is not repo_path: |
| 155 | 155 | import shutil |
| 156 | + | |
| 156 | 157 | shutil.rmtree(effective_root, ignore_errors=True) |
| 157 | 158 | |
| 158 | 159 | logger.info( |
| 159 | 160 | "Ingested %s: %d files, %d functions, %d classes, %d skipped", |
| 160 | 161 | repo_path.name, |
| 161 | 162 |
| --- navegador/ingestion/parser.py | |
| +++ navegador/ingestion/parser.py | |
| @@ -151,10 +151,11 @@ | |
| 151 | logger.exception("Failed to parse %s", source_file) |
| 152 | finally: |
| 153 | # Remove the temporary redacted directory if one was created |
| 154 | if effective_root is not repo_path: |
| 155 | import shutil |
| 156 | shutil.rmtree(effective_root, ignore_errors=True) |
| 157 | |
| 158 | logger.info( |
| 159 | "Ingested %s: %d files, %d functions, %d classes, %d skipped", |
| 160 | repo_path.name, |
| 161 |
| --- navegador/ingestion/parser.py | |
| +++ navegador/ingestion/parser.py | |
| @@ -151,10 +151,11 @@ | |
| 151 | logger.exception("Failed to parse %s", source_file) |
| 152 | finally: |
| 153 | # Remove the temporary redacted directory if one was created |
| 154 | if effective_root is not repo_path: |
| 155 | import shutil |
| 156 | |
| 157 | shutil.rmtree(effective_root, ignore_errors=True) |
| 158 | |
| 159 | logger.info( |
| 160 | "Ingested %s: %d files, %d functions, %d classes, %d skipped", |
| 161 | repo_path.name, |
| 162 |
+1
-3
| --- navegador/ingestion/php.py | ||
| +++ navegador/ingestion/php.py | ||
| @@ -22,13 +22,11 @@ | ||
| 22 | 22 | lang_fn = getattr(tsphp, "language_php", None) or getattr(tsphp, "language", None) |
| 23 | 23 | if lang_fn is None: |
| 24 | 24 | raise ImportError("tree_sitter_php has no language() or language_php() callable") |
| 25 | 25 | return Language(lang_fn()) |
| 26 | 26 | except ImportError as e: |
| 27 | - raise ImportError( | |
| 28 | - "Install tree-sitter-php: pip install tree-sitter-php" | |
| 29 | - ) from e | |
| 27 | + raise ImportError("Install tree-sitter-php: pip install tree-sitter-php") from e | |
| 30 | 28 | |
| 31 | 29 | |
| 32 | 30 | def _node_text(node, source: bytes) -> str: |
| 33 | 31 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 34 | 32 | |
| 35 | 33 |
| --- navegador/ingestion/php.py | |
| +++ navegador/ingestion/php.py | |
| @@ -22,13 +22,11 @@ | |
| 22 | lang_fn = getattr(tsphp, "language_php", None) or getattr(tsphp, "language", None) |
| 23 | if lang_fn is None: |
| 24 | raise ImportError("tree_sitter_php has no language() or language_php() callable") |
| 25 | return Language(lang_fn()) |
| 26 | except ImportError as e: |
| 27 | raise ImportError( |
| 28 | "Install tree-sitter-php: pip install tree-sitter-php" |
| 29 | ) from e |
| 30 | |
| 31 | |
| 32 | def _node_text(node, source: bytes) -> str: |
| 33 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 34 | |
| 35 |
| --- navegador/ingestion/php.py | |
| +++ navegador/ingestion/php.py | |
| @@ -22,13 +22,11 @@ | |
| 22 | lang_fn = getattr(tsphp, "language_php", None) or getattr(tsphp, "language", None) |
| 23 | if lang_fn is None: |
| 24 | raise ImportError("tree_sitter_php has no language() or language_php() callable") |
| 25 | return Language(lang_fn()) |
| 26 | except ImportError as e: |
| 27 | raise ImportError("Install tree-sitter-php: pip install tree-sitter-php") from e |
| 28 | |
| 29 | |
| 30 | def _node_text(node, source: bytes) -> str: |
| 31 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 32 | |
| 33 |
+2
-6
| --- navegador/ingestion/ruby.py | ||
| +++ navegador/ingestion/ruby.py | ||
| @@ -18,13 +18,11 @@ | ||
| 18 | 18 | import tree_sitter_ruby as tsruby # type: ignore[import] |
| 19 | 19 | from tree_sitter import Language |
| 20 | 20 | |
| 21 | 21 | return Language(tsruby.language()) |
| 22 | 22 | except ImportError as e: |
| 23 | - raise ImportError( | |
| 24 | - "Install tree-sitter-ruby: pip install tree-sitter-ruby" | |
| 25 | - ) from e | |
| 23 | + raise ImportError("Install tree-sitter-ruby: pip install tree-sitter-ruby") from e | |
| 26 | 24 | |
| 27 | 25 | |
| 28 | 26 | def _node_text(node, source: bytes) -> str: |
| 29 | 27 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 30 | 28 | |
| @@ -275,13 +273,11 @@ | ||
| 275 | 273 | ) -> None: |
| 276 | 274 | def walk(node): |
| 277 | 275 | if node.type == "call": |
| 278 | 276 | method_node = node.child_by_field_name("method") |
| 279 | 277 | if not method_node: |
| 280 | - method_node = next( | |
| 281 | - (c for c in node.children if c.type == "identifier"), None | |
| 282 | - ) | |
| 278 | + method_node = next((c for c in node.children if c.type == "identifier"), None) | |
| 283 | 279 | if method_node: |
| 284 | 280 | callee = _node_text(method_node, source) |
| 285 | 281 | if callee not in ("require", "require_relative", "load"): |
| 286 | 282 | store.create_edge( |
| 287 | 283 | fn_label, |
| 288 | 284 |
| --- navegador/ingestion/ruby.py | |
| +++ navegador/ingestion/ruby.py | |
| @@ -18,13 +18,11 @@ | |
| 18 | import tree_sitter_ruby as tsruby # type: ignore[import] |
| 19 | from tree_sitter import Language |
| 20 | |
| 21 | return Language(tsruby.language()) |
| 22 | except ImportError as e: |
| 23 | raise ImportError( |
| 24 | "Install tree-sitter-ruby: pip install tree-sitter-ruby" |
| 25 | ) from e |
| 26 | |
| 27 | |
| 28 | def _node_text(node, source: bytes) -> str: |
| 29 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 30 | |
| @@ -275,13 +273,11 @@ | |
| 275 | ) -> None: |
| 276 | def walk(node): |
| 277 | if node.type == "call": |
| 278 | method_node = node.child_by_field_name("method") |
| 279 | if not method_node: |
| 280 | method_node = next( |
| 281 | (c for c in node.children if c.type == "identifier"), None |
| 282 | ) |
| 283 | if method_node: |
| 284 | callee = _node_text(method_node, source) |
| 285 | if callee not in ("require", "require_relative", "load"): |
| 286 | store.create_edge( |
| 287 | fn_label, |
| 288 |
| --- navegador/ingestion/ruby.py | |
| +++ navegador/ingestion/ruby.py | |
| @@ -18,13 +18,11 @@ | |
| 18 | import tree_sitter_ruby as tsruby # type: ignore[import] |
| 19 | from tree_sitter import Language |
| 20 | |
| 21 | return Language(tsruby.language()) |
| 22 | except ImportError as e: |
| 23 | raise ImportError("Install tree-sitter-ruby: pip install tree-sitter-ruby") from e |
| 24 | |
| 25 | |
| 26 | def _node_text(node, source: bytes) -> str: |
| 27 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 28 | |
| @@ -275,13 +273,11 @@ | |
| 273 | ) -> None: |
| 274 | def walk(node): |
| 275 | if node.type == "call": |
| 276 | method_node = node.child_by_field_name("method") |
| 277 | if not method_node: |
| 278 | method_node = next((c for c in node.children if c.type == "identifier"), None) |
| 279 | if method_node: |
| 280 | callee = _node_text(method_node, source) |
| 281 | if callee not in ("require", "require_relative", "load"): |
| 282 | store.create_edge( |
| 283 | fn_label, |
| 284 |
+12
-5
| --- navegador/ingestion/swift.py | ||
| +++ navegador/ingestion/swift.py | ||
| @@ -18,13 +18,11 @@ | ||
| 18 | 18 | import tree_sitter_swift as tsswift # type: ignore[import] |
| 19 | 19 | from tree_sitter import Language |
| 20 | 20 | |
| 21 | 21 | return Language(tsswift.language()) |
| 22 | 22 | except ImportError as e: |
| 23 | - raise ImportError( | |
| 24 | - "Install tree-sitter-swift: pip install tree-sitter-swift" | |
| 25 | - ) from e | |
| 23 | + raise ImportError("Install tree-sitter-swift: pip install tree-sitter-swift") from e | |
| 26 | 24 | |
| 27 | 25 | |
| 28 | 26 | def _node_text(node, source: bytes) -> str: |
| 29 | 27 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 30 | 28 | |
| @@ -137,11 +135,16 @@ | ||
| 137 | 135 | |
| 138 | 136 | # Walk body for member functions |
| 139 | 137 | body = node.child_by_field_name("body") |
| 140 | 138 | if not body: |
| 141 | 139 | body = next( |
| 142 | - (c for c in node.children if c.type in ("class_body", "struct_body", "enum_body", "protocol_body", "extension_body")), | |
| 140 | + ( | |
| 141 | + c | |
| 142 | + for c in node.children | |
| 143 | + if c.type | |
| 144 | + in ("class_body", "struct_body", "enum_body", "protocol_body", "extension_body") | |
| 145 | + ), | |
| 143 | 146 | None, |
| 144 | 147 | ) |
| 145 | 148 | if body: |
| 146 | 149 | for child in body.children: |
| 147 | 150 | if child.type == "function_declaration": |
| @@ -232,11 +235,15 @@ | ||
| 232 | 235 | def walk(node): |
| 233 | 236 | if node.type == "call_expression": |
| 234 | 237 | func = node.child_by_field_name("function") |
| 235 | 238 | if not func: |
| 236 | 239 | func = next( |
| 237 | - (c for c in node.children if c.type in ("simple_identifier", "navigation_expression")), | |
| 240 | + ( | |
| 241 | + c | |
| 242 | + for c in node.children | |
| 243 | + if c.type in ("simple_identifier", "navigation_expression") | |
| 244 | + ), | |
| 238 | 245 | None, |
| 239 | 246 | ) |
| 240 | 247 | if func: |
| 241 | 248 | callee = _node_text(func, source).split(".")[-1] |
| 242 | 249 | store.create_edge( |
| 243 | 250 |
| --- navegador/ingestion/swift.py | |
| +++ navegador/ingestion/swift.py | |
| @@ -18,13 +18,11 @@ | |
| 18 | import tree_sitter_swift as tsswift # type: ignore[import] |
| 19 | from tree_sitter import Language |
| 20 | |
| 21 | return Language(tsswift.language()) |
| 22 | except ImportError as e: |
| 23 | raise ImportError( |
| 24 | "Install tree-sitter-swift: pip install tree-sitter-swift" |
| 25 | ) from e |
| 26 | |
| 27 | |
| 28 | def _node_text(node, source: bytes) -> str: |
| 29 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 30 | |
| @@ -137,11 +135,16 @@ | |
| 137 | |
| 138 | # Walk body for member functions |
| 139 | body = node.child_by_field_name("body") |
| 140 | if not body: |
| 141 | body = next( |
| 142 | (c for c in node.children if c.type in ("class_body", "struct_body", "enum_body", "protocol_body", "extension_body")), |
| 143 | None, |
| 144 | ) |
| 145 | if body: |
| 146 | for child in body.children: |
| 147 | if child.type == "function_declaration": |
| @@ -232,11 +235,15 @@ | |
| 232 | def walk(node): |
| 233 | if node.type == "call_expression": |
| 234 | func = node.child_by_field_name("function") |
| 235 | if not func: |
| 236 | func = next( |
| 237 | (c for c in node.children if c.type in ("simple_identifier", "navigation_expression")), |
| 238 | None, |
| 239 | ) |
| 240 | if func: |
| 241 | callee = _node_text(func, source).split(".")[-1] |
| 242 | store.create_edge( |
| 243 |
| --- navegador/ingestion/swift.py | |
| +++ navegador/ingestion/swift.py | |
| @@ -18,13 +18,11 @@ | |
| 18 | import tree_sitter_swift as tsswift # type: ignore[import] |
| 19 | from tree_sitter import Language |
| 20 | |
| 21 | return Language(tsswift.language()) |
| 22 | except ImportError as e: |
| 23 | raise ImportError("Install tree-sitter-swift: pip install tree-sitter-swift") from e |
| 24 | |
| 25 | |
| 26 | def _node_text(node, source: bytes) -> str: |
| 27 | return source[node.start_byte : node.end_byte].decode("utf-8", errors="replace") |
| 28 | |
| @@ -137,11 +135,16 @@ | |
| 135 | |
| 136 | # Walk body for member functions |
| 137 | body = node.child_by_field_name("body") |
| 138 | if not body: |
| 139 | body = next( |
| 140 | ( |
| 141 | c |
| 142 | for c in node.children |
| 143 | if c.type |
| 144 | in ("class_body", "struct_body", "enum_body", "protocol_body", "extension_body") |
| 145 | ), |
| 146 | None, |
| 147 | ) |
| 148 | if body: |
| 149 | for child in body.children: |
| 150 | if child.type == "function_declaration": |
| @@ -232,11 +235,15 @@ | |
| 235 | def walk(node): |
| 236 | if node.type == "call_expression": |
| 237 | func = node.child_by_field_name("function") |
| 238 | if not func: |
| 239 | func = next( |
| 240 | ( |
| 241 | c |
| 242 | for c in node.children |
| 243 | if c.type in ("simple_identifier", "navigation_expression") |
| 244 | ), |
| 245 | None, |
| 246 | ) |
| 247 | if func: |
| 248 | callee = _node_text(func, source).split(".")[-1] |
| 249 | store.create_edge( |
| 250 |
| --- navegador/intelligence/community.py | ||
| +++ navegador/intelligence/community.py | ||
| @@ -213,14 +213,11 @@ | ||
| 213 | 213 | member_set = set(members_ids) |
| 214 | 214 | member_names = [nodes[nid]["name"] for nid in members_ids if nid in nodes] |
| 215 | 215 | |
| 216 | 216 | # Density = actual internal edges / possible internal edges |
| 217 | 217 | internal_edges = sum( |
| 218 | - 1 | |
| 219 | - for nid in members_ids | |
| 220 | - for nb in adj.get(nid, []) | |
| 221 | - if nb in member_set | |
| 218 | + 1 for nid in members_ids for nb in adj.get(nid, []) if nb in member_set | |
| 222 | 219 | ) |
| 223 | 220 | # Each undirected edge counted twice in the adjacency list |
| 224 | 221 | internal_edges //= 2 |
| 225 | 222 | n = len(members_ids) |
| 226 | 223 | possible = n * (n - 1) / 2 |
| 227 | 224 |
| --- navegador/intelligence/community.py | |
| +++ navegador/intelligence/community.py | |
| @@ -213,14 +213,11 @@ | |
| 213 | member_set = set(members_ids) |
| 214 | member_names = [nodes[nid]["name"] for nid in members_ids if nid in nodes] |
| 215 | |
| 216 | # Density = actual internal edges / possible internal edges |
| 217 | internal_edges = sum( |
| 218 | 1 |
| 219 | for nid in members_ids |
| 220 | for nb in adj.get(nid, []) |
| 221 | if nb in member_set |
| 222 | ) |
| 223 | # Each undirected edge counted twice in the adjacency list |
| 224 | internal_edges //= 2 |
| 225 | n = len(members_ids) |
| 226 | possible = n * (n - 1) / 2 |
| 227 |
| --- navegador/intelligence/community.py | |
| +++ navegador/intelligence/community.py | |
| @@ -213,14 +213,11 @@ | |
| 213 | member_set = set(members_ids) |
| 214 | member_names = [nodes[nid]["name"] for nid in members_ids if nid in nodes] |
| 215 | |
| 216 | # Density = actual internal edges / possible internal edges |
| 217 | internal_edges = sum( |
| 218 | 1 for nid in members_ids for nb in adj.get(nid, []) if nb in member_set |
| 219 | ) |
| 220 | # Each undirected edge counted twice in the adjacency list |
| 221 | internal_edges //= 2 |
| 222 | n = len(members_ids) |
| 223 | possible = n * (n - 1) / 2 |
| 224 |
+3
-9
| --- navegador/intelligence/docgen.py | ||
| +++ navegador/intelligence/docgen.py | ||
| @@ -103,13 +103,11 @@ | ||
| 103 | 103 | store: A :class:`~navegador.graph.GraphStore` instance. |
| 104 | 104 | provider: Optional :class:`~navegador.llm.LLMProvider`. When |
| 105 | 105 | ``None`` (default) template-based generation is used. |
| 106 | 106 | """ |
| 107 | 107 | |
| 108 | - def __init__( | |
| 109 | - self, store: "GraphStore", provider: "LLMProvider | None" = None | |
| 110 | - ) -> None: | |
| 108 | + def __init__(self, store: "GraphStore", provider: "LLMProvider | None" = None) -> None: | |
| 111 | 109 | self._store = store |
| 112 | 110 | self._provider = provider |
| 113 | 111 | |
| 114 | 112 | # ── Public API ──────────────────────────────────────────────────────────── |
| 115 | 113 | |
| @@ -167,13 +165,11 @@ | ||
| 167 | 165 | if not rows: |
| 168 | 166 | lines.append("_No symbols found in the graph for this file._") |
| 169 | 167 | return "\n".join(lines) |
| 170 | 168 | |
| 171 | 169 | for row in rows: |
| 172 | - sym_type, name, docstring, signature, line = ( | |
| 173 | - row[0], row[1], row[2], row[3], row[4] | |
| 174 | - ) | |
| 170 | + sym_type, name, docstring, signature, line = (row[0], row[1], row[2], row[3], row[4]) | |
| 175 | 171 | lines.append(f"## {sym_type}: `{name}`") |
| 176 | 172 | if line is not None: |
| 177 | 173 | lines.append(f"_Line {line}_") |
| 178 | 174 | if signature: |
| 179 | 175 | lines += ["", f"```python\n{signature}\n```"] |
| @@ -200,13 +196,11 @@ | ||
| 200 | 196 | |
| 201 | 197 | for fp, file_rows in sorted(files.items()): |
| 202 | 198 | lines.append(f"## `{fp}`") |
| 203 | 199 | lines.append("") |
| 204 | 200 | for row in file_rows: |
| 205 | - sym_type, name, _, docstring, signature = ( | |
| 206 | - row[0], row[1], row[2], row[3], row[4] | |
| 207 | - ) | |
| 201 | + sym_type, name, _, docstring, signature = (row[0], row[1], row[2], row[3], row[4]) | |
| 208 | 202 | lines.append(f"### {sym_type}: `{name}`") |
| 209 | 203 | if signature: |
| 210 | 204 | lines += ["", f"```python\n{signature}\n```"] |
| 211 | 205 | if docstring: |
| 212 | 206 | lines += ["", docstring] |
| 213 | 207 |
| --- navegador/intelligence/docgen.py | |
| +++ navegador/intelligence/docgen.py | |
| @@ -103,13 +103,11 @@ | |
| 103 | store: A :class:`~navegador.graph.GraphStore` instance. |
| 104 | provider: Optional :class:`~navegador.llm.LLMProvider`. When |
| 105 | ``None`` (default) template-based generation is used. |
| 106 | """ |
| 107 | |
| 108 | def __init__( |
| 109 | self, store: "GraphStore", provider: "LLMProvider | None" = None |
| 110 | ) -> None: |
| 111 | self._store = store |
| 112 | self._provider = provider |
| 113 | |
| 114 | # ── Public API ──────────────────────────────────────────────────────────── |
| 115 | |
| @@ -167,13 +165,11 @@ | |
| 167 | if not rows: |
| 168 | lines.append("_No symbols found in the graph for this file._") |
| 169 | return "\n".join(lines) |
| 170 | |
| 171 | for row in rows: |
| 172 | sym_type, name, docstring, signature, line = ( |
| 173 | row[0], row[1], row[2], row[3], row[4] |
| 174 | ) |
| 175 | lines.append(f"## {sym_type}: `{name}`") |
| 176 | if line is not None: |
| 177 | lines.append(f"_Line {line}_") |
| 178 | if signature: |
| 179 | lines += ["", f"```python\n{signature}\n```"] |
| @@ -200,13 +196,11 @@ | |
| 200 | |
| 201 | for fp, file_rows in sorted(files.items()): |
| 202 | lines.append(f"## `{fp}`") |
| 203 | lines.append("") |
| 204 | for row in file_rows: |
| 205 | sym_type, name, _, docstring, signature = ( |
| 206 | row[0], row[1], row[2], row[3], row[4] |
| 207 | ) |
| 208 | lines.append(f"### {sym_type}: `{name}`") |
| 209 | if signature: |
| 210 | lines += ["", f"```python\n{signature}\n```"] |
| 211 | if docstring: |
| 212 | lines += ["", docstring] |
| 213 |
| --- navegador/intelligence/docgen.py | |
| +++ navegador/intelligence/docgen.py | |
| @@ -103,13 +103,11 @@ | |
| 103 | store: A :class:`~navegador.graph.GraphStore` instance. |
| 104 | provider: Optional :class:`~navegador.llm.LLMProvider`. When |
| 105 | ``None`` (default) template-based generation is used. |
| 106 | """ |
| 107 | |
| 108 | def __init__(self, store: "GraphStore", provider: "LLMProvider | None" = None) -> None: |
| 109 | self._store = store |
| 110 | self._provider = provider |
| 111 | |
| 112 | # ── Public API ──────────────────────────────────────────────────────────── |
| 113 | |
| @@ -167,13 +165,11 @@ | |
| 165 | if not rows: |
| 166 | lines.append("_No symbols found in the graph for this file._") |
| 167 | return "\n".join(lines) |
| 168 | |
| 169 | for row in rows: |
| 170 | sym_type, name, docstring, signature, line = (row[0], row[1], row[2], row[3], row[4]) |
| 171 | lines.append(f"## {sym_type}: `{name}`") |
| 172 | if line is not None: |
| 173 | lines.append(f"_Line {line}_") |
| 174 | if signature: |
| 175 | lines += ["", f"```python\n{signature}\n```"] |
| @@ -200,13 +196,11 @@ | |
| 196 | |
| 197 | for fp, file_rows in sorted(files.items()): |
| 198 | lines.append(f"## `{fp}`") |
| 199 | lines.append("") |
| 200 | for row in file_rows: |
| 201 | sym_type, name, _, docstring, signature = (row[0], row[1], row[2], row[3], row[4]) |
| 202 | lines.append(f"### {sym_type}: `{name}`") |
| 203 | if signature: |
| 204 | lines += ["", f"```python\n{signature}\n```"] |
| 205 | if docstring: |
| 206 | lines += ["", docstring] |
| 207 |
+6
-15
| --- navegador/intelligence/nlp.py | ||
| +++ navegador/intelligence/nlp.py | ||
| @@ -128,13 +128,11 @@ | ||
| 128 | 128 | |
| 129 | 129 | Returns: |
| 130 | 130 | A human-readable answer string. |
| 131 | 131 | """ |
| 132 | 132 | # Step 1: translate question → Cypher |
| 133 | - cypher_prompt = _NL_TO_CYPHER_PROMPT.format( | |
| 134 | - schema=_SCHEMA_SUMMARY, question=question | |
| 135 | - ) | |
| 133 | + cypher_prompt = _NL_TO_CYPHER_PROMPT.format(schema=_SCHEMA_SUMMARY, question=question) | |
| 136 | 134 | cypher = self._provider.complete(cypher_prompt).strip() |
| 137 | 135 | |
| 138 | 136 | # Strip any accidental markdown fences the model may still produce |
| 139 | 137 | cypher = _strip_fences(cypher) |
| 140 | 138 | |
| @@ -142,19 +140,16 @@ | ||
| 142 | 140 | try: |
| 143 | 141 | result = self._store.query(cypher, {}) |
| 144 | 142 | rows = result.result_set or [] |
| 145 | 143 | except Exception as exc: # noqa: BLE001 |
| 146 | 144 | return ( |
| 147 | - f"Failed to execute the generated Cypher query.\n\n" | |
| 148 | - f"Query: {cypher}\n\nError: {exc}" | |
| 145 | + f"Failed to execute the generated Cypher query.\n\nQuery: {cypher}\n\nError: {exc}" | |
| 149 | 146 | ) |
| 150 | 147 | |
| 151 | 148 | # Step 3: format result |
| 152 | 149 | rows_text = json.dumps(rows[:50], indent=2, default=str) |
| 153 | - fmt_prompt = _FORMAT_RESULT_PROMPT.format( | |
| 154 | - question=question, cypher=cypher, rows=rows_text | |
| 155 | - ) | |
| 150 | + fmt_prompt = _FORMAT_RESULT_PROMPT.format(question=question, cypher=cypher, rows=rows_text) | |
| 156 | 151 | return self._provider.complete(fmt_prompt) |
| 157 | 152 | |
| 158 | 153 | # ── Community naming ────────────────────────────────────────────────── |
| 159 | 154 | |
| 160 | 155 | def name_communities(self, communities: list["Community"]) -> list[dict[str, Any]]: |
| @@ -221,26 +216,22 @@ | ||
| 221 | 216 | signature = "" |
| 222 | 217 | fp = file_path |
| 223 | 218 | |
| 224 | 219 | if rows: |
| 225 | 220 | row = rows[0] |
| 226 | - node_type, _, fp, docstring, signature = ( | |
| 227 | - row[0], row[1], row[2], row[3], row[4] | |
| 228 | - ) | |
| 221 | + node_type, _, fp, docstring, signature = (row[0], row[1], row[2], row[3], row[4]) | |
| 229 | 222 | |
| 230 | 223 | # Fetch callers |
| 231 | 224 | callers_result = self._store.query( |
| 232 | - "MATCH (caller)-[:CALLS]->(n {name: $name}) " | |
| 233 | - "RETURN caller.name LIMIT 10", | |
| 225 | + "MATCH (caller)-[:CALLS]->(n {name: $name}) RETURN caller.name LIMIT 10", | |
| 234 | 226 | {"name": name}, |
| 235 | 227 | ) |
| 236 | 228 | callers = [r[0] for r in (callers_result.result_set or []) if r[0]] |
| 237 | 229 | |
| 238 | 230 | # Fetch callees |
| 239 | 231 | callees_result = self._store.query( |
| 240 | - "MATCH (n {name: $name})-[:CALLS]->(callee) " | |
| 241 | - "RETURN callee.name LIMIT 10", | |
| 232 | + "MATCH (n {name: $name})-[:CALLS]->(callee) RETURN callee.name LIMIT 10", | |
| 242 | 233 | {"name": name}, |
| 243 | 234 | ) |
| 244 | 235 | callees = [r[0] for r in (callees_result.result_set or []) if r[0]] |
| 245 | 236 | |
| 246 | 237 | prompt = _GENERATE_DOCS_PROMPT.format( |
| 247 | 238 |
| --- navegador/intelligence/nlp.py | |
| +++ navegador/intelligence/nlp.py | |
| @@ -128,13 +128,11 @@ | |
| 128 | |
| 129 | Returns: |
| 130 | A human-readable answer string. |
| 131 | """ |
| 132 | # Step 1: translate question → Cypher |
| 133 | cypher_prompt = _NL_TO_CYPHER_PROMPT.format( |
| 134 | schema=_SCHEMA_SUMMARY, question=question |
| 135 | ) |
| 136 | cypher = self._provider.complete(cypher_prompt).strip() |
| 137 | |
| 138 | # Strip any accidental markdown fences the model may still produce |
| 139 | cypher = _strip_fences(cypher) |
| 140 | |
| @@ -142,19 +140,16 @@ | |
| 142 | try: |
| 143 | result = self._store.query(cypher, {}) |
| 144 | rows = result.result_set or [] |
| 145 | except Exception as exc: # noqa: BLE001 |
| 146 | return ( |
| 147 | f"Failed to execute the generated Cypher query.\n\n" |
| 148 | f"Query: {cypher}\n\nError: {exc}" |
| 149 | ) |
| 150 | |
| 151 | # Step 3: format result |
| 152 | rows_text = json.dumps(rows[:50], indent=2, default=str) |
| 153 | fmt_prompt = _FORMAT_RESULT_PROMPT.format( |
| 154 | question=question, cypher=cypher, rows=rows_text |
| 155 | ) |
| 156 | return self._provider.complete(fmt_prompt) |
| 157 | |
| 158 | # ── Community naming ────────────────────────────────────────────────── |
| 159 | |
| 160 | def name_communities(self, communities: list["Community"]) -> list[dict[str, Any]]: |
| @@ -221,26 +216,22 @@ | |
| 221 | signature = "" |
| 222 | fp = file_path |
| 223 | |
| 224 | if rows: |
| 225 | row = rows[0] |
| 226 | node_type, _, fp, docstring, signature = ( |
| 227 | row[0], row[1], row[2], row[3], row[4] |
| 228 | ) |
| 229 | |
| 230 | # Fetch callers |
| 231 | callers_result = self._store.query( |
| 232 | "MATCH (caller)-[:CALLS]->(n {name: $name}) " |
| 233 | "RETURN caller.name LIMIT 10", |
| 234 | {"name": name}, |
| 235 | ) |
| 236 | callers = [r[0] for r in (callers_result.result_set or []) if r[0]] |
| 237 | |
| 238 | # Fetch callees |
| 239 | callees_result = self._store.query( |
| 240 | "MATCH (n {name: $name})-[:CALLS]->(callee) " |
| 241 | "RETURN callee.name LIMIT 10", |
| 242 | {"name": name}, |
| 243 | ) |
| 244 | callees = [r[0] for r in (callees_result.result_set or []) if r[0]] |
| 245 | |
| 246 | prompt = _GENERATE_DOCS_PROMPT.format( |
| 247 |
| --- navegador/intelligence/nlp.py | |
| +++ navegador/intelligence/nlp.py | |
| @@ -128,13 +128,11 @@ | |
| 128 | |
| 129 | Returns: |
| 130 | A human-readable answer string. |
| 131 | """ |
| 132 | # Step 1: translate question → Cypher |
| 133 | cypher_prompt = _NL_TO_CYPHER_PROMPT.format(schema=_SCHEMA_SUMMARY, question=question) |
| 134 | cypher = self._provider.complete(cypher_prompt).strip() |
| 135 | |
| 136 | # Strip any accidental markdown fences the model may still produce |
| 137 | cypher = _strip_fences(cypher) |
| 138 | |
| @@ -142,19 +140,16 @@ | |
| 140 | try: |
| 141 | result = self._store.query(cypher, {}) |
| 142 | rows = result.result_set or [] |
| 143 | except Exception as exc: # noqa: BLE001 |
| 144 | return ( |
| 145 | f"Failed to execute the generated Cypher query.\n\nQuery: {cypher}\n\nError: {exc}" |
| 146 | ) |
| 147 | |
| 148 | # Step 3: format result |
| 149 | rows_text = json.dumps(rows[:50], indent=2, default=str) |
| 150 | fmt_prompt = _FORMAT_RESULT_PROMPT.format(question=question, cypher=cypher, rows=rows_text) |
| 151 | return self._provider.complete(fmt_prompt) |
| 152 | |
| 153 | # ── Community naming ────────────────────────────────────────────────── |
| 154 | |
| 155 | def name_communities(self, communities: list["Community"]) -> list[dict[str, Any]]: |
| @@ -221,26 +216,22 @@ | |
| 216 | signature = "" |
| 217 | fp = file_path |
| 218 | |
| 219 | if rows: |
| 220 | row = rows[0] |
| 221 | node_type, _, fp, docstring, signature = (row[0], row[1], row[2], row[3], row[4]) |
| 222 | |
| 223 | # Fetch callers |
| 224 | callers_result = self._store.query( |
| 225 | "MATCH (caller)-[:CALLS]->(n {name: $name}) RETURN caller.name LIMIT 10", |
| 226 | {"name": name}, |
| 227 | ) |
| 228 | callers = [r[0] for r in (callers_result.result_set or []) if r[0]] |
| 229 | |
| 230 | # Fetch callees |
| 231 | callees_result = self._store.query( |
| 232 | "MATCH (n {name: $name})-[:CALLS]->(callee) RETURN callee.name LIMIT 10", |
| 233 | {"name": name}, |
| 234 | ) |
| 235 | callees = [r[0] for r in (callees_result.result_set or []) if r[0]] |
| 236 | |
| 237 | prompt = _GENERATE_DOCS_PROMPT.format( |
| 238 |
+1
-3
| --- navegador/intelligence/search.py | ||
| +++ navegador/intelligence/search.py | ||
| @@ -139,13 +139,11 @@ | ||
| 139 | 139 | result = self._store.query(_NODES_WITH_EMBEDDINGS, {}) |
| 140 | 140 | rows = result.result_set or [] |
| 141 | 141 | |
| 142 | 142 | scored: list[dict[str, Any]] = [] |
| 143 | 143 | for row in rows: |
| 144 | - node_type, name, file_path, text, emb_json = ( | |
| 145 | - row[0], row[1], row[2], row[3], row[4] | |
| 146 | - ) | |
| 144 | + node_type, name, file_path, text, emb_json = (row[0], row[1], row[2], row[3], row[4]) | |
| 147 | 145 | if not emb_json: |
| 148 | 146 | continue |
| 149 | 147 | try: |
| 150 | 148 | node_vec: list[float] = json.loads(emb_json) |
| 151 | 149 | except (json.JSONDecodeError, TypeError): |
| 152 | 150 |
| --- navegador/intelligence/search.py | |
| +++ navegador/intelligence/search.py | |
| @@ -139,13 +139,11 @@ | |
| 139 | result = self._store.query(_NODES_WITH_EMBEDDINGS, {}) |
| 140 | rows = result.result_set or [] |
| 141 | |
| 142 | scored: list[dict[str, Any]] = [] |
| 143 | for row in rows: |
| 144 | node_type, name, file_path, text, emb_json = ( |
| 145 | row[0], row[1], row[2], row[3], row[4] |
| 146 | ) |
| 147 | if not emb_json: |
| 148 | continue |
| 149 | try: |
| 150 | node_vec: list[float] = json.loads(emb_json) |
| 151 | except (json.JSONDecodeError, TypeError): |
| 152 |
| --- navegador/intelligence/search.py | |
| +++ navegador/intelligence/search.py | |
| @@ -139,13 +139,11 @@ | |
| 139 | result = self._store.query(_NODES_WITH_EMBEDDINGS, {}) |
| 140 | rows = result.result_set or [] |
| 141 | |
| 142 | scored: list[dict[str, Any]] = [] |
| 143 | for row in rows: |
| 144 | node_type, name, file_path, text, emb_json = (row[0], row[1], row[2], row[3], row[4]) |
| 145 | if not emb_json: |
| 146 | continue |
| 147 | try: |
| 148 | node_vec: list[float] = json.loads(emb_json) |
| 149 | except (json.JSONDecodeError, TypeError): |
| 150 |
+1
-3
| --- navegador/llm.py | ||
| +++ navegador/llm.py | ||
| @@ -22,11 +22,10 @@ | ||
| 22 | 22 | |
| 23 | 23 | from __future__ import annotations |
| 24 | 24 | |
| 25 | 25 | from abc import ABC, abstractmethod |
| 26 | 26 | |
| 27 | - | |
| 28 | 27 | # ── Abstract base ───────────────────────────────────────────────────────────── |
| 29 | 28 | |
| 30 | 29 | |
| 31 | 30 | class LLMProvider(ABC): |
| 32 | 31 | """Abstract interface that every concrete LLM provider must satisfy.""" |
| @@ -293,12 +292,11 @@ | ||
| 293 | 292 | ValueError: If *name* does not correspond to a known provider. |
| 294 | 293 | ImportError: If the underlying SDK is not installed. |
| 295 | 294 | """ |
| 296 | 295 | if name not in _PROVIDER_CLASS_MAP: |
| 297 | 296 | raise ValueError( |
| 298 | - f"Unknown LLM provider: {name!r}. " | |
| 299 | - f"Valid options are: {sorted(_PROVIDER_CLASS_MAP)}" | |
| 297 | + f"Unknown LLM provider: {name!r}. Valid options are: {sorted(_PROVIDER_CLASS_MAP)}" | |
| 300 | 298 | ) |
| 301 | 299 | cls = _PROVIDER_CLASS_MAP[name] |
| 302 | 300 | return cls(model=model) |
| 303 | 301 | |
| 304 | 302 | |
| 305 | 303 |
| --- navegador/llm.py | |
| +++ navegador/llm.py | |
| @@ -22,11 +22,10 @@ | |
| 22 | |
| 23 | from __future__ import annotations |
| 24 | |
| 25 | from abc import ABC, abstractmethod |
| 26 | |
| 27 | |
| 28 | # ── Abstract base ───────────────────────────────────────────────────────────── |
| 29 | |
| 30 | |
| 31 | class LLMProvider(ABC): |
| 32 | """Abstract interface that every concrete LLM provider must satisfy.""" |
| @@ -293,12 +292,11 @@ | |
| 293 | ValueError: If *name* does not correspond to a known provider. |
| 294 | ImportError: If the underlying SDK is not installed. |
| 295 | """ |
| 296 | if name not in _PROVIDER_CLASS_MAP: |
| 297 | raise ValueError( |
| 298 | f"Unknown LLM provider: {name!r}. " |
| 299 | f"Valid options are: {sorted(_PROVIDER_CLASS_MAP)}" |
| 300 | ) |
| 301 | cls = _PROVIDER_CLASS_MAP[name] |
| 302 | return cls(model=model) |
| 303 | |
| 304 | |
| 305 |
| --- navegador/llm.py | |
| +++ navegador/llm.py | |
| @@ -22,11 +22,10 @@ | |
| 22 | |
| 23 | from __future__ import annotations |
| 24 | |
| 25 | from abc import ABC, abstractmethod |
| 26 | |
| 27 | # ── Abstract base ───────────────────────────────────────────────────────────── |
| 28 | |
| 29 | |
| 30 | class LLMProvider(ABC): |
| 31 | """Abstract interface that every concrete LLM provider must satisfy.""" |
| @@ -293,12 +292,11 @@ | |
| 292 | ValueError: If *name* does not correspond to a known provider. |
| 293 | ImportError: If the underlying SDK is not installed. |
| 294 | """ |
| 295 | if name not in _PROVIDER_CLASS_MAP: |
| 296 | raise ValueError( |
| 297 | f"Unknown LLM provider: {name!r}. Valid options are: {sorted(_PROVIDER_CLASS_MAP)}" |
| 298 | ) |
| 299 | cls = _PROVIDER_CLASS_MAP[name] |
| 300 | return cls(model=model) |
| 301 | |
| 302 | |
| 303 |
+3
-9
| --- navegador/mcp/security.py | ||
| +++ navegador/mcp/security.py | ||
| @@ -65,25 +65,19 @@ | ||
| 65 | 65 | |
| 66 | 66 | # Check for write-operation keywords as whole words |
| 67 | 67 | for kw in _WRITE_KEYWORDS: |
| 68 | 68 | pattern = re.compile(rf"\b{kw}\b") |
| 69 | 69 | if pattern.search(upper): |
| 70 | - raise QueryValidationError( | |
| 71 | - f"Write operation '{kw}' is not allowed in read-only mode." | |
| 72 | - ) | |
| 70 | + raise QueryValidationError(f"Write operation '{kw}' is not allowed in read-only mode.") | |
| 73 | 71 | |
| 74 | 72 | # Check for CALL procedure injection |
| 75 | 73 | if _CALL_RE.search(stripped): |
| 76 | - raise QueryValidationError( | |
| 77 | - "CALL procedures are not allowed in read-only mode." | |
| 78 | - ) | |
| 74 | + raise QueryValidationError("CALL procedures are not allowed in read-only mode.") | |
| 79 | 75 | |
| 80 | 76 | # Check for nested / sub-query patterns |
| 81 | 77 | if _SUBQUERY_RE.search(stripped): |
| 82 | - raise QueryValidationError( | |
| 83 | - "Nested sub-queries are not allowed in read-only mode." | |
| 84 | - ) | |
| 78 | + raise QueryValidationError("Nested sub-queries are not allowed in read-only mode.") | |
| 85 | 79 | |
| 86 | 80 | |
| 87 | 81 | def check_complexity( |
| 88 | 82 | query: str, |
| 89 | 83 | max_depth: int = 5, |
| 90 | 84 |
| --- navegador/mcp/security.py | |
| +++ navegador/mcp/security.py | |
| @@ -65,25 +65,19 @@ | |
| 65 | |
| 66 | # Check for write-operation keywords as whole words |
| 67 | for kw in _WRITE_KEYWORDS: |
| 68 | pattern = re.compile(rf"\b{kw}\b") |
| 69 | if pattern.search(upper): |
| 70 | raise QueryValidationError( |
| 71 | f"Write operation '{kw}' is not allowed in read-only mode." |
| 72 | ) |
| 73 | |
| 74 | # Check for CALL procedure injection |
| 75 | if _CALL_RE.search(stripped): |
| 76 | raise QueryValidationError( |
| 77 | "CALL procedures are not allowed in read-only mode." |
| 78 | ) |
| 79 | |
| 80 | # Check for nested / sub-query patterns |
| 81 | if _SUBQUERY_RE.search(stripped): |
| 82 | raise QueryValidationError( |
| 83 | "Nested sub-queries are not allowed in read-only mode." |
| 84 | ) |
| 85 | |
| 86 | |
| 87 | def check_complexity( |
| 88 | query: str, |
| 89 | max_depth: int = 5, |
| 90 |
| --- navegador/mcp/security.py | |
| +++ navegador/mcp/security.py | |
| @@ -65,25 +65,19 @@ | |
| 65 | |
| 66 | # Check for write-operation keywords as whole words |
| 67 | for kw in _WRITE_KEYWORDS: |
| 68 | pattern = re.compile(rf"\b{kw}\b") |
| 69 | if pattern.search(upper): |
| 70 | raise QueryValidationError(f"Write operation '{kw}' is not allowed in read-only mode.") |
| 71 | |
| 72 | # Check for CALL procedure injection |
| 73 | if _CALL_RE.search(stripped): |
| 74 | raise QueryValidationError("CALL procedures are not allowed in read-only mode.") |
| 75 | |
| 76 | # Check for nested / sub-query patterns |
| 77 | if _SUBQUERY_RE.search(stripped): |
| 78 | raise QueryValidationError("Nested sub-queries are not allowed in read-only mode.") |
| 79 | |
| 80 | |
| 81 | def check_complexity( |
| 82 | query: str, |
| 83 | max_depth: int = 5, |
| 84 |
+11
-16
| --- navegador/mcp/server.py | ||
| +++ navegador/mcp/server.py | ||
| @@ -143,11 +143,11 @@ | ||
| 143 | 143 | description="Return node and edge counts for the current graph.", |
| 144 | 144 | inputSchema={"type": "object", "properties": {}}, |
| 145 | 145 | ), |
| 146 | 146 | Tool( |
| 147 | 147 | name="get_rationale", |
| 148 | - description="Return the rationale, alternatives, and status of an architectural decision.", | |
| 148 | + description="Return rationale, alternatives, and status of a decision.", | |
| 149 | 149 | inputSchema={ |
| 150 | 150 | "type": "object", |
| 151 | 151 | "properties": { |
| 152 | 152 | "name": {"type": "string", "description": "Decision name."}, |
| 153 | 153 | "format": { |
| @@ -175,11 +175,11 @@ | ||
| 175 | 175 | "required": ["name"], |
| 176 | 176 | }, |
| 177 | 177 | ), |
| 178 | 178 | Tool( |
| 179 | 179 | name="search_knowledge", |
| 180 | - description="Search concepts, rules, decisions, and wiki pages by name or description.", | |
| 180 | + description="Search concepts, rules, decisions, and wiki pages.", | |
| 181 | 181 | inputSchema={ |
| 182 | 182 | "type": "object", |
| 183 | 183 | "properties": { |
| 184 | 184 | "query": {"type": "string", "description": "Search query."}, |
| 185 | 185 | "limit": {"type": "integer", "default": 20}, |
| @@ -217,14 +217,16 @@ | ||
| 217 | 217 | async def call_tool(name: str, arguments: dict) -> list[TextContent]: |
| 218 | 218 | loader = _get_loader() |
| 219 | 219 | |
| 220 | 220 | if name == "ingest_repo": |
| 221 | 221 | if read_only: |
| 222 | - return [TextContent( | |
| 223 | - type="text", | |
| 224 | - text="Error: ingest_repo is disabled in read-only mode.", | |
| 225 | - )] | |
| 222 | + return [ | |
| 223 | + TextContent( | |
| 224 | + type="text", | |
| 225 | + text="Error: ingest_repo is disabled in read-only mode.", | |
| 226 | + ) | |
| 227 | + ] | |
| 226 | 228 | from navegador.ingestion import RepoIngester |
| 227 | 229 | |
| 228 | 230 | ingester = RepoIngester(loader.store) |
| 229 | 231 | stats = ingester.ingest(arguments["path"], clear=arguments.get("clear", False)) |
| 230 | 232 | return [TextContent(type="text", text=json.dumps(stats, indent=2))] |
| @@ -289,25 +291,18 @@ | ||
| 289 | 291 | results = loader.find_owners( |
| 290 | 292 | arguments["name"], file_path=arguments.get("file_path", "") |
| 291 | 293 | ) |
| 292 | 294 | if not results: |
| 293 | 295 | return [TextContent(type="text", text="No owners found.")] |
| 294 | - lines = [ | |
| 295 | - f"- **{r.name}** ({r.description})" for r in results | |
| 296 | - ] | |
| 296 | + lines = [f"- **{r.name}** ({r.description})" for r in results] | |
| 297 | 297 | return [TextContent(type="text", text="\n".join(lines))] |
| 298 | 298 | |
| 299 | 299 | elif name == "search_knowledge": |
| 300 | - results = loader.search_knowledge( | |
| 301 | - arguments["query"], limit=arguments.get("limit", 20) | |
| 302 | - ) | |
| 300 | + results = loader.search_knowledge(arguments["query"], limit=arguments.get("limit", 20)) | |
| 303 | 301 | if not results: |
| 304 | 302 | return [TextContent(type="text", text="No results.")] |
| 305 | - lines = [ | |
| 306 | - f"- **{r.type}** `{r.name}` — {r.description or ''}" | |
| 307 | - for r in results | |
| 308 | - ] | |
| 303 | + lines = [f"- **{r.type}** `{r.name}` — {r.description or ''}" for r in results] | |
| 309 | 304 | return [TextContent(type="text", text="\n".join(lines))] |
| 310 | 305 | |
| 311 | 306 | elif name == "blast_radius": |
| 312 | 307 | from navegador.analysis.impact import ImpactAnalyzer |
| 313 | 308 | |
| 314 | 309 |
| --- navegador/mcp/server.py | |
| +++ navegador/mcp/server.py | |
| @@ -143,11 +143,11 @@ | |
| 143 | description="Return node and edge counts for the current graph.", |
| 144 | inputSchema={"type": "object", "properties": {}}, |
| 145 | ), |
| 146 | Tool( |
| 147 | name="get_rationale", |
| 148 | description="Return the rationale, alternatives, and status of an architectural decision.", |
| 149 | inputSchema={ |
| 150 | "type": "object", |
| 151 | "properties": { |
| 152 | "name": {"type": "string", "description": "Decision name."}, |
| 153 | "format": { |
| @@ -175,11 +175,11 @@ | |
| 175 | "required": ["name"], |
| 176 | }, |
| 177 | ), |
| 178 | Tool( |
| 179 | name="search_knowledge", |
| 180 | description="Search concepts, rules, decisions, and wiki pages by name or description.", |
| 181 | inputSchema={ |
| 182 | "type": "object", |
| 183 | "properties": { |
| 184 | "query": {"type": "string", "description": "Search query."}, |
| 185 | "limit": {"type": "integer", "default": 20}, |
| @@ -217,14 +217,16 @@ | |
| 217 | async def call_tool(name: str, arguments: dict) -> list[TextContent]: |
| 218 | loader = _get_loader() |
| 219 | |
| 220 | if name == "ingest_repo": |
| 221 | if read_only: |
| 222 | return [TextContent( |
| 223 | type="text", |
| 224 | text="Error: ingest_repo is disabled in read-only mode.", |
| 225 | )] |
| 226 | from navegador.ingestion import RepoIngester |
| 227 | |
| 228 | ingester = RepoIngester(loader.store) |
| 229 | stats = ingester.ingest(arguments["path"], clear=arguments.get("clear", False)) |
| 230 | return [TextContent(type="text", text=json.dumps(stats, indent=2))] |
| @@ -289,25 +291,18 @@ | |
| 289 | results = loader.find_owners( |
| 290 | arguments["name"], file_path=arguments.get("file_path", "") |
| 291 | ) |
| 292 | if not results: |
| 293 | return [TextContent(type="text", text="No owners found.")] |
| 294 | lines = [ |
| 295 | f"- **{r.name}** ({r.description})" for r in results |
| 296 | ] |
| 297 | return [TextContent(type="text", text="\n".join(lines))] |
| 298 | |
| 299 | elif name == "search_knowledge": |
| 300 | results = loader.search_knowledge( |
| 301 | arguments["query"], limit=arguments.get("limit", 20) |
| 302 | ) |
| 303 | if not results: |
| 304 | return [TextContent(type="text", text="No results.")] |
| 305 | lines = [ |
| 306 | f"- **{r.type}** `{r.name}` — {r.description or ''}" |
| 307 | for r in results |
| 308 | ] |
| 309 | return [TextContent(type="text", text="\n".join(lines))] |
| 310 | |
| 311 | elif name == "blast_radius": |
| 312 | from navegador.analysis.impact import ImpactAnalyzer |
| 313 | |
| 314 |
| --- navegador/mcp/server.py | |
| +++ navegador/mcp/server.py | |
| @@ -143,11 +143,11 @@ | |
| 143 | description="Return node and edge counts for the current graph.", |
| 144 | inputSchema={"type": "object", "properties": {}}, |
| 145 | ), |
| 146 | Tool( |
| 147 | name="get_rationale", |
| 148 | description="Return rationale, alternatives, and status of a decision.", |
| 149 | inputSchema={ |
| 150 | "type": "object", |
| 151 | "properties": { |
| 152 | "name": {"type": "string", "description": "Decision name."}, |
| 153 | "format": { |
| @@ -175,11 +175,11 @@ | |
| 175 | "required": ["name"], |
| 176 | }, |
| 177 | ), |
| 178 | Tool( |
| 179 | name="search_knowledge", |
| 180 | description="Search concepts, rules, decisions, and wiki pages.", |
| 181 | inputSchema={ |
| 182 | "type": "object", |
| 183 | "properties": { |
| 184 | "query": {"type": "string", "description": "Search query."}, |
| 185 | "limit": {"type": "integer", "default": 20}, |
| @@ -217,14 +217,16 @@ | |
| 217 | async def call_tool(name: str, arguments: dict) -> list[TextContent]: |
| 218 | loader = _get_loader() |
| 219 | |
| 220 | if name == "ingest_repo": |
| 221 | if read_only: |
| 222 | return [ |
| 223 | TextContent( |
| 224 | type="text", |
| 225 | text="Error: ingest_repo is disabled in read-only mode.", |
| 226 | ) |
| 227 | ] |
| 228 | from navegador.ingestion import RepoIngester |
| 229 | |
| 230 | ingester = RepoIngester(loader.store) |
| 231 | stats = ingester.ingest(arguments["path"], clear=arguments.get("clear", False)) |
| 232 | return [TextContent(type="text", text=json.dumps(stats, indent=2))] |
| @@ -289,25 +291,18 @@ | |
| 291 | results = loader.find_owners( |
| 292 | arguments["name"], file_path=arguments.get("file_path", "") |
| 293 | ) |
| 294 | if not results: |
| 295 | return [TextContent(type="text", text="No owners found.")] |
| 296 | lines = [f"- **{r.name}** ({r.description})" for r in results] |
| 297 | return [TextContent(type="text", text="\n".join(lines))] |
| 298 | |
| 299 | elif name == "search_knowledge": |
| 300 | results = loader.search_knowledge(arguments["query"], limit=arguments.get("limit", 20)) |
| 301 | if not results: |
| 302 | return [TextContent(type="text", text="No results.")] |
| 303 | lines = [f"- **{r.type}** `{r.name}` — {r.description or ''}" for r in results] |
| 304 | return [TextContent(type="text", text="\n".join(lines))] |
| 305 | |
| 306 | elif name == "blast_radius": |
| 307 | from navegador.analysis.impact import ImpactAnalyzer |
| 308 | |
| 309 |
+4
-6
| --- navegador/monorepo.py | ||
| +++ navegador/monorepo.py | ||
| @@ -14,11 +14,11 @@ | ||
| 14 | 14 | from __future__ import annotations |
| 15 | 15 | |
| 16 | 16 | import fnmatch |
| 17 | 17 | import json |
| 18 | 18 | import logging |
| 19 | -from dataclasses import dataclass, field | |
| 19 | +from dataclasses import dataclass | |
| 20 | 20 | from pathlib import Path |
| 21 | 21 | from typing import Any |
| 22 | 22 | |
| 23 | 23 | from navegador.graph.schema import EdgeType, NodeLabel |
| 24 | 24 | from navegador.graph.store import GraphStore |
| @@ -136,13 +136,13 @@ | ||
| 136 | 136 | or declare them in nx.json under "projects". |
| 137 | 137 | """ |
| 138 | 138 | # Try reading nx.json for explicit projects |
| 139 | 139 | nx_json = root / "nx.json" |
| 140 | 140 | try: |
| 141 | - data = json.loads(nx_json.read_text(encoding="utf-8")) | |
| 141 | + json.loads(nx_json.read_text(encoding="utf-8")) | |
| 142 | 142 | except (OSError, json.JSONDecodeError): |
| 143 | - data = {} | |
| 143 | + pass | |
| 144 | 144 | |
| 145 | 145 | # Nx 16+ uses workspaceLayout or projects in project.json files |
| 146 | 146 | packages: list[Path] = [] |
| 147 | 147 | for subdir in ("apps", "libs", "packages"): |
| 148 | 148 | base = root / subdir |
| @@ -447,13 +447,11 @@ | ||
| 447 | 447 | to_label=NodeLabel.Repository, |
| 448 | 448 | to_key={"name": target}, |
| 449 | 449 | ) |
| 450 | 450 | edges_created += 1 |
| 451 | 451 | except Exception: |
| 452 | - logger.debug( | |
| 453 | - "Could not create DEPENDS_ON edge %s → %s", pkg_name, target | |
| 454 | - ) | |
| 452 | + logger.debug("Could not create DEPENDS_ON edge %s → %s", pkg_name, target) | |
| 455 | 453 | |
| 456 | 454 | return edges_created |
| 457 | 455 | |
| 458 | 456 | def _read_package_deps(self, workspace_type: str, pkg_path: Path) -> list[str]: |
| 459 | 457 | """Return a flat list of declared dependency names for a package.""" |
| 460 | 458 |
| --- navegador/monorepo.py | |
| +++ navegador/monorepo.py | |
| @@ -14,11 +14,11 @@ | |
| 14 | from __future__ import annotations |
| 15 | |
| 16 | import fnmatch |
| 17 | import json |
| 18 | import logging |
| 19 | from dataclasses import dataclass, field |
| 20 | from pathlib import Path |
| 21 | from typing import Any |
| 22 | |
| 23 | from navegador.graph.schema import EdgeType, NodeLabel |
| 24 | from navegador.graph.store import GraphStore |
| @@ -136,13 +136,13 @@ | |
| 136 | or declare them in nx.json under "projects". |
| 137 | """ |
| 138 | # Try reading nx.json for explicit projects |
| 139 | nx_json = root / "nx.json" |
| 140 | try: |
| 141 | data = json.loads(nx_json.read_text(encoding="utf-8")) |
| 142 | except (OSError, json.JSONDecodeError): |
| 143 | data = {} |
| 144 | |
| 145 | # Nx 16+ uses workspaceLayout or projects in project.json files |
| 146 | packages: list[Path] = [] |
| 147 | for subdir in ("apps", "libs", "packages"): |
| 148 | base = root / subdir |
| @@ -447,13 +447,11 @@ | |
| 447 | to_label=NodeLabel.Repository, |
| 448 | to_key={"name": target}, |
| 449 | ) |
| 450 | edges_created += 1 |
| 451 | except Exception: |
| 452 | logger.debug( |
| 453 | "Could not create DEPENDS_ON edge %s → %s", pkg_name, target |
| 454 | ) |
| 455 | |
| 456 | return edges_created |
| 457 | |
| 458 | def _read_package_deps(self, workspace_type: str, pkg_path: Path) -> list[str]: |
| 459 | """Return a flat list of declared dependency names for a package.""" |
| 460 |
| --- navegador/monorepo.py | |
| +++ navegador/monorepo.py | |
| @@ -14,11 +14,11 @@ | |
| 14 | from __future__ import annotations |
| 15 | |
| 16 | import fnmatch |
| 17 | import json |
| 18 | import logging |
| 19 | from dataclasses import dataclass |
| 20 | from pathlib import Path |
| 21 | from typing import Any |
| 22 | |
| 23 | from navegador.graph.schema import EdgeType, NodeLabel |
| 24 | from navegador.graph.store import GraphStore |
| @@ -136,13 +136,13 @@ | |
| 136 | or declare them in nx.json under "projects". |
| 137 | """ |
| 138 | # Try reading nx.json for explicit projects |
| 139 | nx_json = root / "nx.json" |
| 140 | try: |
| 141 | json.loads(nx_json.read_text(encoding="utf-8")) |
| 142 | except (OSError, json.JSONDecodeError): |
| 143 | pass |
| 144 | |
| 145 | # Nx 16+ uses workspaceLayout or projects in project.json files |
| 146 | packages: list[Path] = [] |
| 147 | for subdir in ("apps", "libs", "packages"): |
| 148 | base = root / subdir |
| @@ -447,13 +447,11 @@ | |
| 447 | to_label=NodeLabel.Repository, |
| 448 | to_key={"name": target}, |
| 449 | ) |
| 450 | edges_created += 1 |
| 451 | except Exception: |
| 452 | logger.debug("Could not create DEPENDS_ON edge %s → %s", pkg_name, target) |
| 453 | |
| 454 | return edges_created |
| 455 | |
| 456 | def _read_package_deps(self, workspace_type: str, pkg_path: Path) -> list[str]: |
| 457 | """Return a flat list of declared dependency names for a package.""" |
| 458 |
+6
-18
| --- navegador/multirepo.py | ||
| +++ navegador/multirepo.py | ||
| @@ -27,17 +27,16 @@ | ||
| 27 | 27 | results = ws_fed.search("authenticate") |
| 28 | 28 | """ |
| 29 | 29 | |
| 30 | 30 | from __future__ import annotations |
| 31 | 31 | |
| 32 | -import json | |
| 33 | 32 | import logging |
| 34 | 33 | from enum import Enum |
| 35 | 34 | from pathlib import Path |
| 36 | 35 | from typing import Any |
| 37 | 36 | |
| 38 | -from navegador.graph.schema import EdgeType, NodeLabel | |
| 37 | +from navegador.graph.schema import NodeLabel | |
| 39 | 38 | from navegador.graph.store import GraphStore |
| 40 | 39 | |
| 41 | 40 | logger = logging.getLogger(__name__) |
| 42 | 41 | |
| 43 | 42 | # Key used to store repo registry as a special node in the graph |
| @@ -103,13 +102,11 @@ | ||
| 103 | 102 | "description": f"workspace:{self.mode.value}", |
| 104 | 103 | "language": "", |
| 105 | 104 | "file_path": resolved, |
| 106 | 105 | }, |
| 107 | 106 | ) |
| 108 | - logger.info( | |
| 109 | - "WorkspaceManager (%s): registered %s → %s", self.mode.value, name, resolved | |
| 110 | - ) | |
| 107 | + logger.info("WorkspaceManager (%s): registered %s → %s", self.mode.value, name, resolved) | |
| 111 | 108 | |
| 112 | 109 | def list_repos(self) -> list[dict[str, str]]: |
| 113 | 110 | """Return all registered repositories.""" |
| 114 | 111 | return [ |
| 115 | 112 | {"name": name, "path": info["path"], "graph_name": info["graph_name"]} |
| @@ -153,13 +150,11 @@ | ||
| 153 | 150 | try: |
| 154 | 151 | ingester = RepoIngester(target_store) |
| 155 | 152 | stats = ingester.ingest(path, clear=False) |
| 156 | 153 | summary[name] = stats |
| 157 | 154 | except Exception as exc: # noqa: BLE001 |
| 158 | - logger.error( | |
| 159 | - "WorkspaceManager: failed to ingest %s: %s", name, exc | |
| 160 | - ) | |
| 155 | + logger.error("WorkspaceManager: failed to ingest %s: %s", name, exc) | |
| 161 | 156 | summary[name] = {"error": str(exc)} |
| 162 | 157 | |
| 163 | 158 | return summary |
| 164 | 159 | |
| 165 | 160 | # ── Search ──────────────────────────────────────────────────────────────── |
| @@ -191,13 +186,11 @@ | ||
| 191 | 186 | if key not in seen: |
| 192 | 187 | seen.add(key) |
| 193 | 188 | r["repo"] = name |
| 194 | 189 | all_results.append(r) |
| 195 | 190 | except Exception: |
| 196 | - logger.debug( | |
| 197 | - "WorkspaceManager: search failed for repo %s", name, exc_info=True | |
| 198 | - ) | |
| 191 | + logger.debug("WorkspaceManager: search failed for repo %s", name, exc_info=True) | |
| 199 | 192 | |
| 200 | 193 | return all_results[:limit] |
| 201 | 194 | |
| 202 | 195 | # ── Helpers ─────────────────────────────────────────────────────────────── |
| 203 | 196 | |
| @@ -263,13 +256,11 @@ | ||
| 263 | 256 | |
| 264 | 257 | # ── Query ───────────────────────────────────────────────────────────────── |
| 265 | 258 | |
| 266 | 259 | def list_repos(self) -> list[dict[str, Any]]: |
| 267 | 260 | """Return all registered repositories.""" |
| 268 | - result = self.store.query( | |
| 269 | - "MATCH (r:Repository) RETURN r.name, r.path ORDER BY r.name" | |
| 270 | - ) | |
| 261 | + result = self.store.query("MATCH (r:Repository) RETURN r.name, r.path ORDER BY r.name") | |
| 271 | 262 | rows = result.result_set or [] |
| 272 | 263 | return [{"name": row[0], "path": row[1]} for row in rows] |
| 273 | 264 | |
| 274 | 265 | # ── Ingestion ───────────────────────────────────────────────────────────── |
| 275 | 266 | |
| @@ -320,9 +311,6 @@ | ||
| 320 | 311 | " coalesce(n.file_path, n.path, '') AS file_path " |
| 321 | 312 | f"LIMIT {int(limit)}" |
| 322 | 313 | ) |
| 323 | 314 | result = self.store.query(cypher, {"q": query}) |
| 324 | 315 | rows = result.result_set or [] |
| 325 | - return [ | |
| 326 | - {"label": row[0], "name": row[1], "file_path": row[2]} | |
| 327 | - for row in rows | |
| 328 | - ] | |
| 316 | + return [{"label": row[0], "name": row[1], "file_path": row[2]} for row in rows] | |
| 329 | 317 |
| --- navegador/multirepo.py | |
| +++ navegador/multirepo.py | |
| @@ -27,17 +27,16 @@ | |
| 27 | results = ws_fed.search("authenticate") |
| 28 | """ |
| 29 | |
| 30 | from __future__ import annotations |
| 31 | |
| 32 | import json |
| 33 | import logging |
| 34 | from enum import Enum |
| 35 | from pathlib import Path |
| 36 | from typing import Any |
| 37 | |
| 38 | from navegador.graph.schema import EdgeType, NodeLabel |
| 39 | from navegador.graph.store import GraphStore |
| 40 | |
| 41 | logger = logging.getLogger(__name__) |
| 42 | |
| 43 | # Key used to store repo registry as a special node in the graph |
| @@ -103,13 +102,11 @@ | |
| 103 | "description": f"workspace:{self.mode.value}", |
| 104 | "language": "", |
| 105 | "file_path": resolved, |
| 106 | }, |
| 107 | ) |
| 108 | logger.info( |
| 109 | "WorkspaceManager (%s): registered %s → %s", self.mode.value, name, resolved |
| 110 | ) |
| 111 | |
| 112 | def list_repos(self) -> list[dict[str, str]]: |
| 113 | """Return all registered repositories.""" |
| 114 | return [ |
| 115 | {"name": name, "path": info["path"], "graph_name": info["graph_name"]} |
| @@ -153,13 +150,11 @@ | |
| 153 | try: |
| 154 | ingester = RepoIngester(target_store) |
| 155 | stats = ingester.ingest(path, clear=False) |
| 156 | summary[name] = stats |
| 157 | except Exception as exc: # noqa: BLE001 |
| 158 | logger.error( |
| 159 | "WorkspaceManager: failed to ingest %s: %s", name, exc |
| 160 | ) |
| 161 | summary[name] = {"error": str(exc)} |
| 162 | |
| 163 | return summary |
| 164 | |
| 165 | # ── Search ──────────────────────────────────────────────────────────────── |
| @@ -191,13 +186,11 @@ | |
| 191 | if key not in seen: |
| 192 | seen.add(key) |
| 193 | r["repo"] = name |
| 194 | all_results.append(r) |
| 195 | except Exception: |
| 196 | logger.debug( |
| 197 | "WorkspaceManager: search failed for repo %s", name, exc_info=True |
| 198 | ) |
| 199 | |
| 200 | return all_results[:limit] |
| 201 | |
| 202 | # ── Helpers ─────────────────────────────────────────────────────────────── |
| 203 | |
| @@ -263,13 +256,11 @@ | |
| 263 | |
| 264 | # ── Query ───────────────────────────────────────────────────────────────── |
| 265 | |
| 266 | def list_repos(self) -> list[dict[str, Any]]: |
| 267 | """Return all registered repositories.""" |
| 268 | result = self.store.query( |
| 269 | "MATCH (r:Repository) RETURN r.name, r.path ORDER BY r.name" |
| 270 | ) |
| 271 | rows = result.result_set or [] |
| 272 | return [{"name": row[0], "path": row[1]} for row in rows] |
| 273 | |
| 274 | # ── Ingestion ───────────────────────────────────────────────────────────── |
| 275 | |
| @@ -320,9 +311,6 @@ | |
| 320 | " coalesce(n.file_path, n.path, '') AS file_path " |
| 321 | f"LIMIT {int(limit)}" |
| 322 | ) |
| 323 | result = self.store.query(cypher, {"q": query}) |
| 324 | rows = result.result_set or [] |
| 325 | return [ |
| 326 | {"label": row[0], "name": row[1], "file_path": row[2]} |
| 327 | for row in rows |
| 328 | ] |
| 329 |
| --- navegador/multirepo.py | |
| +++ navegador/multirepo.py | |
| @@ -27,17 +27,16 @@ | |
| 27 | results = ws_fed.search("authenticate") |
| 28 | """ |
| 29 | |
| 30 | from __future__ import annotations |
| 31 | |
| 32 | import logging |
| 33 | from enum import Enum |
| 34 | from pathlib import Path |
| 35 | from typing import Any |
| 36 | |
| 37 | from navegador.graph.schema import NodeLabel |
| 38 | from navegador.graph.store import GraphStore |
| 39 | |
| 40 | logger = logging.getLogger(__name__) |
| 41 | |
| 42 | # Key used to store repo registry as a special node in the graph |
| @@ -103,13 +102,11 @@ | |
| 102 | "description": f"workspace:{self.mode.value}", |
| 103 | "language": "", |
| 104 | "file_path": resolved, |
| 105 | }, |
| 106 | ) |
| 107 | logger.info("WorkspaceManager (%s): registered %s → %s", self.mode.value, name, resolved) |
| 108 | |
| 109 | def list_repos(self) -> list[dict[str, str]]: |
| 110 | """Return all registered repositories.""" |
| 111 | return [ |
| 112 | {"name": name, "path": info["path"], "graph_name": info["graph_name"]} |
| @@ -153,13 +150,11 @@ | |
| 150 | try: |
| 151 | ingester = RepoIngester(target_store) |
| 152 | stats = ingester.ingest(path, clear=False) |
| 153 | summary[name] = stats |
| 154 | except Exception as exc: # noqa: BLE001 |
| 155 | logger.error("WorkspaceManager: failed to ingest %s: %s", name, exc) |
| 156 | summary[name] = {"error": str(exc)} |
| 157 | |
| 158 | return summary |
| 159 | |
| 160 | # ── Search ──────────────────────────────────────────────────────────────── |
| @@ -191,13 +186,11 @@ | |
| 186 | if key not in seen: |
| 187 | seen.add(key) |
| 188 | r["repo"] = name |
| 189 | all_results.append(r) |
| 190 | except Exception: |
| 191 | logger.debug("WorkspaceManager: search failed for repo %s", name, exc_info=True) |
| 192 | |
| 193 | return all_results[:limit] |
| 194 | |
| 195 | # ── Helpers ─────────────────────────────────────────────────────────────── |
| 196 | |
| @@ -263,13 +256,11 @@ | |
| 256 | |
| 257 | # ── Query ───────────────────────────────────────────────────────────────── |
| 258 | |
| 259 | def list_repos(self) -> list[dict[str, Any]]: |
| 260 | """Return all registered repositories.""" |
| 261 | result = self.store.query("MATCH (r:Repository) RETURN r.name, r.path ORDER BY r.name") |
| 262 | rows = result.result_set or [] |
| 263 | return [{"name": row[0], "path": row[1]} for row in rows] |
| 264 | |
| 265 | # ── Ingestion ───────────────────────────────────────────────────────────── |
| 266 | |
| @@ -320,9 +311,6 @@ | |
| 311 | " coalesce(n.file_path, n.path, '') AS file_path " |
| 312 | f"LIMIT {int(limit)}" |
| 313 | ) |
| 314 | result = self.store.query(cypher, {"q": query}) |
| 315 | rows = result.result_set or [] |
| 316 | return [{"label": row[0], "name": row[1], "file_path": row[2]} for row in rows] |
| 317 |
+21
-36
| --- navegador/planopticon_pipeline.py | ||
| +++ navegador/planopticon_pipeline.py | ||
| @@ -18,15 +18,14 @@ | ||
| 18 | 18 | """ |
| 19 | 19 | |
| 20 | 20 | from __future__ import annotations |
| 21 | 21 | |
| 22 | 22 | import logging |
| 23 | -from dataclasses import dataclass, field | |
| 23 | +from dataclasses import dataclass | |
| 24 | 24 | from pathlib import Path |
| 25 | 25 | from typing import Any |
| 26 | 26 | |
| 27 | -from navegador.graph.schema import EdgeType, NodeLabel | |
| 28 | 27 | from navegador.graph.store import GraphStore |
| 29 | 28 | |
| 30 | 29 | logger = logging.getLogger(__name__) |
| 31 | 30 | |
| 32 | 31 | |
| @@ -135,13 +134,13 @@ | ||
| 135 | 134 | knowledge_graph.json, or any combination that may contain an |
| 136 | 135 | ``action_items`` list or ``entities``/``nodes`` with task types. |
| 137 | 136 | """ |
| 138 | 137 | items: list[ActionItem] = [] |
| 139 | 138 | |
| 140 | - source = kg_data.get("video", {}).get("title", "") or kg_data.get( | |
| 141 | - "project", {} | |
| 142 | - ).get("name", "") | |
| 139 | + source = kg_data.get("video", {}).get("title", "") or kg_data.get("project", {}).get( | |
| 140 | + "name", "" | |
| 141 | + ) | |
| 143 | 142 | |
| 144 | 143 | # Explicit action_items list (manifest format) |
| 145 | 144 | for raw in kg_data.get("action_items", []): |
| 146 | 145 | action = (raw.get("action") or "").strip() |
| 147 | 146 | if not action: |
| @@ -201,18 +200,20 @@ | ||
| 201 | 200 | logger.warning("build_decision_timeline: query failed", exc_info=True) |
| 202 | 201 | return [] |
| 203 | 202 | |
| 204 | 203 | timeline = [] |
| 205 | 204 | for row in rows: |
| 206 | - timeline.append({ | |
| 207 | - "name": row[0] or "", | |
| 208 | - "description": row[1] or "", | |
| 209 | - "domain": row[2] or "", | |
| 210 | - "status": row[3] or "", | |
| 211 | - "rationale": row[4] or "", | |
| 212 | - "date": row[5] or "", | |
| 213 | - }) | |
| 205 | + timeline.append( | |
| 206 | + { | |
| 207 | + "name": row[0] or "", | |
| 208 | + "description": row[1] or "", | |
| 209 | + "domain": row[2] or "", | |
| 210 | + "status": row[3] or "", | |
| 211 | + "rationale": row[4] or "", | |
| 212 | + "date": row[5] or "", | |
| 213 | + } | |
| 214 | + ) | |
| 214 | 215 | return timeline |
| 215 | 216 | |
| 216 | 217 | # ── Auto-link to code ───────────────────────────────────────────────────── |
| 217 | 218 | |
| 218 | 219 | @staticmethod |
| @@ -230,18 +231,14 @@ | ||
| 230 | 231 | ------- |
| 231 | 232 | int — number of new ANNOTATES edges created |
| 232 | 233 | """ |
| 233 | 234 | # Fetch all knowledge nodes |
| 234 | 235 | knowledge_cypher = ( |
| 235 | - "MATCH (k) " | |
| 236 | - "WHERE k:Concept OR k:Decision OR k:Rule " | |
| 237 | - "RETURN labels(k)[0], k.name" | |
| 236 | + "MATCH (k) WHERE k:Concept OR k:Decision OR k:Rule RETURN labels(k)[0], k.name" | |
| 238 | 237 | ) |
| 239 | 238 | code_cypher = ( |
| 240 | - "MATCH (c) " | |
| 241 | - "WHERE c:Function OR c:Class OR c:Method " | |
| 242 | - "RETURN labels(c)[0], c.name" | |
| 239 | + "MATCH (c) WHERE c:Function OR c:Class OR c:Method RETURN labels(c)[0], c.name" | |
| 243 | 240 | ) |
| 244 | 241 | |
| 245 | 242 | try: |
| 246 | 243 | k_result = store.query(knowledge_cypher) |
| 247 | 244 | c_result = store.query(code_cypher) |
| @@ -248,53 +245,41 @@ | ||
| 248 | 245 | except Exception: |
| 249 | 246 | logger.warning("auto_link_to_code: initial queries failed", exc_info=True) |
| 250 | 247 | return 0 |
| 251 | 248 | |
| 252 | 249 | knowledge_nodes: list[tuple[str, str]] = [ |
| 253 | - (str(row[0]), str(row[1])) | |
| 254 | - for row in (k_result.result_set or []) | |
| 255 | - if row[0] and row[1] | |
| 250 | + (str(row[0]), str(row[1])) for row in (k_result.result_set or []) if row[0] and row[1] | |
| 256 | 251 | ] |
| 257 | 252 | code_nodes: list[tuple[str, str]] = [ |
| 258 | - (str(row[0]), str(row[1])) | |
| 259 | - for row in (c_result.result_set or []) | |
| 260 | - if row[0] and row[1] | |
| 253 | + (str(row[0]), str(row[1])) for row in (c_result.result_set or []) if row[0] and row[1] | |
| 261 | 254 | ] |
| 262 | 255 | |
| 263 | 256 | if not knowledge_nodes or not code_nodes: |
| 264 | 257 | return 0 |
| 265 | 258 | |
| 266 | 259 | linked = 0 |
| 267 | 260 | for k_label, k_name in knowledge_nodes: |
| 268 | 261 | # Extract significant tokens (length >= 4) from the knowledge name |
| 269 | 262 | tokens = [ |
| 270 | - w.lower() | |
| 271 | - for w in k_name.replace("_", " ").replace("-", " ").split() | |
| 272 | - if len(w) >= 4 | |
| 263 | + w.lower() for w in k_name.replace("_", " ").replace("-", " ").split() if len(w) >= 4 | |
| 273 | 264 | ] |
| 274 | 265 | if not tokens: |
| 275 | 266 | continue |
| 276 | 267 | |
| 277 | 268 | for c_label, c_name in code_nodes: |
| 278 | 269 | c_lower = c_name.lower() |
| 279 | 270 | if any(tok in c_lower for tok in tokens): |
| 280 | 271 | # Create ANNOTATES edge from knowledge node to code node |
| 281 | 272 | cypher = ( |
| 282 | - "MATCH (k:" | |
| 283 | - + k_label | |
| 284 | - + " {name: $kn}), (c:" | |
| 285 | - + c_label | |
| 286 | - + " {name: $cn}) " | |
| 273 | + "MATCH (k:" + k_label + " {name: $kn}), (c:" + c_label + " {name: $cn}) " | |
| 287 | 274 | "MERGE (k)-[r:ANNOTATES]->(c)" |
| 288 | 275 | ) |
| 289 | 276 | try: |
| 290 | 277 | store.query(cypher, {"kn": k_name, "cn": c_name}) |
| 291 | 278 | linked += 1 |
| 292 | 279 | except Exception: |
| 293 | - logger.debug( | |
| 294 | - "auto_link_to_code: could not link %s → %s", k_name, c_name | |
| 295 | - ) | |
| 280 | + logger.debug("auto_link_to_code: could not link %s → %s", k_name, c_name) | |
| 296 | 281 | |
| 297 | 282 | return linked |
| 298 | 283 | |
| 299 | 284 | # ── Helpers ─────────────────────────────────────────────────────────────── |
| 300 | 285 | |
| 301 | 286 |
| --- navegador/planopticon_pipeline.py | |
| +++ navegador/planopticon_pipeline.py | |
| @@ -18,15 +18,14 @@ | |
| 18 | """ |
| 19 | |
| 20 | from __future__ import annotations |
| 21 | |
| 22 | import logging |
| 23 | from dataclasses import dataclass, field |
| 24 | from pathlib import Path |
| 25 | from typing import Any |
| 26 | |
| 27 | from navegador.graph.schema import EdgeType, NodeLabel |
| 28 | from navegador.graph.store import GraphStore |
| 29 | |
| 30 | logger = logging.getLogger(__name__) |
| 31 | |
| 32 | |
| @@ -135,13 +134,13 @@ | |
| 135 | knowledge_graph.json, or any combination that may contain an |
| 136 | ``action_items`` list or ``entities``/``nodes`` with task types. |
| 137 | """ |
| 138 | items: list[ActionItem] = [] |
| 139 | |
| 140 | source = kg_data.get("video", {}).get("title", "") or kg_data.get( |
| 141 | "project", {} |
| 142 | ).get("name", "") |
| 143 | |
| 144 | # Explicit action_items list (manifest format) |
| 145 | for raw in kg_data.get("action_items", []): |
| 146 | action = (raw.get("action") or "").strip() |
| 147 | if not action: |
| @@ -201,18 +200,20 @@ | |
| 201 | logger.warning("build_decision_timeline: query failed", exc_info=True) |
| 202 | return [] |
| 203 | |
| 204 | timeline = [] |
| 205 | for row in rows: |
| 206 | timeline.append({ |
| 207 | "name": row[0] or "", |
| 208 | "description": row[1] or "", |
| 209 | "domain": row[2] or "", |
| 210 | "status": row[3] or "", |
| 211 | "rationale": row[4] or "", |
| 212 | "date": row[5] or "", |
| 213 | }) |
| 214 | return timeline |
| 215 | |
| 216 | # ── Auto-link to code ───────────────────────────────────────────────────── |
| 217 | |
| 218 | @staticmethod |
| @@ -230,18 +231,14 @@ | |
| 230 | ------- |
| 231 | int — number of new ANNOTATES edges created |
| 232 | """ |
| 233 | # Fetch all knowledge nodes |
| 234 | knowledge_cypher = ( |
| 235 | "MATCH (k) " |
| 236 | "WHERE k:Concept OR k:Decision OR k:Rule " |
| 237 | "RETURN labels(k)[0], k.name" |
| 238 | ) |
| 239 | code_cypher = ( |
| 240 | "MATCH (c) " |
| 241 | "WHERE c:Function OR c:Class OR c:Method " |
| 242 | "RETURN labels(c)[0], c.name" |
| 243 | ) |
| 244 | |
| 245 | try: |
| 246 | k_result = store.query(knowledge_cypher) |
| 247 | c_result = store.query(code_cypher) |
| @@ -248,53 +245,41 @@ | |
| 248 | except Exception: |
| 249 | logger.warning("auto_link_to_code: initial queries failed", exc_info=True) |
| 250 | return 0 |
| 251 | |
| 252 | knowledge_nodes: list[tuple[str, str]] = [ |
| 253 | (str(row[0]), str(row[1])) |
| 254 | for row in (k_result.result_set or []) |
| 255 | if row[0] and row[1] |
| 256 | ] |
| 257 | code_nodes: list[tuple[str, str]] = [ |
| 258 | (str(row[0]), str(row[1])) |
| 259 | for row in (c_result.result_set or []) |
| 260 | if row[0] and row[1] |
| 261 | ] |
| 262 | |
| 263 | if not knowledge_nodes or not code_nodes: |
| 264 | return 0 |
| 265 | |
| 266 | linked = 0 |
| 267 | for k_label, k_name in knowledge_nodes: |
| 268 | # Extract significant tokens (length >= 4) from the knowledge name |
| 269 | tokens = [ |
| 270 | w.lower() |
| 271 | for w in k_name.replace("_", " ").replace("-", " ").split() |
| 272 | if len(w) >= 4 |
| 273 | ] |
| 274 | if not tokens: |
| 275 | continue |
| 276 | |
| 277 | for c_label, c_name in code_nodes: |
| 278 | c_lower = c_name.lower() |
| 279 | if any(tok in c_lower for tok in tokens): |
| 280 | # Create ANNOTATES edge from knowledge node to code node |
| 281 | cypher = ( |
| 282 | "MATCH (k:" |
| 283 | + k_label |
| 284 | + " {name: $kn}), (c:" |
| 285 | + c_label |
| 286 | + " {name: $cn}) " |
| 287 | "MERGE (k)-[r:ANNOTATES]->(c)" |
| 288 | ) |
| 289 | try: |
| 290 | store.query(cypher, {"kn": k_name, "cn": c_name}) |
| 291 | linked += 1 |
| 292 | except Exception: |
| 293 | logger.debug( |
| 294 | "auto_link_to_code: could not link %s → %s", k_name, c_name |
| 295 | ) |
| 296 | |
| 297 | return linked |
| 298 | |
| 299 | # ── Helpers ─────────────────────────────────────────────────────────────── |
| 300 | |
| 301 |
| --- navegador/planopticon_pipeline.py | |
| +++ navegador/planopticon_pipeline.py | |
| @@ -18,15 +18,14 @@ | |
| 18 | """ |
| 19 | |
| 20 | from __future__ import annotations |
| 21 | |
| 22 | import logging |
| 23 | from dataclasses import dataclass |
| 24 | from pathlib import Path |
| 25 | from typing import Any |
| 26 | |
| 27 | from navegador.graph.store import GraphStore |
| 28 | |
| 29 | logger = logging.getLogger(__name__) |
| 30 | |
| 31 | |
| @@ -135,13 +134,13 @@ | |
| 134 | knowledge_graph.json, or any combination that may contain an |
| 135 | ``action_items`` list or ``entities``/``nodes`` with task types. |
| 136 | """ |
| 137 | items: list[ActionItem] = [] |
| 138 | |
| 139 | source = kg_data.get("video", {}).get("title", "") or kg_data.get("project", {}).get( |
| 140 | "name", "" |
| 141 | ) |
| 142 | |
| 143 | # Explicit action_items list (manifest format) |
| 144 | for raw in kg_data.get("action_items", []): |
| 145 | action = (raw.get("action") or "").strip() |
| 146 | if not action: |
| @@ -201,18 +200,20 @@ | |
| 200 | logger.warning("build_decision_timeline: query failed", exc_info=True) |
| 201 | return [] |
| 202 | |
| 203 | timeline = [] |
| 204 | for row in rows: |
| 205 | timeline.append( |
| 206 | { |
| 207 | "name": row[0] or "", |
| 208 | "description": row[1] or "", |
| 209 | "domain": row[2] or "", |
| 210 | "status": row[3] or "", |
| 211 | "rationale": row[4] or "", |
| 212 | "date": row[5] or "", |
| 213 | } |
| 214 | ) |
| 215 | return timeline |
| 216 | |
| 217 | # ── Auto-link to code ───────────────────────────────────────────────────── |
| 218 | |
| 219 | @staticmethod |
| @@ -230,18 +231,14 @@ | |
| 231 | ------- |
| 232 | int — number of new ANNOTATES edges created |
| 233 | """ |
| 234 | # Fetch all knowledge nodes |
| 235 | knowledge_cypher = ( |
| 236 | "MATCH (k) WHERE k:Concept OR k:Decision OR k:Rule RETURN labels(k)[0], k.name" |
| 237 | ) |
| 238 | code_cypher = ( |
| 239 | "MATCH (c) WHERE c:Function OR c:Class OR c:Method RETURN labels(c)[0], c.name" |
| 240 | ) |
| 241 | |
| 242 | try: |
| 243 | k_result = store.query(knowledge_cypher) |
| 244 | c_result = store.query(code_cypher) |
| @@ -248,53 +245,41 @@ | |
| 245 | except Exception: |
| 246 | logger.warning("auto_link_to_code: initial queries failed", exc_info=True) |
| 247 | return 0 |
| 248 | |
| 249 | knowledge_nodes: list[tuple[str, str]] = [ |
| 250 | (str(row[0]), str(row[1])) for row in (k_result.result_set or []) if row[0] and row[1] |
| 251 | ] |
| 252 | code_nodes: list[tuple[str, str]] = [ |
| 253 | (str(row[0]), str(row[1])) for row in (c_result.result_set or []) if row[0] and row[1] |
| 254 | ] |
| 255 | |
| 256 | if not knowledge_nodes or not code_nodes: |
| 257 | return 0 |
| 258 | |
| 259 | linked = 0 |
| 260 | for k_label, k_name in knowledge_nodes: |
| 261 | # Extract significant tokens (length >= 4) from the knowledge name |
| 262 | tokens = [ |
| 263 | w.lower() for w in k_name.replace("_", " ").replace("-", " ").split() if len(w) >= 4 |
| 264 | ] |
| 265 | if not tokens: |
| 266 | continue |
| 267 | |
| 268 | for c_label, c_name in code_nodes: |
| 269 | c_lower = c_name.lower() |
| 270 | if any(tok in c_lower for tok in tokens): |
| 271 | # Create ANNOTATES edge from knowledge node to code node |
| 272 | cypher = ( |
| 273 | "MATCH (k:" + k_label + " {name: $kn}), (c:" + c_label + " {name: $cn}) " |
| 274 | "MERGE (k)-[r:ANNOTATES]->(c)" |
| 275 | ) |
| 276 | try: |
| 277 | store.query(cypher, {"kn": k_name, "cn": c_name}) |
| 278 | linked += 1 |
| 279 | except Exception: |
| 280 | logger.debug("auto_link_to_code: could not link %s → %s", k_name, c_name) |
| 281 | |
| 282 | return linked |
| 283 | |
| 284 | # ── Helpers ─────────────────────────────────────────────────────────────── |
| 285 | |
| 286 |
+8
-27
| --- navegador/pm.py | ||
| +++ navegador/pm.py | ||
| @@ -101,14 +101,11 @@ | ||
| 101 | 101 | } |
| 102 | 102 | if token: |
| 103 | 103 | headers["Authorization"] = f"Bearer {token}" |
| 104 | 104 | |
| 105 | 105 | per_page = min(limit, 100) |
| 106 | - url = ( | |
| 107 | - f"https://api.github.com/repos/{repo}/issues" | |
| 108 | - f"?state={state}&per_page={per_page}&page=1" | |
| 109 | - ) | |
| 106 | + url = f"https://api.github.com/repos/{repo}/issues?state={state}&per_page={per_page}&page=1" | |
| 110 | 107 | |
| 111 | 108 | try: |
| 112 | 109 | req = urllib.request.Request(url, headers=headers) |
| 113 | 110 | with urllib.request.urlopen(req, timeout=15) as resp: |
| 114 | 111 | import json |
| @@ -226,17 +223,13 @@ | ||
| 226 | 223 | |
| 227 | 224 | Returns |
| 228 | 225 | ------- |
| 229 | 226 | int — number of edges created |
| 230 | 227 | """ |
| 231 | - ticket_cypher = ( | |
| 232 | - "MATCH (t:Rule) WHERE t.domain = $domain " | |
| 233 | - "RETURN t.name, t.description" | |
| 234 | - ) | |
| 228 | + ticket_cypher = "MATCH (t:Rule) WHERE t.domain = $domain RETURN t.name, t.description" | |
| 235 | 229 | code_cypher = ( |
| 236 | - "MATCH (c) WHERE c:Function OR c:Class OR c:Method " | |
| 237 | - "RETURN labels(c)[0], c.name" | |
| 230 | + "MATCH (c) WHERE c:Function OR c:Class OR c:Method RETURN labels(c)[0], c.name" | |
| 238 | 231 | ) |
| 239 | 232 | |
| 240 | 233 | try: |
| 241 | 234 | t_result = self.store.query(ticket_cypher, {"domain": domain}) |
| 242 | 235 | c_result = self.store.query(code_cypher) |
| @@ -243,49 +236,37 @@ | ||
| 243 | 236 | except Exception: |
| 244 | 237 | logger.warning("TicketIngester._link_to_code: queries failed", exc_info=True) |
| 245 | 238 | return 0 |
| 246 | 239 | |
| 247 | 240 | tickets = [ |
| 248 | - (str(row[0]), str(row[1] or "")) | |
| 249 | - for row in (t_result.result_set or []) | |
| 250 | - if row[0] | |
| 241 | + (str(row[0]), str(row[1] or "")) for row in (t_result.result_set or []) if row[0] | |
| 251 | 242 | ] |
| 252 | 243 | code_nodes = [ |
| 253 | - (str(row[0]), str(row[1])) | |
| 254 | - for row in (c_result.result_set or []) | |
| 255 | - if row[0] and row[1] | |
| 244 | + (str(row[0]), str(row[1])) for row in (c_result.result_set or []) if row[0] and row[1] | |
| 256 | 245 | ] |
| 257 | 246 | |
| 258 | 247 | if not tickets or not code_nodes: |
| 259 | 248 | return 0 |
| 260 | 249 | |
| 261 | 250 | linked = 0 |
| 262 | 251 | for t_name, t_desc in tickets: |
| 263 | 252 | combined = f"{t_name} {t_desc}" |
| 264 | - tokens = { | |
| 265 | - w.lower() | |
| 266 | - for w in re.split(r"[\s\W]+", combined) | |
| 267 | - if len(w) >= 4 | |
| 268 | - } | |
| 253 | + tokens = {w.lower() for w in re.split(r"[\s\W]+", combined) if len(w) >= 4} | |
| 269 | 254 | if not tokens: |
| 270 | 255 | continue |
| 271 | 256 | |
| 272 | 257 | for c_label, c_name in code_nodes: |
| 273 | 258 | if any(tok in c_name.lower() for tok in tokens): |
| 274 | 259 | cypher = ( |
| 275 | - "MATCH (t:Rule {name: $tn}), (c:" | |
| 276 | - + c_label | |
| 277 | - + " {name: $cn}) " | |
| 260 | + "MATCH (t:Rule {name: $tn}), (c:" + c_label + " {name: $cn}) " | |
| 278 | 261 | "MERGE (t)-[r:ANNOTATES]->(c)" |
| 279 | 262 | ) |
| 280 | 263 | try: |
| 281 | 264 | self.store.query(cypher, {"tn": t_name, "cn": c_name}) |
| 282 | 265 | linked += 1 |
| 283 | 266 | except Exception: |
| 284 | - logger.debug( | |
| 285 | - "TicketIngester: could not link %s → %s", t_name, c_name | |
| 286 | - ) | |
| 267 | + logger.debug("TicketIngester: could not link %s → %s", t_name, c_name) | |
| 287 | 268 | return linked |
| 288 | 269 | |
| 289 | 270 | @staticmethod |
| 290 | 271 | def _github_severity(labels: list[str]) -> str: |
| 291 | 272 | """Map GitHub label names to navegador severity levels.""" |
| 292 | 273 |
| --- navegador/pm.py | |
| +++ navegador/pm.py | |
| @@ -101,14 +101,11 @@ | |
| 101 | } |
| 102 | if token: |
| 103 | headers["Authorization"] = f"Bearer {token}" |
| 104 | |
| 105 | per_page = min(limit, 100) |
| 106 | url = ( |
| 107 | f"https://api.github.com/repos/{repo}/issues" |
| 108 | f"?state={state}&per_page={per_page}&page=1" |
| 109 | ) |
| 110 | |
| 111 | try: |
| 112 | req = urllib.request.Request(url, headers=headers) |
| 113 | with urllib.request.urlopen(req, timeout=15) as resp: |
| 114 | import json |
| @@ -226,17 +223,13 @@ | |
| 226 | |
| 227 | Returns |
| 228 | ------- |
| 229 | int — number of edges created |
| 230 | """ |
| 231 | ticket_cypher = ( |
| 232 | "MATCH (t:Rule) WHERE t.domain = $domain " |
| 233 | "RETURN t.name, t.description" |
| 234 | ) |
| 235 | code_cypher = ( |
| 236 | "MATCH (c) WHERE c:Function OR c:Class OR c:Method " |
| 237 | "RETURN labels(c)[0], c.name" |
| 238 | ) |
| 239 | |
| 240 | try: |
| 241 | t_result = self.store.query(ticket_cypher, {"domain": domain}) |
| 242 | c_result = self.store.query(code_cypher) |
| @@ -243,49 +236,37 @@ | |
| 243 | except Exception: |
| 244 | logger.warning("TicketIngester._link_to_code: queries failed", exc_info=True) |
| 245 | return 0 |
| 246 | |
| 247 | tickets = [ |
| 248 | (str(row[0]), str(row[1] or "")) |
| 249 | for row in (t_result.result_set or []) |
| 250 | if row[0] |
| 251 | ] |
| 252 | code_nodes = [ |
| 253 | (str(row[0]), str(row[1])) |
| 254 | for row in (c_result.result_set or []) |
| 255 | if row[0] and row[1] |
| 256 | ] |
| 257 | |
| 258 | if not tickets or not code_nodes: |
| 259 | return 0 |
| 260 | |
| 261 | linked = 0 |
| 262 | for t_name, t_desc in tickets: |
| 263 | combined = f"{t_name} {t_desc}" |
| 264 | tokens = { |
| 265 | w.lower() |
| 266 | for w in re.split(r"[\s\W]+", combined) |
| 267 | if len(w) >= 4 |
| 268 | } |
| 269 | if not tokens: |
| 270 | continue |
| 271 | |
| 272 | for c_label, c_name in code_nodes: |
| 273 | if any(tok in c_name.lower() for tok in tokens): |
| 274 | cypher = ( |
| 275 | "MATCH (t:Rule {name: $tn}), (c:" |
| 276 | + c_label |
| 277 | + " {name: $cn}) " |
| 278 | "MERGE (t)-[r:ANNOTATES]->(c)" |
| 279 | ) |
| 280 | try: |
| 281 | self.store.query(cypher, {"tn": t_name, "cn": c_name}) |
| 282 | linked += 1 |
| 283 | except Exception: |
| 284 | logger.debug( |
| 285 | "TicketIngester: could not link %s → %s", t_name, c_name |
| 286 | ) |
| 287 | return linked |
| 288 | |
| 289 | @staticmethod |
| 290 | def _github_severity(labels: list[str]) -> str: |
| 291 | """Map GitHub label names to navegador severity levels.""" |
| 292 |
| --- navegador/pm.py | |
| +++ navegador/pm.py | |
| @@ -101,14 +101,11 @@ | |
| 101 | } |
| 102 | if token: |
| 103 | headers["Authorization"] = f"Bearer {token}" |
| 104 | |
| 105 | per_page = min(limit, 100) |
| 106 | url = f"https://api.github.com/repos/{repo}/issues?state={state}&per_page={per_page}&page=1" |
| 107 | |
| 108 | try: |
| 109 | req = urllib.request.Request(url, headers=headers) |
| 110 | with urllib.request.urlopen(req, timeout=15) as resp: |
| 111 | import json |
| @@ -226,17 +223,13 @@ | |
| 223 | |
| 224 | Returns |
| 225 | ------- |
| 226 | int — number of edges created |
| 227 | """ |
| 228 | ticket_cypher = "MATCH (t:Rule) WHERE t.domain = $domain RETURN t.name, t.description" |
| 229 | code_cypher = ( |
| 230 | "MATCH (c) WHERE c:Function OR c:Class OR c:Method RETURN labels(c)[0], c.name" |
| 231 | ) |
| 232 | |
| 233 | try: |
| 234 | t_result = self.store.query(ticket_cypher, {"domain": domain}) |
| 235 | c_result = self.store.query(code_cypher) |
| @@ -243,49 +236,37 @@ | |
| 236 | except Exception: |
| 237 | logger.warning("TicketIngester._link_to_code: queries failed", exc_info=True) |
| 238 | return 0 |
| 239 | |
| 240 | tickets = [ |
| 241 | (str(row[0]), str(row[1] or "")) for row in (t_result.result_set or []) if row[0] |
| 242 | ] |
| 243 | code_nodes = [ |
| 244 | (str(row[0]), str(row[1])) for row in (c_result.result_set or []) if row[0] and row[1] |
| 245 | ] |
| 246 | |
| 247 | if not tickets or not code_nodes: |
| 248 | return 0 |
| 249 | |
| 250 | linked = 0 |
| 251 | for t_name, t_desc in tickets: |
| 252 | combined = f"{t_name} {t_desc}" |
| 253 | tokens = {w.lower() for w in re.split(r"[\s\W]+", combined) if len(w) >= 4} |
| 254 | if not tokens: |
| 255 | continue |
| 256 | |
| 257 | for c_label, c_name in code_nodes: |
| 258 | if any(tok in c_name.lower() for tok in tokens): |
| 259 | cypher = ( |
| 260 | "MATCH (t:Rule {name: $tn}), (c:" + c_label + " {name: $cn}) " |
| 261 | "MERGE (t)-[r:ANNOTATES]->(c)" |
| 262 | ) |
| 263 | try: |
| 264 | self.store.query(cypher, {"tn": t_name, "cn": c_name}) |
| 265 | linked += 1 |
| 266 | except Exception: |
| 267 | logger.debug("TicketIngester: could not link %s → %s", t_name, c_name) |
| 268 | return linked |
| 269 | |
| 270 | @staticmethod |
| 271 | def _github_severity(labels: list[str]) -> str: |
| 272 | """Map GitHub label names to navegador severity levels.""" |
| 273 |
+2
-6
| --- navegador/refactor.py | ||
| +++ navegador/refactor.py | ||
| @@ -61,13 +61,11 @@ | ||
| 61 | 61 | def __init__(self, store: GraphStore) -> None: |
| 62 | 62 | self.store = store |
| 63 | 63 | |
| 64 | 64 | # ── Public API ──────────────────────────────────────────────────────────── |
| 65 | 65 | |
| 66 | - def find_references( | |
| 67 | - self, name: str, file_path: str = "" | |
| 68 | - ) -> list[dict[str, Any]]: | |
| 66 | + def find_references(self, name: str, file_path: str = "") -> list[dict[str, Any]]: | |
| 69 | 67 | """ |
| 70 | 68 | Return all graph nodes whose name matches *name*. |
| 71 | 69 | |
| 72 | 70 | Optionally filter to a specific file with *file_path*. |
| 73 | 71 | """ |
| @@ -151,13 +149,11 @@ | ||
| 151 | 149 | |
| 152 | 150 | # ── Helpers ─────────────────────────────────────────────────────────────── |
| 153 | 151 | |
| 154 | 152 | def _count_edges(self, name: str) -> int: |
| 155 | 153 | """Count edges incident on nodes named *name*.""" |
| 156 | - cypher = ( | |
| 157 | - "MATCH (n)-[r]-() WHERE n.name = $name RETURN count(r) AS c" | |
| 158 | - ) | |
| 154 | + cypher = "MATCH (n)-[r]-() WHERE n.name = $name RETURN count(r) AS c" | |
| 159 | 155 | result = self.store.query(cypher, {"name": name}) |
| 160 | 156 | rows = result.result_set or [] |
| 161 | 157 | if rows: |
| 162 | 158 | return rows[0][0] or 0 |
| 163 | 159 | return 0 |
| 164 | 160 |
| --- navegador/refactor.py | |
| +++ navegador/refactor.py | |
| @@ -61,13 +61,11 @@ | |
| 61 | def __init__(self, store: GraphStore) -> None: |
| 62 | self.store = store |
| 63 | |
| 64 | # ── Public API ──────────────────────────────────────────────────────────── |
| 65 | |
| 66 | def find_references( |
| 67 | self, name: str, file_path: str = "" |
| 68 | ) -> list[dict[str, Any]]: |
| 69 | """ |
| 70 | Return all graph nodes whose name matches *name*. |
| 71 | |
| 72 | Optionally filter to a specific file with *file_path*. |
| 73 | """ |
| @@ -151,13 +149,11 @@ | |
| 151 | |
| 152 | # ── Helpers ─────────────────────────────────────────────────────────────── |
| 153 | |
| 154 | def _count_edges(self, name: str) -> int: |
| 155 | """Count edges incident on nodes named *name*.""" |
| 156 | cypher = ( |
| 157 | "MATCH (n)-[r]-() WHERE n.name = $name RETURN count(r) AS c" |
| 158 | ) |
| 159 | result = self.store.query(cypher, {"name": name}) |
| 160 | rows = result.result_set or [] |
| 161 | if rows: |
| 162 | return rows[0][0] or 0 |
| 163 | return 0 |
| 164 |
| --- navegador/refactor.py | |
| +++ navegador/refactor.py | |
| @@ -61,13 +61,11 @@ | |
| 61 | def __init__(self, store: GraphStore) -> None: |
| 62 | self.store = store |
| 63 | |
| 64 | # ── Public API ──────────────────────────────────────────────────────────── |
| 65 | |
| 66 | def find_references(self, name: str, file_path: str = "") -> list[dict[str, Any]]: |
| 67 | """ |
| 68 | Return all graph nodes whose name matches *name*. |
| 69 | |
| 70 | Optionally filter to a specific file with *file_path*. |
| 71 | """ |
| @@ -151,13 +149,11 @@ | |
| 149 | |
| 150 | # ── Helpers ─────────────────────────────────────────────────────────────── |
| 151 | |
| 152 | def _count_edges(self, name: str) -> int: |
| 153 | """Count edges incident on nodes named *name*.""" |
| 154 | cypher = "MATCH (n)-[r]-() WHERE n.name = $name RETURN count(r) AS c" |
| 155 | result = self.store.query(cypher, {"name": name}) |
| 156 | rows = result.result_set or [] |
| 157 | if rows: |
| 158 | return rows[0][0] or 0 |
| 159 | return 0 |
| 160 |
+3
-9
| --- navegador/sdk.py | ||
| +++ navegador/sdk.py | ||
| @@ -94,13 +94,11 @@ | ||
| 94 | 94 | Returns: |
| 95 | 95 | Dict with counts: files, functions, classes, edges, skipped. |
| 96 | 96 | """ |
| 97 | 97 | from navegador.ingestion import RepoIngester |
| 98 | 98 | |
| 99 | - return RepoIngester(self._store).ingest( | |
| 100 | - repo_path, clear=clear, incremental=incremental | |
| 101 | - ) | |
| 99 | + return RepoIngester(self._store).ingest(repo_path, clear=clear, incremental=incremental) | |
| 102 | 100 | |
| 103 | 101 | # ── Context loading ─────────────────────────────────────────────────────── |
| 104 | 102 | |
| 105 | 103 | def file_context(self, file_path: str) -> Any: |
| 106 | 104 | """ |
| @@ -114,13 +112,11 @@ | ||
| 114 | 112 | """ |
| 115 | 113 | from navegador.context.loader import ContextLoader |
| 116 | 114 | |
| 117 | 115 | return ContextLoader(self._store).load_file(file_path) |
| 118 | 116 | |
| 119 | - def function_context( | |
| 120 | - self, name: str, file_path: str = "", depth: int = 2 | |
| 121 | - ) -> Any: | |
| 117 | + def function_context(self, name: str, file_path: str = "", depth: int = 2) -> Any: | |
| 122 | 118 | """ |
| 123 | 119 | Return a ContextBundle for a function — callers, callees, decorators. |
| 124 | 120 | |
| 125 | 121 | Args: |
| 126 | 122 | name: Function name. |
| @@ -130,13 +126,11 @@ | ||
| 130 | 126 | Returns: |
| 131 | 127 | :class:`~navegador.context.loader.ContextBundle` |
| 132 | 128 | """ |
| 133 | 129 | from navegador.context.loader import ContextLoader |
| 134 | 130 | |
| 135 | - return ContextLoader(self._store).load_function( | |
| 136 | - name, file_path=file_path, depth=depth | |
| 137 | - ) | |
| 131 | + return ContextLoader(self._store).load_function(name, file_path=file_path, depth=depth) | |
| 138 | 132 | |
| 139 | 133 | def class_context(self, name: str, file_path: str = "") -> Any: |
| 140 | 134 | """ |
| 141 | 135 | Return a ContextBundle for a class — methods, inheritance, references. |
| 142 | 136 | |
| 143 | 137 |
| --- navegador/sdk.py | |
| +++ navegador/sdk.py | |
| @@ -94,13 +94,11 @@ | |
| 94 | Returns: |
| 95 | Dict with counts: files, functions, classes, edges, skipped. |
| 96 | """ |
| 97 | from navegador.ingestion import RepoIngester |
| 98 | |
| 99 | return RepoIngester(self._store).ingest( |
| 100 | repo_path, clear=clear, incremental=incremental |
| 101 | ) |
| 102 | |
| 103 | # ── Context loading ─────────────────────────────────────────────────────── |
| 104 | |
| 105 | def file_context(self, file_path: str) -> Any: |
| 106 | """ |
| @@ -114,13 +112,11 @@ | |
| 114 | """ |
| 115 | from navegador.context.loader import ContextLoader |
| 116 | |
| 117 | return ContextLoader(self._store).load_file(file_path) |
| 118 | |
| 119 | def function_context( |
| 120 | self, name: str, file_path: str = "", depth: int = 2 |
| 121 | ) -> Any: |
| 122 | """ |
| 123 | Return a ContextBundle for a function — callers, callees, decorators. |
| 124 | |
| 125 | Args: |
| 126 | name: Function name. |
| @@ -130,13 +126,11 @@ | |
| 130 | Returns: |
| 131 | :class:`~navegador.context.loader.ContextBundle` |
| 132 | """ |
| 133 | from navegador.context.loader import ContextLoader |
| 134 | |
| 135 | return ContextLoader(self._store).load_function( |
| 136 | name, file_path=file_path, depth=depth |
| 137 | ) |
| 138 | |
| 139 | def class_context(self, name: str, file_path: str = "") -> Any: |
| 140 | """ |
| 141 | Return a ContextBundle for a class — methods, inheritance, references. |
| 142 | |
| 143 |
| --- navegador/sdk.py | |
| +++ navegador/sdk.py | |
| @@ -94,13 +94,11 @@ | |
| 94 | Returns: |
| 95 | Dict with counts: files, functions, classes, edges, skipped. |
| 96 | """ |
| 97 | from navegador.ingestion import RepoIngester |
| 98 | |
| 99 | return RepoIngester(self._store).ingest(repo_path, clear=clear, incremental=incremental) |
| 100 | |
| 101 | # ── Context loading ─────────────────────────────────────────────────────── |
| 102 | |
| 103 | def file_context(self, file_path: str) -> Any: |
| 104 | """ |
| @@ -114,13 +112,11 @@ | |
| 112 | """ |
| 113 | from navegador.context.loader import ContextLoader |
| 114 | |
| 115 | return ContextLoader(self._store).load_file(file_path) |
| 116 | |
| 117 | def function_context(self, name: str, file_path: str = "", depth: int = 2) -> Any: |
| 118 | """ |
| 119 | Return a ContextBundle for a function — callers, callees, decorators. |
| 120 | |
| 121 | Args: |
| 122 | name: Function name. |
| @@ -130,13 +126,11 @@ | |
| 126 | Returns: |
| 127 | :class:`~navegador.context.loader.ContextBundle` |
| 128 | """ |
| 129 | from navegador.context.loader import ContextLoader |
| 130 | |
| 131 | return ContextLoader(self._store).load_function(name, file_path=file_path, depth=depth) |
| 132 | |
| 133 | def class_context(self, name: str, file_path: str = "") -> Any: |
| 134 | """ |
| 135 | Return a ContextBundle for a class — methods, inheritance, references. |
| 136 | |
| 137 |
+2
-5
| --- navegador/security.py | ||
| +++ navegador/security.py | ||
| @@ -21,11 +21,11 @@ | ||
| 21 | 21 | """A single sensitive-content finding.""" |
| 22 | 22 | |
| 23 | 23 | pattern_name: str |
| 24 | 24 | line_number: int |
| 25 | 25 | match_text: str # the matched text — stored already-redacted |
| 26 | - severity: str # "high" or "medium" | |
| 26 | + severity: str # "high" or "medium" | |
| 27 | 27 | |
| 28 | 28 | |
| 29 | 29 | # --------------------------------------------------------------------------- |
| 30 | 30 | # Pattern registry |
| 31 | 31 | # --------------------------------------------------------------------------- |
| @@ -68,13 +68,11 @@ | ||
| 68 | 68 | "high", |
| 69 | 69 | ), |
| 70 | 70 | # Password in assignment |
| 71 | 71 | ( |
| 72 | 72 | "password_assignment", |
| 73 | - re.compile( | |
| 74 | - r'(?i)(?:password|passwd|secret)\s*[=:]\s*["\']([^"\']{4,})["\']' | |
| 75 | - ), | |
| 73 | + re.compile(r'(?i)(?:password|passwd|secret)\s*[=:]\s*["\']([^"\']{4,})["\']'), | |
| 76 | 74 | "high", |
| 77 | 75 | ), |
| 78 | 76 | # PEM private key header |
| 79 | 77 | ( |
| 80 | 78 | "private_key_pem", |
| @@ -121,11 +119,10 @@ | ||
| 121 | 119 | Returns a list of :class:`SensitiveMatch` objects, one per finding. |
| 122 | 120 | The ``match_text`` field contains the matched string already rendered as |
| 123 | 121 | ``[REDACTED]`` so callers never need to touch the raw secret. |
| 124 | 122 | """ |
| 125 | 123 | findings: list[SensitiveMatch] = [] |
| 126 | - lines = text.splitlines() | |
| 127 | 124 | |
| 128 | 125 | for pattern_name, regex, severity in _PATTERNS: |
| 129 | 126 | for m in regex.finditer(text): |
| 130 | 127 | # Determine line number (1-based) by counting newlines before match start |
| 131 | 128 | line_number = text.count("\n", 0, m.start()) + 1 |
| 132 | 129 |
| --- navegador/security.py | |
| +++ navegador/security.py | |
| @@ -21,11 +21,11 @@ | |
| 21 | """A single sensitive-content finding.""" |
| 22 | |
| 23 | pattern_name: str |
| 24 | line_number: int |
| 25 | match_text: str # the matched text — stored already-redacted |
| 26 | severity: str # "high" or "medium" |
| 27 | |
| 28 | |
| 29 | # --------------------------------------------------------------------------- |
| 30 | # Pattern registry |
| 31 | # --------------------------------------------------------------------------- |
| @@ -68,13 +68,11 @@ | |
| 68 | "high", |
| 69 | ), |
| 70 | # Password in assignment |
| 71 | ( |
| 72 | "password_assignment", |
| 73 | re.compile( |
| 74 | r'(?i)(?:password|passwd|secret)\s*[=:]\s*["\']([^"\']{4,})["\']' |
| 75 | ), |
| 76 | "high", |
| 77 | ), |
| 78 | # PEM private key header |
| 79 | ( |
| 80 | "private_key_pem", |
| @@ -121,11 +119,10 @@ | |
| 121 | Returns a list of :class:`SensitiveMatch` objects, one per finding. |
| 122 | The ``match_text`` field contains the matched string already rendered as |
| 123 | ``[REDACTED]`` so callers never need to touch the raw secret. |
| 124 | """ |
| 125 | findings: list[SensitiveMatch] = [] |
| 126 | lines = text.splitlines() |
| 127 | |
| 128 | for pattern_name, regex, severity in _PATTERNS: |
| 129 | for m in regex.finditer(text): |
| 130 | # Determine line number (1-based) by counting newlines before match start |
| 131 | line_number = text.count("\n", 0, m.start()) + 1 |
| 132 |
| --- navegador/security.py | |
| +++ navegador/security.py | |
| @@ -21,11 +21,11 @@ | |
| 21 | """A single sensitive-content finding.""" |
| 22 | |
| 23 | pattern_name: str |
| 24 | line_number: int |
| 25 | match_text: str # the matched text — stored already-redacted |
| 26 | severity: str # "high" or "medium" |
| 27 | |
| 28 | |
| 29 | # --------------------------------------------------------------------------- |
| 30 | # Pattern registry |
| 31 | # --------------------------------------------------------------------------- |
| @@ -68,13 +68,11 @@ | |
| 68 | "high", |
| 69 | ), |
| 70 | # Password in assignment |
| 71 | ( |
| 72 | "password_assignment", |
| 73 | re.compile(r'(?i)(?:password|passwd|secret)\s*[=:]\s*["\']([^"\']{4,})["\']'), |
| 74 | "high", |
| 75 | ), |
| 76 | # PEM private key header |
| 77 | ( |
| 78 | "private_key_pem", |
| @@ -121,11 +119,10 @@ | |
| 119 | Returns a list of :class:`SensitiveMatch` objects, one per finding. |
| 120 | The ``match_text`` field contains the matched string already rendered as |
| 121 | ``[REDACTED]`` so callers never need to touch the raw secret. |
| 122 | """ |
| 123 | findings: list[SensitiveMatch] = [] |
| 124 | |
| 125 | for pattern_name, regex, severity in _PATTERNS: |
| 126 | for m in regex.finditer(text): |
| 127 | # Determine line number (1-based) by counting newlines before match start |
| 128 | line_number = text.count("\n", 0, m.start()) + 1 |
| 129 |
+54
-43
| --- navegador/vcs.py | ||
| +++ navegador/vcs.py | ||
| @@ -17,11 +17,10 @@ | ||
| 17 | 17 | |
| 18 | 18 | import subprocess |
| 19 | 19 | from abc import ABC, abstractmethod |
| 20 | 20 | from pathlib import Path |
| 21 | 21 | |
| 22 | - | |
| 23 | 22 | # ── Abstract base ────────────────────────────────────────────────────────────── |
| 24 | 23 | |
| 25 | 24 | |
| 26 | 25 | class VCSAdapter(ABC): |
| 27 | 26 | """Abstract base class for VCS backends.""" |
| @@ -123,30 +122,34 @@ | ||
| 123 | 122 | Return up to *limit* log entries for *file_path*. |
| 124 | 123 | |
| 125 | 124 | Each entry has the keys: ``hash``, ``author``, ``date``, ``message``. |
| 126 | 125 | """ |
| 127 | 126 | fmt = "%H%x1f%an%x1f%ai%x1f%s" |
| 128 | - result = self._run([ | |
| 129 | - "log", | |
| 130 | - f"--max-count={limit}", | |
| 131 | - f"--format={fmt}", | |
| 132 | - "--", | |
| 133 | - file_path, | |
| 134 | - ]) | |
| 127 | + result = self._run( | |
| 128 | + [ | |
| 129 | + "log", | |
| 130 | + f"--max-count={limit}", | |
| 131 | + f"--format={fmt}", | |
| 132 | + "--", | |
| 133 | + file_path, | |
| 134 | + ] | |
| 135 | + ) | |
| 135 | 136 | |
| 136 | 137 | entries: list[dict] = [] |
| 137 | 138 | for line in result.stdout.strip().splitlines(): |
| 138 | 139 | if not line: |
| 139 | 140 | continue |
| 140 | 141 | parts = line.split("\x1f", 3) |
| 141 | 142 | if len(parts) == 4: |
| 142 | - entries.append({ | |
| 143 | - "hash": parts[0], | |
| 144 | - "author": parts[1], | |
| 145 | - "date": parts[2], | |
| 146 | - "message": parts[3], | |
| 147 | - }) | |
| 143 | + entries.append( | |
| 144 | + { | |
| 145 | + "hash": parts[0], | |
| 146 | + "author": parts[1], | |
| 147 | + "date": parts[2], | |
| 148 | + "message": parts[3], | |
| 149 | + } | |
| 150 | + ) | |
| 148 | 151 | return entries |
| 149 | 152 | |
| 150 | 153 | def blame(self, file_path: str) -> list[dict]: |
| 151 | 154 | """ |
| 152 | 155 | Return per-line blame data for *file_path*. |
| @@ -182,21 +185,23 @@ | ||
| 182 | 185 | i += 1 |
| 183 | 186 | # Read key-value pairs until we hit the content line (starts with \t) |
| 184 | 187 | while i < len(lines) and not lines[i].startswith("\t"): |
| 185 | 188 | kv = lines[i] |
| 186 | 189 | if kv.startswith("author "): |
| 187 | - current_author = kv[len("author "):] | |
| 190 | + current_author = kv[len("author ") :] | |
| 188 | 191 | i += 1 |
| 189 | 192 | # The content line starts with a tab |
| 190 | 193 | if i < len(lines) and lines[i].startswith("\t"): |
| 191 | 194 | content = lines[i][1:] # strip leading tab |
| 192 | - entries.append({ | |
| 193 | - "line": line_number, | |
| 194 | - "hash": current_hash, | |
| 195 | - "author": current_author, | |
| 196 | - "content": content, | |
| 197 | - }) | |
| 195 | + entries.append( | |
| 196 | + { | |
| 197 | + "line": line_number, | |
| 198 | + "hash": current_hash, | |
| 199 | + "author": current_author, | |
| 200 | + "content": content, | |
| 201 | + } | |
| 202 | + ) | |
| 198 | 203 | i += 1 |
| 199 | 204 | else: |
| 200 | 205 | i += 1 |
| 201 | 206 | |
| 202 | 207 | return entries |
| @@ -223,14 +228,11 @@ | ||
| 223 | 228 | check=check, |
| 224 | 229 | ) |
| 225 | 230 | |
| 226 | 231 | def is_repo(self) -> bool: |
| 227 | 232 | """Return True when *repo_path* looks like a Fossil checkout.""" |
| 228 | - return ( | |
| 229 | - (self.repo_path / ".fslckout").exists() | |
| 230 | - or (self.repo_path / "_FOSSIL_").exists() | |
| 231 | - ) | |
| 233 | + return (self.repo_path / ".fslckout").exists() or (self.repo_path / "_FOSSIL_").exists() | |
| 232 | 234 | |
| 233 | 235 | def current_branch(self) -> str: |
| 234 | 236 | """ |
| 235 | 237 | Return the name of the current Fossil branch. |
| 236 | 238 | |
| @@ -265,16 +267,21 @@ | ||
| 265 | 267 | |
| 266 | 268 | Runs ``fossil timeline --limit <n> --type ci --path <file>`` and |
| 267 | 269 | parses the output into a list of dicts with keys: |
| 268 | 270 | ``hash``, ``author``, ``date``, ``message``. |
| 269 | 271 | """ |
| 270 | - result = self._run([ | |
| 271 | - "timeline", | |
| 272 | - "--limit", str(limit), | |
| 273 | - "--type", "ci", | |
| 274 | - "--path", file_path, | |
| 275 | - ]) | |
| 272 | + result = self._run( | |
| 273 | + [ | |
| 274 | + "timeline", | |
| 275 | + "--limit", | |
| 276 | + str(limit), | |
| 277 | + "--type", | |
| 278 | + "ci", | |
| 279 | + "--path", | |
| 280 | + file_path, | |
| 281 | + ] | |
| 282 | + ) | |
| 276 | 283 | return _parse_fossil_timeline(result.stdout) |
| 277 | 284 | |
| 278 | 285 | def blame(self, file_path: str) -> list[dict]: |
| 279 | 286 | """ |
| 280 | 287 | Return per-line blame data for *file_path*. |
| @@ -317,16 +324,18 @@ | ||
| 317 | 324 | r"(\d{2}:\d{2}:\d{2})\s+\[([0-9a-f]+)\]\s+(.*?)(?:\s+\(user:\s*(\w+).*\))?$", |
| 318 | 325 | line, |
| 319 | 326 | ) |
| 320 | 327 | if m: |
| 321 | 328 | time_part, hash_part, message, author = m.groups() |
| 322 | - entries.append({ | |
| 323 | - "hash": hash_part, | |
| 324 | - "author": author or "", | |
| 325 | - "date": f"{current_date} {time_part}".strip(), | |
| 326 | - "message": message.rstrip(), | |
| 327 | - }) | |
| 329 | + entries.append( | |
| 330 | + { | |
| 331 | + "hash": hash_part, | |
| 332 | + "author": author or "", | |
| 333 | + "date": f"{current_date} {time_part}".strip(), | |
| 334 | + "message": message.rstrip(), | |
| 335 | + } | |
| 336 | + ) | |
| 328 | 337 | |
| 329 | 338 | return entries |
| 330 | 339 | |
| 331 | 340 | |
| 332 | 341 | def _parse_fossil_annotate(output: str) -> list[dict]: |
| @@ -349,16 +358,18 @@ | ||
| 349 | 358 | # Pattern: "<version> <author> <date>: <content>" |
| 350 | 359 | m = re.match(r"(\S+)\s+(\S+)\s+\S+:\s+(.*)", raw) |
| 351 | 360 | if m: |
| 352 | 361 | version, author, content = m.groups() |
| 353 | 362 | line_number += 1 |
| 354 | - entries.append({ | |
| 355 | - "line": line_number, | |
| 356 | - "hash": version, | |
| 357 | - "author": author, | |
| 358 | - "content": content, | |
| 359 | - }) | |
| 363 | + entries.append( | |
| 364 | + { | |
| 365 | + "line": line_number, | |
| 366 | + "hash": version, | |
| 367 | + "author": author, | |
| 368 | + "content": content, | |
| 369 | + } | |
| 370 | + ) | |
| 360 | 371 | |
| 361 | 372 | return entries |
| 362 | 373 | |
| 363 | 374 | |
| 364 | 375 | # ── Factory ──────────────────────────────────────────────────────────────────── |
| 365 | 376 |
| --- navegador/vcs.py | |
| +++ navegador/vcs.py | |
| @@ -17,11 +17,10 @@ | |
| 17 | |
| 18 | import subprocess |
| 19 | from abc import ABC, abstractmethod |
| 20 | from pathlib import Path |
| 21 | |
| 22 | |
| 23 | # ── Abstract base ────────────────────────────────────────────────────────────── |
| 24 | |
| 25 | |
| 26 | class VCSAdapter(ABC): |
| 27 | """Abstract base class for VCS backends.""" |
| @@ -123,30 +122,34 @@ | |
| 123 | Return up to *limit* log entries for *file_path*. |
| 124 | |
| 125 | Each entry has the keys: ``hash``, ``author``, ``date``, ``message``. |
| 126 | """ |
| 127 | fmt = "%H%x1f%an%x1f%ai%x1f%s" |
| 128 | result = self._run([ |
| 129 | "log", |
| 130 | f"--max-count={limit}", |
| 131 | f"--format={fmt}", |
| 132 | "--", |
| 133 | file_path, |
| 134 | ]) |
| 135 | |
| 136 | entries: list[dict] = [] |
| 137 | for line in result.stdout.strip().splitlines(): |
| 138 | if not line: |
| 139 | continue |
| 140 | parts = line.split("\x1f", 3) |
| 141 | if len(parts) == 4: |
| 142 | entries.append({ |
| 143 | "hash": parts[0], |
| 144 | "author": parts[1], |
| 145 | "date": parts[2], |
| 146 | "message": parts[3], |
| 147 | }) |
| 148 | return entries |
| 149 | |
| 150 | def blame(self, file_path: str) -> list[dict]: |
| 151 | """ |
| 152 | Return per-line blame data for *file_path*. |
| @@ -182,21 +185,23 @@ | |
| 182 | i += 1 |
| 183 | # Read key-value pairs until we hit the content line (starts with \t) |
| 184 | while i < len(lines) and not lines[i].startswith("\t"): |
| 185 | kv = lines[i] |
| 186 | if kv.startswith("author "): |
| 187 | current_author = kv[len("author "):] |
| 188 | i += 1 |
| 189 | # The content line starts with a tab |
| 190 | if i < len(lines) and lines[i].startswith("\t"): |
| 191 | content = lines[i][1:] # strip leading tab |
| 192 | entries.append({ |
| 193 | "line": line_number, |
| 194 | "hash": current_hash, |
| 195 | "author": current_author, |
| 196 | "content": content, |
| 197 | }) |
| 198 | i += 1 |
| 199 | else: |
| 200 | i += 1 |
| 201 | |
| 202 | return entries |
| @@ -223,14 +228,11 @@ | |
| 223 | check=check, |
| 224 | ) |
| 225 | |
| 226 | def is_repo(self) -> bool: |
| 227 | """Return True when *repo_path* looks like a Fossil checkout.""" |
| 228 | return ( |
| 229 | (self.repo_path / ".fslckout").exists() |
| 230 | or (self.repo_path / "_FOSSIL_").exists() |
| 231 | ) |
| 232 | |
| 233 | def current_branch(self) -> str: |
| 234 | """ |
| 235 | Return the name of the current Fossil branch. |
| 236 | |
| @@ -265,16 +267,21 @@ | |
| 265 | |
| 266 | Runs ``fossil timeline --limit <n> --type ci --path <file>`` and |
| 267 | parses the output into a list of dicts with keys: |
| 268 | ``hash``, ``author``, ``date``, ``message``. |
| 269 | """ |
| 270 | result = self._run([ |
| 271 | "timeline", |
| 272 | "--limit", str(limit), |
| 273 | "--type", "ci", |
| 274 | "--path", file_path, |
| 275 | ]) |
| 276 | return _parse_fossil_timeline(result.stdout) |
| 277 | |
| 278 | def blame(self, file_path: str) -> list[dict]: |
| 279 | """ |
| 280 | Return per-line blame data for *file_path*. |
| @@ -317,16 +324,18 @@ | |
| 317 | r"(\d{2}:\d{2}:\d{2})\s+\[([0-9a-f]+)\]\s+(.*?)(?:\s+\(user:\s*(\w+).*\))?$", |
| 318 | line, |
| 319 | ) |
| 320 | if m: |
| 321 | time_part, hash_part, message, author = m.groups() |
| 322 | entries.append({ |
| 323 | "hash": hash_part, |
| 324 | "author": author or "", |
| 325 | "date": f"{current_date} {time_part}".strip(), |
| 326 | "message": message.rstrip(), |
| 327 | }) |
| 328 | |
| 329 | return entries |
| 330 | |
| 331 | |
| 332 | def _parse_fossil_annotate(output: str) -> list[dict]: |
| @@ -349,16 +358,18 @@ | |
| 349 | # Pattern: "<version> <author> <date>: <content>" |
| 350 | m = re.match(r"(\S+)\s+(\S+)\s+\S+:\s+(.*)", raw) |
| 351 | if m: |
| 352 | version, author, content = m.groups() |
| 353 | line_number += 1 |
| 354 | entries.append({ |
| 355 | "line": line_number, |
| 356 | "hash": version, |
| 357 | "author": author, |
| 358 | "content": content, |
| 359 | }) |
| 360 | |
| 361 | return entries |
| 362 | |
| 363 | |
| 364 | # ── Factory ──────────────────────────────────────────────────────────────────── |
| 365 |
| --- navegador/vcs.py | |
| +++ navegador/vcs.py | |
| @@ -17,11 +17,10 @@ | |
| 17 | |
| 18 | import subprocess |
| 19 | from abc import ABC, abstractmethod |
| 20 | from pathlib import Path |
| 21 | |
| 22 | # ── Abstract base ────────────────────────────────────────────────────────────── |
| 23 | |
| 24 | |
| 25 | class VCSAdapter(ABC): |
| 26 | """Abstract base class for VCS backends.""" |
| @@ -123,30 +122,34 @@ | |
| 122 | Return up to *limit* log entries for *file_path*. |
| 123 | |
| 124 | Each entry has the keys: ``hash``, ``author``, ``date``, ``message``. |
| 125 | """ |
| 126 | fmt = "%H%x1f%an%x1f%ai%x1f%s" |
| 127 | result = self._run( |
| 128 | [ |
| 129 | "log", |
| 130 | f"--max-count={limit}", |
| 131 | f"--format={fmt}", |
| 132 | "--", |
| 133 | file_path, |
| 134 | ] |
| 135 | ) |
| 136 | |
| 137 | entries: list[dict] = [] |
| 138 | for line in result.stdout.strip().splitlines(): |
| 139 | if not line: |
| 140 | continue |
| 141 | parts = line.split("\x1f", 3) |
| 142 | if len(parts) == 4: |
| 143 | entries.append( |
| 144 | { |
| 145 | "hash": parts[0], |
| 146 | "author": parts[1], |
| 147 | "date": parts[2], |
| 148 | "message": parts[3], |
| 149 | } |
| 150 | ) |
| 151 | return entries |
| 152 | |
| 153 | def blame(self, file_path: str) -> list[dict]: |
| 154 | """ |
| 155 | Return per-line blame data for *file_path*. |
| @@ -182,21 +185,23 @@ | |
| 185 | i += 1 |
| 186 | # Read key-value pairs until we hit the content line (starts with \t) |
| 187 | while i < len(lines) and not lines[i].startswith("\t"): |
| 188 | kv = lines[i] |
| 189 | if kv.startswith("author "): |
| 190 | current_author = kv[len("author ") :] |
| 191 | i += 1 |
| 192 | # The content line starts with a tab |
| 193 | if i < len(lines) and lines[i].startswith("\t"): |
| 194 | content = lines[i][1:] # strip leading tab |
| 195 | entries.append( |
| 196 | { |
| 197 | "line": line_number, |
| 198 | "hash": current_hash, |
| 199 | "author": current_author, |
| 200 | "content": content, |
| 201 | } |
| 202 | ) |
| 203 | i += 1 |
| 204 | else: |
| 205 | i += 1 |
| 206 | |
| 207 | return entries |
| @@ -223,14 +228,11 @@ | |
| 228 | check=check, |
| 229 | ) |
| 230 | |
| 231 | def is_repo(self) -> bool: |
| 232 | """Return True when *repo_path* looks like a Fossil checkout.""" |
| 233 | return (self.repo_path / ".fslckout").exists() or (self.repo_path / "_FOSSIL_").exists() |
| 234 | |
| 235 | def current_branch(self) -> str: |
| 236 | """ |
| 237 | Return the name of the current Fossil branch. |
| 238 | |
| @@ -265,16 +267,21 @@ | |
| 267 | |
| 268 | Runs ``fossil timeline --limit <n> --type ci --path <file>`` and |
| 269 | parses the output into a list of dicts with keys: |
| 270 | ``hash``, ``author``, ``date``, ``message``. |
| 271 | """ |
| 272 | result = self._run( |
| 273 | [ |
| 274 | "timeline", |
| 275 | "--limit", |
| 276 | str(limit), |
| 277 | "--type", |
| 278 | "ci", |
| 279 | "--path", |
| 280 | file_path, |
| 281 | ] |
| 282 | ) |
| 283 | return _parse_fossil_timeline(result.stdout) |
| 284 | |
| 285 | def blame(self, file_path: str) -> list[dict]: |
| 286 | """ |
| 287 | Return per-line blame data for *file_path*. |
| @@ -317,16 +324,18 @@ | |
| 324 | r"(\d{2}:\d{2}:\d{2})\s+\[([0-9a-f]+)\]\s+(.*?)(?:\s+\(user:\s*(\w+).*\))?$", |
| 325 | line, |
| 326 | ) |
| 327 | if m: |
| 328 | time_part, hash_part, message, author = m.groups() |
| 329 | entries.append( |
| 330 | { |
| 331 | "hash": hash_part, |
| 332 | "author": author or "", |
| 333 | "date": f"{current_date} {time_part}".strip(), |
| 334 | "message": message.rstrip(), |
| 335 | } |
| 336 | ) |
| 337 | |
| 338 | return entries |
| 339 | |
| 340 | |
| 341 | def _parse_fossil_annotate(output: str) -> list[dict]: |
| @@ -349,16 +358,18 @@ | |
| 358 | # Pattern: "<version> <author> <date>: <content>" |
| 359 | m = re.match(r"(\S+)\s+(\S+)\s+\S+:\s+(.*)", raw) |
| 360 | if m: |
| 361 | version, author, content = m.groups() |
| 362 | line_number += 1 |
| 363 | entries.append( |
| 364 | { |
| 365 | "line": line_number, |
| 366 | "hash": version, |
| 367 | "author": author, |
| 368 | "content": content, |
| 369 | } |
| 370 | ) |
| 371 | |
| 372 | return entries |
| 373 | |
| 374 | |
| 375 | # ── Factory ──────────────────────────────────────────────────────────────────── |
| 376 |
+1
-1
| --- pyproject.toml | ||
| +++ pyproject.toml | ||
| @@ -2,11 +2,11 @@ | ||
| 2 | 2 | requires = ["setuptools>=69.0", "wheel"] |
| 3 | 3 | build-backend = "setuptools.build_meta" |
| 4 | 4 | |
| 5 | 5 | [project] |
| 6 | 6 | name = "navegador" |
| 7 | -version = "0.7.1" | |
| 7 | +version = "0.7.2" | |
| 8 | 8 | description = "AST + knowledge graph context engine for AI coding agents" |
| 9 | 9 | readme = "README.md" |
| 10 | 10 | license = "MIT" |
| 11 | 11 | requires-python = ">=3.12" |
| 12 | 12 | authors = [ |
| 13 | 13 |
| --- pyproject.toml | |
| +++ pyproject.toml | |
| @@ -2,11 +2,11 @@ | |
| 2 | requires = ["setuptools>=69.0", "wheel"] |
| 3 | build-backend = "setuptools.build_meta" |
| 4 | |
| 5 | [project] |
| 6 | name = "navegador" |
| 7 | version = "0.7.1" |
| 8 | description = "AST + knowledge graph context engine for AI coding agents" |
| 9 | readme = "README.md" |
| 10 | license = "MIT" |
| 11 | requires-python = ">=3.12" |
| 12 | authors = [ |
| 13 |
| --- pyproject.toml | |
| +++ pyproject.toml | |
| @@ -2,11 +2,11 @@ | |
| 2 | requires = ["setuptools>=69.0", "wheel"] |
| 3 | build-backend = "setuptools.build_meta" |
| 4 | |
| 5 | [project] |
| 6 | name = "navegador" |
| 7 | version = "0.7.2" |
| 8 | description = "AST + knowledge graph context engine for AI coding agents" |
| 9 | readme = "README.md" |
| 10 | license = "MIT" |
| 11 | requires-python = ">=3.12" |
| 12 | authors = [ |
| 13 |