Navegador

feat: multi-repo, coordinated rename, CODEOWNERS, ADR, OpenAPI/GraphQL ingestion MultiRepoManager: register, ingest, and search across repos. SymbolRenamer: graph-assisted multi-file rename with preview. CodeownersIngester: parse CODEOWNERS, create ownership edges. ADRIngester: MADR format parsing to Decision nodes. APISchemaIngester: OpenAPI and GraphQL to graph nodes. Closes #16, closes #26, closes #39, closes #40, closes #41

lmata 2026-03-23 05:32 trunk
Commit dda2b32e58819ad2a427c53a158530044469302a81a5d3e1359f57af73878c91
--- a/navegador/adr.py
+++ b/navegador/adr.py
@@ -0,0 +1,31 @@
1
+"""
2
+ADR ingestion — structured Architecture Decision Records as Decision nodes.
3
+
4
+Parses ADR markdown files in standard MADR format (or a relaxed variant)
5
+and creates Decision nodes in the navegador knowledge graph.
6
+
7
+Usage:
8
+ from navegador.adr import ADRIngester
9
+
10
+ ingester = ADRIngester(store)
11
+ stats = ingester.ingest("/path/to/docs/decisions")
12
+"""
13
+
14
+from __future__ import annotations
15
+
16
+import logging
17
+import re
18
+from pathlib import Path
19
+from typing import Any
20
+
21
+from navegador.graph.schema import NodeLabel
22
+from navegador.graph.store import GraphStore
23
+
24
+logger = logging.getLogger(__name__)
25
+
26
+# ── Regex helpers ─────────────────────────────────────────────────────────────
27
+
28
+_H1 = re.compile(r"^#\s+(.+)$", re.MULTI
29
+ r"^#{1,3}\s+S = re.compile(r"^#{1,3}\s+Status\
30
+", re.MULTILINE | re.DOTALL)
31
+_RAT
--- a/navegador/adr.py
+++ b/navegador/adr.py
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/navegador/adr.py
+++ b/navegador/adr.py
@@ -0,0 +1,31 @@
1 """
2 ADR ingestion — structured Architecture Decision Records as Decision nodes.
3
4 Parses ADR markdown files in standard MADR format (or a relaxed variant)
5 and creates Decision nodes in the navegador knowledge graph.
6
7 Usage:
8 from navegador.adr import ADRIngester
9
10 ingester = ADRIngester(store)
11 stats = ingester.ingest("/path/to/docs/decisions")
12 """
13
14 from __future__ import annotations
15
16 import logging
17 import re
18 from pathlib import Path
19 from typing import Any
20
21 from navegador.graph.schema import NodeLabel
22 from navegador.graph.store import GraphStore
23
24 logger = logging.getLogger(__name__)
25
26 # ── Regex helpers ─────────────────────────────────────────────────────────────
27
28 _H1 = re.compile(r"^#\s+(.+)$", re.MULTI
29 r"^#{1,3}\s+S = re.compile(r"^#{1,3}\s+Status\
30 ", re.MULTILINE | re.DOTALL)
31 _RAT
--- a/navegador/api_schema.py
+++ b/navegador/api_schema.py
@@ -0,0 +1,222 @@
1
+"""
2
+OpenAPI and GraphQL schema ingestion — API contracts as graph nodes.
3
+
4
+Parses OpenAPI/Swagger YAML or JSON files and GraphQL schema files, then
5
+creates API endpoint nodes in the navegador graph.
6
+
7
+Usage:
8
+ from navegador.api_schema import APISchemaIngester
9
+
10
+ ingester = APISchemaIngester(store)
11
+ stats = ingester.ingest_openapi("/path/to/openapi.yaml")
12
+ stats = ingester.ingest_graphql("/path/to/schema.graphql")
13
+"""
14
+
15
+from __future__ import annotations
16
+
17
+import json
18
+import logging
19
+import re
20
+from pathlib import Path
21
+from typing import Any
22
+
23
+from navegador.graph.schema import EdgeType, NodeLabelath
24
+from typing import Any
25
+
26
+from navegador.graph.store import GraphStore
27
+
28
+logger = logging.getLogger(__name__)
29
+
30
+# ── New node label for API endpoints ─────────────────────────────────────────
31
+#
32
+# We store API endpoints as Function nodes with a synthetic label convention
33
+# so they appear in search results alongside regular code symbols. A dedicated
34
+# label would require schema migration; using Function keeps things simple and
35
+# compatible with the existing graph.
36
+#
37
+# Alternatively callers can use the raw create_node with a custom label string.
38
+
39
+_API_NODE_LABEL = "Function" # reuse for discoverability
40
+
41
+
42
+class APISchemaIngester:
43
+ """
44
+ Ingest API schema files (OpenAPI YAML/JSON, GraphQL SDL) as graph nodes.
45
+
46
+ Each endpoint / type becomes a Function-labelled node with a distinctive
47
+ file_path prefix so they can be queried separately.
48
+ """
49
+
50
+ def __init__(self, store: GraphStore) -> None:
51
+ self.store = store
52
+
53
+ # ── OpenAPI ───────────────────────────────────────────────────────────────
54
+
55
+ def ingest_openapi(self, path: str | Path) -> dict[str, Any]:
56
+ """
57
+ Parse an OpenAPI 2.x / 3.x YAML or JSON file.
58
+
59
+ Each path+method combination becomes a node. Returns stats dict with
60
+ keys: endpoints, schemas.
61
+
62
+ (text):
63
+ad_yaml_or_json(path)
64
+
65
+ ba or {}
66
+ )
67
+ return {"endpoints": 0, "schemas": 0}
68
+
69
+ endpoints = 0
70
+ schemas = 0
71
+ base_url = str(path)
72
+
73
+ # ── Paths / endpoints ─────────────────────────────────────────────────
74
+ for api_path, path_item in (spec.get("paths") or {}).items():
75
+ if not isinstance(path_item, dict):
76
+ continue
77
+ for method in ("get", "post", "put", "patch", "delete", "head", "options"):
78
+ operation = path_item.get(method)
79
+ if not isinstance(operation, dict):
80
+ continue
81
+
82
+ op_id = operation.get("operationId") or f"{method.upper()} {api_path}"
83
+ summary = operation.get("summary") or operation.get("description") or ""
84
+ tags = ", ".join(operation.get("tags") or [])
85
+
86
+ self.store.create_node(
87
+ _API_NODE_LABEL,
88
+ {
89
+ "name": op_id,
90
+ "file_path": base_url,
91
+ "line_start": 0,
92
+ "line_end": 0,
93
+ "docstring": summary,
94
+ "source": "",
95
+ "signature": f"{method.upper()} {api_path}",
96
+ "domain": tags,
97
+ },
98
+ )
99
+ endpoints += 1
100
+
101
+ # ── Component schemas / definitions ───────────────────────────────────
102
+ component_schemas = (
103
+ (spec.get("components") or {}).get("schemas") or spec.get("definitions") or {}
104
+ )
105
+ for schema_name, schema_body in component_schemas.items():
106
+ if not isinstance(schema_body, dict):
107
+ continue
108
+ description = schema_body.get("description") or ""
109
+ self.store.create_node(
110
+ "Class",
111
+ {
112
+ "name": schema_name,
113
+ "file_path": base_url,
114
+ "line_start": 0,
115
+ "line_end": 0,
116
+ "docstring": description,
117
+ "source": "",
118
+ },
119
+ )
120
+ schemas += 1
121
+
122
+ stats = {"endpoints": endpoints, "schemas": schemas}
123
+ logger.info("APISchemaIngester (OpenAPI): %s", stats)
124
+ return stats
125
+
126
+ # ── GraphQL ───────────────────────────────────────────────────────────────
127
+
128
+ def ingest_graphql(self, path: str | Path) -> dict[str, Any]:
129
+ """
130
+ Parse a GraphQL SDL schema file using regex-based extraction.
131
+
132
+ Types (type, input, interface, enum, union) become Class nodes.
133
+ Query / Mutation / Subscription fields become Function nodes.
134
+ Returns stats dict with keys: types, fields.
135
+ """
136
+ path = Path(path)
137
+ try:
138
+ text = path.read_text(encoding="utf-8", errors="replace")
139
+ except OSError as exc:
140
+ logger.warning("APISchemaIngester: cannot read %s: %s", path, exc)
141
+ return {"types": 0, "fields": 0}
142
+
143
+ base_url = str(path)
144
+ types_created = 0
145
+ fields_created = 0
146
+
147
+ # ── Type definitions ──────────────────────────────────────────────────
148
+ # Matches: type Foo { ... } / input Bar { ... } / interface X { ... }
149
+ type_pattern = re.compile(
150
+ r"(?:^|\n)\s*(?:type|input|interface|enum|union)\s+(\w+)"
151
+ r"(?:[^{]*)?\{([^}]*)\}",
152
+ re.MULTILINE | re.DOTALL,
153
+ )
154
+
155
+ root_types = {"Query", "Mutation", "Subscription"}
156
+
157
+ for m in type_pattern.finditer(text):
158
+ type_name = m.group(1)
159
+ body = m.group(2)
160
+
161
+ if type_name in root_types:
162
+ # Fields on Query / Mutation / Subscription → Function nodes
163
+ field_pattern = re.compile(
164
+ r"^\s*(\w+)\s*(?:\([^)]*\))?\s*:\s*([^\n!]+)", re.MULTILINE
165
+ )
166
+ for fm in field_pattern.finditer(body):
167
+ field_name = fm.group(1).strip()
168
+ return_type = fm.group(2).strip().rstrip(",")
169
+ self.store.create_node(
170
+ _API_NODE_LABEL,
171
+ {
172
+ "name": field_name,
173
+ "file_path": base_url,
174
+ "line_start": 0,
175
+ "line_end": 0,
176
+ "docstring": "",
177
+ "source": "",
178
+ "signature": f"{type_name}.{field_name}: {return_type}",
179
+ "domain": type_name,
180
+ },
181
+ )
182
+ fields_created += 1
183
+ else:
184
+ # Regular type → Class node
185
+ self.store.create_node(
186
+ "Class",
187
+ {
188
+ "name": type_name,
189
+ "file_path": base_url,
190
+ "line_start": 0,
191
+ "line_end": 0,
192
+ "docstring": "",
193
+ l # type: ignore[import]
194
+
195
+ return yaml.safe_load(text)
196
+ except ImportError:
197
+ pass
198
+
199
+ # Minimal hand-rolled YAML → dict for simple key: value structures
200
+ return _minimal_yaml_load(text)
201
+
202
+
203
+# ── Minimal YAML loader (stdlib only) ─────────────────────────────────────────
204
+
205
+
206
+def _minimal_yaml_load(text: str) -> dict[str, Any]:
207
+ """
208
+ Extremely simplified YAML loader for flat/shallow OpenAPI specs.
209
+
210
+ Handles: key: value, key: 'string', key: "string", nested dicts via
211
+ indentation, lists via '- item'. Does NOT handle anchors, multi-line
212
+ values, or complex YAML features.
213
+ """
214
+ lines = text.splitlines()
215
+ result: dict[str, Any] = {}
216
+ stack: list[tuple[int, dict | list]] = [(0, result)]
217
+
218
+ for raw_line in lines:
219
+ if not raw_line.strip() or raw_line.strip().startswith("#"):
220
+ continue
221
+
222
+ i
--- a/navegador/api_schema.py
+++ b/navegador/api_schema.py
@@ -0,0 +1,222 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/navegador/api_schema.py
+++ b/navegador/api_schema.py
@@ -0,0 +1,222 @@
1 """
2 OpenAPI and GraphQL schema ingestion — API contracts as graph nodes.
3
4 Parses OpenAPI/Swagger YAML or JSON files and GraphQL schema files, then
5 creates API endpoint nodes in the navegador graph.
6
7 Usage:
8 from navegador.api_schema import APISchemaIngester
9
10 ingester = APISchemaIngester(store)
11 stats = ingester.ingest_openapi("/path/to/openapi.yaml")
12 stats = ingester.ingest_graphql("/path/to/schema.graphql")
13 """
14
15 from __future__ import annotations
16
17 import json
18 import logging
19 import re
20 from pathlib import Path
21 from typing import Any
22
23 from navegador.graph.schema import EdgeType, NodeLabelath
24 from typing import Any
25
26 from navegador.graph.store import GraphStore
27
28 logger = logging.getLogger(__name__)
29
30 # ── New node label for API endpoints ─────────────────────────────────────────
31 #
32 # We store API endpoints as Function nodes with a synthetic label convention
33 # so they appear in search results alongside regular code symbols. A dedicated
34 # label would require schema migration; using Function keeps things simple and
35 # compatible with the existing graph.
36 #
37 # Alternatively callers can use the raw create_node with a custom label string.
38
39 _API_NODE_LABEL = "Function" # reuse for discoverability
40
41
42 class APISchemaIngester:
43 """
44 Ingest API schema files (OpenAPI YAML/JSON, GraphQL SDL) as graph nodes.
45
46 Each endpoint / type becomes a Function-labelled node with a distinctive
47 file_path prefix so they can be queried separately.
48 """
49
50 def __init__(self, store: GraphStore) -> None:
51 self.store = store
52
53 # ── OpenAPI ───────────────────────────────────────────────────────────────
54
55 def ingest_openapi(self, path: str | Path) -> dict[str, Any]:
56 """
57 Parse an OpenAPI 2.x / 3.x YAML or JSON file.
58
59 Each path+method combination becomes a node. Returns stats dict with
60 keys: endpoints, schemas.
61
62 (text):
63 ad_yaml_or_json(path)
64
65 ba or {}
66 )
67 return {"endpoints": 0, "schemas": 0}
68
69 endpoints = 0
70 schemas = 0
71 base_url = str(path)
72
73 # ── Paths / endpoints ─────────────────────────────────────────────────
74 for api_path, path_item in (spec.get("paths") or {}).items():
75 if not isinstance(path_item, dict):
76 continue
77 for method in ("get", "post", "put", "patch", "delete", "head", "options"):
78 operation = path_item.get(method)
79 if not isinstance(operation, dict):
80 continue
81
82 op_id = operation.get("operationId") or f"{method.upper()} {api_path}"
83 summary = operation.get("summary") or operation.get("description") or ""
84 tags = ", ".join(operation.get("tags") or [])
85
86 self.store.create_node(
87 _API_NODE_LABEL,
88 {
89 "name": op_id,
90 "file_path": base_url,
91 "line_start": 0,
92 "line_end": 0,
93 "docstring": summary,
94 "source": "",
95 "signature": f"{method.upper()} {api_path}",
96 "domain": tags,
97 },
98 )
99 endpoints += 1
100
101 # ── Component schemas / definitions ───────────────────────────────────
102 component_schemas = (
103 (spec.get("components") or {}).get("schemas") or spec.get("definitions") or {}
104 )
105 for schema_name, schema_body in component_schemas.items():
106 if not isinstance(schema_body, dict):
107 continue
108 description = schema_body.get("description") or ""
109 self.store.create_node(
110 "Class",
111 {
112 "name": schema_name,
113 "file_path": base_url,
114 "line_start": 0,
115 "line_end": 0,
116 "docstring": description,
117 "source": "",
118 },
119 )
120 schemas += 1
121
122 stats = {"endpoints": endpoints, "schemas": schemas}
123 logger.info("APISchemaIngester (OpenAPI): %s", stats)
124 return stats
125
126 # ── GraphQL ───────────────────────────────────────────────────────────────
127
128 def ingest_graphql(self, path: str | Path) -> dict[str, Any]:
129 """
130 Parse a GraphQL SDL schema file using regex-based extraction.
131
132 Types (type, input, interface, enum, union) become Class nodes.
133 Query / Mutation / Subscription fields become Function nodes.
134 Returns stats dict with keys: types, fields.
135 """
136 path = Path(path)
137 try:
138 text = path.read_text(encoding="utf-8", errors="replace")
139 except OSError as exc:
140 logger.warning("APISchemaIngester: cannot read %s: %s", path, exc)
141 return {"types": 0, "fields": 0}
142
143 base_url = str(path)
144 types_created = 0
145 fields_created = 0
146
147 # ── Type definitions ──────────────────────────────────────────────────
148 # Matches: type Foo { ... } / input Bar { ... } / interface X { ... }
149 type_pattern = re.compile(
150 r"(?:^|\n)\s*(?:type|input|interface|enum|union)\s+(\w+)"
151 r"(?:[^{]*)?\{([^}]*)\}",
152 re.MULTILINE | re.DOTALL,
153 )
154
155 root_types = {"Query", "Mutation", "Subscription"}
156
157 for m in type_pattern.finditer(text):
158 type_name = m.group(1)
159 body = m.group(2)
160
161 if type_name in root_types:
162 # Fields on Query / Mutation / Subscription → Function nodes
163 field_pattern = re.compile(
164 r"^\s*(\w+)\s*(?:\([^)]*\))?\s*:\s*([^\n!]+)", re.MULTILINE
165 )
166 for fm in field_pattern.finditer(body):
167 field_name = fm.group(1).strip()
168 return_type = fm.group(2).strip().rstrip(",")
169 self.store.create_node(
170 _API_NODE_LABEL,
171 {
172 "name": field_name,
173 "file_path": base_url,
174 "line_start": 0,
175 "line_end": 0,
176 "docstring": "",
177 "source": "",
178 "signature": f"{type_name}.{field_name}: {return_type}",
179 "domain": type_name,
180 },
181 )
182 fields_created += 1
183 else:
184 # Regular type → Class node
185 self.store.create_node(
186 "Class",
187 {
188 "name": type_name,
189 "file_path": base_url,
190 "line_start": 0,
191 "line_end": 0,
192 "docstring": "",
193 l # type: ignore[import]
194
195 return yaml.safe_load(text)
196 except ImportError:
197 pass
198
199 # Minimal hand-rolled YAML → dict for simple key: value structures
200 return _minimal_yaml_load(text)
201
202
203 # ── Minimal YAML loader (stdlib only) ─────────────────────────────────────────
204
205
206 def _minimal_yaml_load(text: str) -> dict[str, Any]:
207 """
208 Extremely simplified YAML loader for flat/shallow OpenAPI specs.
209
210 Handles: key: value, key: 'string', key: "string", nested dicts via
211 indentation, lists via '- item'. Does NOT handle anchors, multi-line
212 values, or complex YAML features.
213 """
214 lines = text.splitlines()
215 result: dict[str, Any] = {}
216 stack: list[tuple[int, dict | list]] = [(0, result)]
217
218 for raw_line in lines:
219 if not raw_line.strip() or raw_line.strip().startswith("#"):
220 continue
221
222 i
--- a/navegador/codeowners.py
+++ b/navegador/codeowners.py
@@ -0,0 +1,4 @@
1
+"""
2
+CODEOWNERS integration — map ownership to Person and Domain nodes.
3
+
4
+Parse
--- a/navegador/codeowners.py
+++ b/navegador/codeowners.py
@@ -0,0 +1,4 @@
 
 
 
 
--- a/navegador/codeowners.py
+++ b/navegador/codeowners.py
@@ -0,0 +1,4 @@
1 """
2 CODEOWNERS integration — map ownership to Person and Domain nodes.
3
4 Parse
--- a/navegador/multirepo.py
+++ b/navegador/multirepo.py
@@ -0,0 +1,341 @@
1
+"""
2
+Multi-repo support — index and query across multiple repositories.
3
+
4
+Issue: #62 adds WorkspaceMode (UNIFIED / FEDERATED) and WorkspaceManager.
5
+
6
+Usage::
7
+
8
+ from navegador.multirepo import MultiRepoManager, WorkspaceMode, WorkspaceManager
9
+
10
+ # Legacy: single shared graph
11
+ mgr = MultiRepoManager(store)
12
+ mgr.add_repo("backend", "/path/to/backend")
13
+ mgr.add_repo("frontend", "/path/to/frontend")
14
+ stats = mgr.ingest_all()
15
+ results = mgr.cross_repo_search("authenticate")
16
+
17
+ # v0.4: workspace with explicit mode
18
+ ws = WorkspaceManager(store, mode=WorkspaceMode.UNIFIED)
19
+ ws.add_repo("backend", "/path/to/backend")
20
+ ws.add_repo("frontend", "/path/to/frontend")
21
+ stats = ws.ingest_all()
22
+ results = ws.search("authenticate")
23
+
24
+ # Federated: each repo gets its own graph; cross-repo queries merge results
25
+ ws_fed = WorkspaceManager(store, mode=WorkspaceMode.FEDERATED)
26
+ ws_fed.add_repo("backend", "/path/to/backend")
27
+ results = ws_fed.search("authenticate")
28
+"""
29
+
30
+from __future__ import annotations
31
+
32
+import json
33
+import logging
34
+from enum import Enum
35
+from pathlib import Path
36
+from typing import Any
37
+
38
+from navEdgeType, NodeLabel
39
+from navegador.graph.store import GraphStore
40
+
41
+logger = logging.getLogger(__name__)
42
+
43
+# Key used to store repo registry as a special node in the graph
44
+_REGISTRY_LABEL = "RepoRegistry"
45
+
46
+
47
+# ── WorkspaceMode ─────────────────────────────────────────────────────────────
48
+
49
+
50
+class WorkspaceMode(str, Enum):
51
+ """
52
+ Controls how a multi-repo workspace stores its graph data.
53
+
54
+ UNIFIED
55
+ All repositories share one graph. Cross-repo traversal is trivial
56
+ but repo isolation is not enforced.
57
+
58
+ FEDERATED
59
+ Each repository gets its own named graph. Cross-repo queries are
60
+ executed against each graph in turn and the results are merged.
61
+ Provides namespace isolation — nodes in repo A cannot accidentally
62
+ collide with nodes in repo B.
63
+ """
64
+
65
+ UNIFIED = "unified"
66
+ FEDERATED = "federated"
67
+
68
+
69
+# ── WorkspaceManager ────────────────�
70
+ame": name,
71
+�─────────────────────────�
72
+ �────────────────────────
73
+
74
+
75
+class WorkspaceManager:
76
+ """
77
+ Multi-repo workspace with explicit UNIFIED or FEDERATED mode.
78
+
79
+ In UNIFIED mode this is a thin wrapper around :class:`MultiRepoManager`
80
+ backed by a single shared :class:`~navegador.graph.store.GraphStore`.
81
+
82
+ In FEDERATED mode each repo is tracked with its own graph name. Queries
83
+ fan out across all per-repo graphs and merge the result lists.
84
+ """
85
+
86
+ def __init__(self, store: GraphStore, mode: WorkspaceMode = WorkspaceMode.UNIFIED) -> None:
87
+ self.store = store
88
+ self.mode = mode
89
+ # repo name → {"path": str, "graph_name": str}
90
+ self._repos: dict[str, dict[str, str]] = {}
91
+
92
+ # ── Registration ──────────────────────────────────────────────────────────
93
+
94
+ def add_repo(self, name: str, path: str | Path) -> None:
95
+ """Register a repository by name and filesystem path."""
96
+ resolved = str(Path(path).resolve())
97
+ graph_name = f"navegador_{name}" if self.mode == WorkspaceMode.FEDERATED else "navegador"
98
+ self._repos[name] = {"path": resolved, "graph_name": graph_name}
99
+
100
+ # Persist registration as a Repository node in the shared store
101
+ self.store.create_node(
102
+ NodeLabel.Repository,
103
+ {
104
+ "name": name,
105
+ "path": resolved,
106
+ "description": f"workspace:{self.mode.value}",
107
+ "language": "",
108
+ "file_path": resolved,
109
+ },
110
+ )
111
+ logger.info("WorkspaceManager (%s): registered %s → %s", self.mode.value, name, resolved)
112
+
113
+ def list_repos(self) -> list[dict[str, str]]:
114
+ """R, Any] = {}
115
+
116
+ "WorkspaceManager: r.error("WorkspaceManager: failed t, Any] = {}
117
+
118
+ ), Any] = {}
119
+
120
+ summary[name] = {"error": str(exc)}
121
+
122
+ return summary
123
+
124
+ # ── Search ────────────────────────────────────────────────────────────────
125
+
126
+ def search(self, query: str, limit: int = 20) -> list[dict[str, Any]]:
127
+ """
128
+ Search across all repositories.
129
+
130
+ In UNIFIED mode queries the single shared graph.
131
+ In FEDERATED mode fans out across each per-repo graph and merges.
132
+
133
+ Returns
134
+ -------
135
+ list of dicts with keys: label, name, file_path, repo
136
+ """
137
+ if self.mode == WorkspaceMode.UNIFIED:
138
+ return self._search_store(self.store, query, limit)
139
+
140
+ # Federated: merge results from each repo's graph
141
+ all_results: list[dict[str, Any]] = []
142
+ seen: set[tuple[str, str]] = set()
143
+
144
+ for name, info in self._repos.items():
145
+ try:
146
+ target_store = self._federated_store(info["graph_name"])
147
+ results = self._search_store(target_store, query, limit)
148
+ for r in results:
149
+ key = (r.get("label", ""), r.get("name", ""))
150
+ if key not in seen:
151
+ seen.add(key)
152
+ r["repo"] = name
153
+ all_results.append(r)
154
+ except Exception:
155
+ logger.debug(, Any] = {}
156
+
157
+ "WorkspaceManager: ngester(target_store, Any] = {}
158
+
159
+ )
160
+ stats = ingester.ingest(path, clear=False)
161
+ summary[name] = stats
162
+ except Exception as exc: # noqa: BLE001
163
+ logger.error("WorkspaceManager: failed to ingest %s: %s", name, exc)
164
+ summary[name] = {"error": str(exc)}
165
+
166
+ return summary
167
+
168
+ # ── Search ────────────────────────────────────────────────────────────────
169
+
170
+ def search(self, query: str, limit: int = 20) -> list[dict[str, Any]]:
171
+ """
172
+ Search across all repositories.
173
+
174
+ In UNIFIED mode queries the single shared graph.
175
+ In FEDERATED mode fans out across each per-repo graph and merges.
176
+
177
+ Returns
178
+ -------
179
+ list of dicts with keys: label, name, file_path, repo
180
+ """
181
+ if self.mode == WorkspaceMode.UNIFIED:
182
+ return self._search_store(self.store, query, limit)
183
+
184
+ # Federated: merge results from��──────�r:Repository) RETURN r.na
185
+ )
186
+ her, {"q": query})
187
+ name": row[0], "path": row[1]} for row in rows]
188
+
189
+ # ── Ingestion ─────────────────────────────────────────────────────────────
190
+
191
+ def ingest_all(self, clear: bool = False) -> dict[str, Any]:
192
+ """
193
+ Ingest every registered repository.
194
+
195
+ Returns a summary dict keyed by repo name, each value being the
196
+ ingestion stats returned by RepoIngester.
197
+ """
198
+ from navegador.ingestion.parser import RepoIngester
199
+
200
+ repos = self.list_repos()
201
+ if not repos:
202
+ logger.warning("MultiRepo: no repositories registered")
203
+ return {}
204
+
205
+ if clear:
206
+ self.store.clear()
207
+
208
+ summary: dict[str, Any] = {}
209
+ for repo in repos:
210
+ name = repo["name"]
211
+ path = repo["path"]
212
+ logger.info("MultiRepo: ingesting %s from %s", name, path)
213
+ try:
214
+ ingester = RepoIngester(self.store)
215
+ stats = ingester.ingest(path, clear=False)
216
+ summary[name] = stats
217
+ except Exception as exc: # noqa: BLE001
218
+ logger.error("MultiRepo: failed to ingest %s: %s", name, exc)
219
+ summary[name] = {"error": str(exc)}
220
+
221
+ return summary
222
+
223
+ # ── Search ────────────────────────────────────────────────────────────────
224
+
225
+ def cross_repo_search(self, query: str, limit: int = 20) -> list[dict[str, Any]]:
226
+ """
227
+ Full-text name search across all node types in all registered repos.
228
+
229
+ Returns a list of dicts with keys: label, name, file_path.
230
+ """
231
+ cypher = (
232
+ "MATCH (n) "
233
+ "WHERE toLower(n.name) CONTAINS toLower($q) "
234
+ "RETURN labels(n)[0] AS label, n.name AS name, "
235
+ " coalesce(n.file_path, n.path, '') AS file_path "
236
+ f"LIMIT {int(limit)}"
237
+ )
238
+ result = self.store.query(cypher, {"q": query})
239
+ rows = result.result_set or []
240
+ return [
241
+ {"label": row[0], "name": r
242
+ g importaceManager.
243
+
244
+Usage::
245
+
246
+ f"""
247
+Multi-repo support — index and query across multiple repositories.
248
+
249
+Issue: #62 adds WorkspaceMode (UNIFIED / FEDERATED) and WorkspaceManager.
250
+
251
+Usage::
252
+
253
+ from navegador.multirepo import MultiRepoManager, WorkspaceMode, WorkspaceManager
254
+
255
+ # Legacy: single shared graph
256
+ mgr = MultiRepoManager(store)
257
+ mgr.add_repo("backend", "/path/to/backend")
258
+ mgr.add_repo("frontend", "/path/to/frontend")
259
+ stats = mgr.ingest_all()
260
+ results = mgr.cross_repo_search("authenticate")
261
+
262
+ # v0.4: workspace with explicit mode
263
+ ws = WorkspaceManager(store, mode=WorkspaceMode.UNIFIED)
264
+ ws.add_repo("backend", "/path/to/backend")
265
+ ws.add_repo("frontend", "/path/to/frontend")
266
+ stats = ws.ingest_all()
267
+ results = ws.search("authenticate")
268
+
269
+ # Federated: each repo gets its own graph; cross-repo queries merge results
270
+ ws_fed = WorkspaceManager(store, mode=WorkspaceMode.FEDERATED)
271
+ ws_fed.add_repo("backend", "/path/to/backend")
272
+ results = ws_fed.search("authenticate")
273
+"""
274
+
275
+from __future__ import annotations
276
+
277
+import logging
278
+from enum import Enum
279
+from pathlib import Path
280
+from typing import Any
281
+
282
+from navegador.graph.schema import NodeLabel
283
+from navegador.graph.store import GraphStore
284
+
285
+logger = logging.getLogger(__name__)
286
+
287
+# Key used to store repo registry as a special node in the graph
288
+_REGISTRY_LABEL = "RepoRegistry"
289
+
290
+
291
+# ── WorkspaceMode ─────────────────────────────────────────────────────────────
292
+
293
+
294
+class WorkspaceMode(str, Enum):
295
+ """
296
+ Controls how a multi-repo workspace stores its graph data.
297
+
298
+ UNIFIED
299
+ All repositories share one graph. Cross-repo traversal is trivial
300
+ but repo isolation is not enforced.
301
+
302
+ FEDERATED
303
+ Each repository gets its own named graph. Cross-repo queries are
304
+ executed against each graph in turn and the results are merged.
305
+ Provides namespace isolation — nodes in repo A cannot accidentally
306
+ collide with nodes in repo B.
307
+ """
308
+
309
+ UNIFIED = "unified"
310
+ FEDERATED = "federated"
311
+
312
+
313
+# ── WorkspaceManager ──────────────────────────────────────────────────────────
314
+
315
+
316
+class WorkspaceManager:
317
+ """
318
+ Multi-repo workspace with explicit UNIFIED or FEDERATED mode.
319
+
320
+ In UNIFIED mode this is a thin wrapper around :class:`MultiRepoManager`
321
+ backed by a single shared :class:`~navegador.graph.store.GraphStore`.
322
+
323
+ In FEDERATED mode each repo is tracked with its own graph name. Queries
324
+ fan out across all per-repo graphs and merge the result lists.
325
+ """
326
+
327
+ def __init__(self, store: GraphStore, mode: WorkspaceMode = WorkspaceMode.UNIFIED) -> None:
328
+ self.store = store
329
+ self.mode = mode
330
+ # repo name → {"path": str, "graph_name": str}
331
+ self._repos: dict[str, dict[str, str]] = {}
332
+
333
+ # ── Registration ──────────────────────────────────────────────────────────
334
+
335
+ def add_repo(self, name: str, path: str | Path) -> None:
336
+ """Register a repository by name and filesystem path."""
337
+ resolved = str(Path(path).resolve())
338
+ graph_name = f"navegador_{name}" if self.mode == WorkspaceMode.FEDERATED else "navegador"
339
+ self._repos[name] = {"path": resolved, "graph_name": graph_name}
340
+
341
+ # Persist registration as a Repos
--- a/navegador/multirepo.py
+++ b/navegador/multirepo.py
@@ -0,0 +1,341 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/navegador/multirepo.py
+++ b/navegador/multirepo.py
@@ -0,0 +1,341 @@
1 """
2 Multi-repo support — index and query across multiple repositories.
3
4 Issue: #62 adds WorkspaceMode (UNIFIED / FEDERATED) and WorkspaceManager.
5
6 Usage::
7
8 from navegador.multirepo import MultiRepoManager, WorkspaceMode, WorkspaceManager
9
10 # Legacy: single shared graph
11 mgr = MultiRepoManager(store)
12 mgr.add_repo("backend", "/path/to/backend")
13 mgr.add_repo("frontend", "/path/to/frontend")
14 stats = mgr.ingest_all()
15 results = mgr.cross_repo_search("authenticate")
16
17 # v0.4: workspace with explicit mode
18 ws = WorkspaceManager(store, mode=WorkspaceMode.UNIFIED)
19 ws.add_repo("backend", "/path/to/backend")
20 ws.add_repo("frontend", "/path/to/frontend")
21 stats = ws.ingest_all()
22 results = ws.search("authenticate")
23
24 # Federated: each repo gets its own graph; cross-repo queries merge results
25 ws_fed = WorkspaceManager(store, mode=WorkspaceMode.FEDERATED)
26 ws_fed.add_repo("backend", "/path/to/backend")
27 results = ws_fed.search("authenticate")
28 """
29
30 from __future__ import annotations
31
32 import json
33 import logging
34 from enum import Enum
35 from pathlib import Path
36 from typing import Any
37
38 from navEdgeType, NodeLabel
39 from navegador.graph.store import GraphStore
40
41 logger = logging.getLogger(__name__)
42
43 # Key used to store repo registry as a special node in the graph
44 _REGISTRY_LABEL = "RepoRegistry"
45
46
47 # ── WorkspaceMode ─────────────────────────────────────────────────────────────
48
49
50 class WorkspaceMode(str, Enum):
51 """
52 Controls how a multi-repo workspace stores its graph data.
53
54 UNIFIED
55 All repositories share one graph. Cross-repo traversal is trivial
56 but repo isolation is not enforced.
57
58 FEDERATED
59 Each repository gets its own named graph. Cross-repo queries are
60 executed against each graph in turn and the results are merged.
61 Provides namespace isolation — nodes in repo A cannot accidentally
62 collide with nodes in repo B.
63 """
64
65 UNIFIED = "unified"
66 FEDERATED = "federated"
67
68
69 # ── WorkspaceManager ────────────────�
70 ame": name,
71 �─────────────────────────�
72 �────────────────────────
73
74
75 class WorkspaceManager:
76 """
77 Multi-repo workspace with explicit UNIFIED or FEDERATED mode.
78
79 In UNIFIED mode this is a thin wrapper around :class:`MultiRepoManager`
80 backed by a single shared :class:`~navegador.graph.store.GraphStore`.
81
82 In FEDERATED mode each repo is tracked with its own graph name. Queries
83 fan out across all per-repo graphs and merge the result lists.
84 """
85
86 def __init__(self, store: GraphStore, mode: WorkspaceMode = WorkspaceMode.UNIFIED) -> None:
87 self.store = store
88 self.mode = mode
89 # repo name → {"path": str, "graph_name": str}
90 self._repos: dict[str, dict[str, str]] = {}
91
92 # ── Registration ──────────────────────────────────────────────────────────
93
94 def add_repo(self, name: str, path: str | Path) -> None:
95 """Register a repository by name and filesystem path."""
96 resolved = str(Path(path).resolve())
97 graph_name = f"navegador_{name}" if self.mode == WorkspaceMode.FEDERATED else "navegador"
98 self._repos[name] = {"path": resolved, "graph_name": graph_name}
99
100 # Persist registration as a Repository node in the shared store
101 self.store.create_node(
102 NodeLabel.Repository,
103 {
104 "name": name,
105 "path": resolved,
106 "description": f"workspace:{self.mode.value}",
107 "language": "",
108 "file_path": resolved,
109 },
110 )
111 logger.info("WorkspaceManager (%s): registered %s → %s", self.mode.value, name, resolved)
112
113 def list_repos(self) -> list[dict[str, str]]:
114 """R, Any] = {}
115
116 "WorkspaceManager: r.error("WorkspaceManager: failed t, Any] = {}
117
118 ), Any] = {}
119
120 summary[name] = {"error": str(exc)}
121
122 return summary
123
124 # ── Search ────────────────────────────────────────────────────────────────
125
126 def search(self, query: str, limit: int = 20) -> list[dict[str, Any]]:
127 """
128 Search across all repositories.
129
130 In UNIFIED mode queries the single shared graph.
131 In FEDERATED mode fans out across each per-repo graph and merges.
132
133 Returns
134 -------
135 list of dicts with keys: label, name, file_path, repo
136 """
137 if self.mode == WorkspaceMode.UNIFIED:
138 return self._search_store(self.store, query, limit)
139
140 # Federated: merge results from each repo's graph
141 all_results: list[dict[str, Any]] = []
142 seen: set[tuple[str, str]] = set()
143
144 for name, info in self._repos.items():
145 try:
146 target_store = self._federated_store(info["graph_name"])
147 results = self._search_store(target_store, query, limit)
148 for r in results:
149 key = (r.get("label", ""), r.get("name", ""))
150 if key not in seen:
151 seen.add(key)
152 r["repo"] = name
153 all_results.append(r)
154 except Exception:
155 logger.debug(, Any] = {}
156
157 "WorkspaceManager: ngester(target_store, Any] = {}
158
159 )
160 stats = ingester.ingest(path, clear=False)
161 summary[name] = stats
162 except Exception as exc: # noqa: BLE001
163 logger.error("WorkspaceManager: failed to ingest %s: %s", name, exc)
164 summary[name] = {"error": str(exc)}
165
166 return summary
167
168 # ── Search ────────────────────────────────────────────────────────────────
169
170 def search(self, query: str, limit: int = 20) -> list[dict[str, Any]]:
171 """
172 Search across all repositories.
173
174 In UNIFIED mode queries the single shared graph.
175 In FEDERATED mode fans out across each per-repo graph and merges.
176
177 Returns
178 -------
179 list of dicts with keys: label, name, file_path, repo
180 """
181 if self.mode == WorkspaceMode.UNIFIED:
182 return self._search_store(self.store, query, limit)
183
184 # Federated: merge results from��──────�r:Repository) RETURN r.na
185 )
186 her, {"q": query})
187 name": row[0], "path": row[1]} for row in rows]
188
189 # ── Ingestion ─────────────────────────────────────────────────────────────
190
191 def ingest_all(self, clear: bool = False) -> dict[str, Any]:
192 """
193 Ingest every registered repository.
194
195 Returns a summary dict keyed by repo name, each value being the
196 ingestion stats returned by RepoIngester.
197 """
198 from navegador.ingestion.parser import RepoIngester
199
200 repos = self.list_repos()
201 if not repos:
202 logger.warning("MultiRepo: no repositories registered")
203 return {}
204
205 if clear:
206 self.store.clear()
207
208 summary: dict[str, Any] = {}
209 for repo in repos:
210 name = repo["name"]
211 path = repo["path"]
212 logger.info("MultiRepo: ingesting %s from %s", name, path)
213 try:
214 ingester = RepoIngester(self.store)
215 stats = ingester.ingest(path, clear=False)
216 summary[name] = stats
217 except Exception as exc: # noqa: BLE001
218 logger.error("MultiRepo: failed to ingest %s: %s", name, exc)
219 summary[name] = {"error": str(exc)}
220
221 return summary
222
223 # ── Search ────────────────────────────────────────────────────────────────
224
225 def cross_repo_search(self, query: str, limit: int = 20) -> list[dict[str, Any]]:
226 """
227 Full-text name search across all node types in all registered repos.
228
229 Returns a list of dicts with keys: label, name, file_path.
230 """
231 cypher = (
232 "MATCH (n) "
233 "WHERE toLower(n.name) CONTAINS toLower($q) "
234 "RETURN labels(n)[0] AS label, n.name AS name, "
235 " coalesce(n.file_path, n.path, '') AS file_path "
236 f"LIMIT {int(limit)}"
237 )
238 result = self.store.query(cypher, {"q": query})
239 rows = result.result_set or []
240 return [
241 {"label": row[0], "name": r
242 g importaceManager.
243
244 Usage::
245
246 f"""
247 Multi-repo support — index and query across multiple repositories.
248
249 Issue: #62 adds WorkspaceMode (UNIFIED / FEDERATED) and WorkspaceManager.
250
251 Usage::
252
253 from navegador.multirepo import MultiRepoManager, WorkspaceMode, WorkspaceManager
254
255 # Legacy: single shared graph
256 mgr = MultiRepoManager(store)
257 mgr.add_repo("backend", "/path/to/backend")
258 mgr.add_repo("frontend", "/path/to/frontend")
259 stats = mgr.ingest_all()
260 results = mgr.cross_repo_search("authenticate")
261
262 # v0.4: workspace with explicit mode
263 ws = WorkspaceManager(store, mode=WorkspaceMode.UNIFIED)
264 ws.add_repo("backend", "/path/to/backend")
265 ws.add_repo("frontend", "/path/to/frontend")
266 stats = ws.ingest_all()
267 results = ws.search("authenticate")
268
269 # Federated: each repo gets its own graph; cross-repo queries merge results
270 ws_fed = WorkspaceManager(store, mode=WorkspaceMode.FEDERATED)
271 ws_fed.add_repo("backend", "/path/to/backend")
272 results = ws_fed.search("authenticate")
273 """
274
275 from __future__ import annotations
276
277 import logging
278 from enum import Enum
279 from pathlib import Path
280 from typing import Any
281
282 from navegador.graph.schema import NodeLabel
283 from navegador.graph.store import GraphStore
284
285 logger = logging.getLogger(__name__)
286
287 # Key used to store repo registry as a special node in the graph
288 _REGISTRY_LABEL = "RepoRegistry"
289
290
291 # ── WorkspaceMode ─────────────────────────────────────────────────────────────
292
293
294 class WorkspaceMode(str, Enum):
295 """
296 Controls how a multi-repo workspace stores its graph data.
297
298 UNIFIED
299 All repositories share one graph. Cross-repo traversal is trivial
300 but repo isolation is not enforced.
301
302 FEDERATED
303 Each repository gets its own named graph. Cross-repo queries are
304 executed against each graph in turn and the results are merged.
305 Provides namespace isolation — nodes in repo A cannot accidentally
306 collide with nodes in repo B.
307 """
308
309 UNIFIED = "unified"
310 FEDERATED = "federated"
311
312
313 # ── WorkspaceManager ──────────────────────────────────────────────────────────
314
315
316 class WorkspaceManager:
317 """
318 Multi-repo workspace with explicit UNIFIED or FEDERATED mode.
319
320 In UNIFIED mode this is a thin wrapper around :class:`MultiRepoManager`
321 backed by a single shared :class:`~navegador.graph.store.GraphStore`.
322
323 In FEDERATED mode each repo is tracked with its own graph name. Queries
324 fan out across all per-repo graphs and merge the result lists.
325 """
326
327 def __init__(self, store: GraphStore, mode: WorkspaceMode = WorkspaceMode.UNIFIED) -> None:
328 self.store = store
329 self.mode = mode
330 # repo name → {"path": str, "graph_name": str}
331 self._repos: dict[str, dict[str, str]] = {}
332
333 # ── Registration ──────────────────────────────────────────────────────────
334
335 def add_repo(self, name: str, path: str | Path) -> None:
336 """Register a repository by name and filesystem path."""
337 resolved = str(Path(path).resolve())
338 graph_name = f"navegador_{name}" if self.mode == WorkspaceMode.FEDERATED else "navegador"
339 self._repos[name] = {"path": resolved, "graph_name": graph_name}
340
341 # Persist registration as a Repos
--- a/navegador/refactor.py
+++ b/navegador/refactor.py
@@ -0,0 +1,122 @@
1
+"""
2
+Coordinated rename — graph-assisted multi-file symbol refactoring.
3
+
4
+Usage:
5
+ from navegador.refactor import SymbolRenamer
6
+
7
+ renamer = SymbolRenamer(store)
8
+ preview = renamer.preview_rename("old_name", "new_name")
9
+ print(preview.affected_files)
10
+ result = renamer.apply_rename("old_name", "new_name")
11
+"""
12
+
13
+from __future__ import annotations
14
+
15
+import logging
16
+from dataclasses import dataclass, field
17
+from typing import Any
18
+
19
+from navegador.graph.store import GraphStore
20
+
21
+logger = logging.getLogger(__name__)
22
+
23
+
24
+# ── Data models ───────────────────────────────────────────────────────────────
25
+
26
+
27
+@dataclass
28
+class RenamePreview:
29
+ """Shows what would change if the rename were applied."""
30
+
31
+ old_name: str
32
+ new_name: str
33
+ affected_files: list[str] = field(default_factory=list)
34
+ affected_nodes: list[dict[str, Any]] = field(default_factory=list)
35
+ edges_updated: int = 0
36
+
37
+
38
+@dataclass
39
+class RenameResult:
40
+ """Records what actually changed after applying the rename."""
41
+
42
+ old_name: str
43
+ new_name: str
44
+ affected_files: list[str] = field(default_factory=list)
45
+ affected_nodes: list[dict[str, Any]] = field(default_factory=list)
46
+ edges_updated: int = 0
47
+
48
+
49
+# ── Core class ────────────────────────────────────────────────────────────────
50
+
51
+
52
+class SymbolRenamer:
53
+ """
54
+ Graph-assisted multi-file symbol refactoring.
55
+
56
+ Operates entirely on the graph: it finds nodes whose ``name`` matches the
57
+ symbol and updates them in place. It does *not* edit source files on disk
58
+ (that is left to the editor / agent layer).
59
+ """
60
+
61
+ def __init__(self, store: GraphStore) -> None:
62
+ self.store = store
63
+
64
+ # ── Public API ────────────────────────────────────────────────────────────
65
+
66
+ def find_references(
67
+
68
+ ) -> list[dict[str, Any]]:re) -> None:
69
+ Returnctoring.
70
+
71
+Usage:
72
+"""
73
+Coordinated rename — graph-assisted multi-file symbol refactoring.
74
+
75
+Usage:
76
+ from navegador.refactor import SymbolRenamer
77
+
78
+ renamer = SymbolRenamer(store)
79
+ preview = renamer.preview_rename("old_name", "new_name")
80
+ print(preview.affected_files)
81
+ result = renamer.apply_rename("old_name", "new_name")
82
+"""
83
+
84
+from __future__ import annotations
85
+
86
+import logging
87
+from dataclasses import dataclass, field
88
+from typing import Any
89
+
90
+from navegador.graph.store import GraphStore
91
+
92
+logger = logging.getLogger(__name__)
93
+
94
+
95
+# ── Data models ───────────────────────────────────────────────────────────────
96
+
97
+
98
+@dataclass
99
+class RenamePreview:
100
+ """Shows what would change if the rename were applied."""
101
+
102
+ old_name: str
103
+ new_name: str
104
+ affected_files: list[str] = field(default_factory=list)
105
+ affected_nodes: list[dict[str, Any]] = field(default_factory=list)
106
+ edges_updated: int = 0
107
+
108
+
109
+@dataclass
110
+class RenameResult:
111
+ """Records what actually changed after applying the rename."""
112
+
113
+ old_name: str
114
+ new_name: str
115
+ affected_files: list[str] = field(default_factory=list)
116
+ affected_nodes: list[dict[str, Any]] = field(default_factory=list)
117
+ edges_updated: int = 0
118
+
119
+
120
+# ── Core class ────────────────────────(
121
+───────�-[r]-() WHERE n.name = $name RET)
122
+──────────� ""
--- a/navegador/refactor.py
+++ b/navegador/refactor.py
@@ -0,0 +1,122 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/navegador/refactor.py
+++ b/navegador/refactor.py
@@ -0,0 +1,122 @@
1 """
2 Coordinated rename — graph-assisted multi-file symbol refactoring.
3
4 Usage:
5 from navegador.refactor import SymbolRenamer
6
7 renamer = SymbolRenamer(store)
8 preview = renamer.preview_rename("old_name", "new_name")
9 print(preview.affected_files)
10 result = renamer.apply_rename("old_name", "new_name")
11 """
12
13 from __future__ import annotations
14
15 import logging
16 from dataclasses import dataclass, field
17 from typing import Any
18
19 from navegador.graph.store import GraphStore
20
21 logger = logging.getLogger(__name__)
22
23
24 # ── Data models ───────────────────────────────────────────────────────────────
25
26
27 @dataclass
28 class RenamePreview:
29 """Shows what would change if the rename were applied."""
30
31 old_name: str
32 new_name: str
33 affected_files: list[str] = field(default_factory=list)
34 affected_nodes: list[dict[str, Any]] = field(default_factory=list)
35 edges_updated: int = 0
36
37
38 @dataclass
39 class RenameResult:
40 """Records what actually changed after applying the rename."""
41
42 old_name: str
43 new_name: str
44 affected_files: list[str] = field(default_factory=list)
45 affected_nodes: list[dict[str, Any]] = field(default_factory=list)
46 edges_updated: int = 0
47
48
49 # ── Core class ────────────────────────────────────────────────────────────────
50
51
52 class SymbolRenamer:
53 """
54 Graph-assisted multi-file symbol refactoring.
55
56 Operates entirely on the graph: it finds nodes whose ``name`` matches the
57 symbol and updates them in place. It does *not* edit source files on disk
58 (that is left to the editor / agent layer).
59 """
60
61 def __init__(self, store: GraphStore) -> None:
62 self.store = store
63
64 # ── Public API ────────────────────────────────────────────────────────────
65
66 def find_references(
67
68 ) -> list[dict[str, Any]]:re) -> None:
69 Returnctoring.
70
71 Usage:
72 """
73 Coordinated rename — graph-assisted multi-file symbol refactoring.
74
75 Usage:
76 from navegador.refactor import SymbolRenamer
77
78 renamer = SymbolRenamer(store)
79 preview = renamer.preview_rename("old_name", "new_name")
80 print(preview.affected_files)
81 result = renamer.apply_rename("old_name", "new_name")
82 """
83
84 from __future__ import annotations
85
86 import logging
87 from dataclasses import dataclass, field
88 from typing import Any
89
90 from navegador.graph.store import GraphStore
91
92 logger = logging.getLogger(__name__)
93
94
95 # ── Data models ───────────────────────────────────────────────────────────────
96
97
98 @dataclass
99 class RenamePreview:
100 """Shows what would change if the rename were applied."""
101
102 old_name: str
103 new_name: str
104 affected_files: list[str] = field(default_factory=list)
105 affected_nodes: list[dict[str, Any]] = field(default_factory=list)
106 edges_updated: int = 0
107
108
109 @dataclass
110 class RenameResult:
111 """Records what actually changed after applying the rename."""
112
113 old_name: str
114 new_name: str
115 affected_files: list[str] = field(default_factory=list)
116 affected_nodes: list[dict[str, Any]] = field(default_factory=list)
117 edges_updated: int = 0
118
119
120 # ── Core class ────────────────────────(
121 ───────�-[r]-() WHERE n.name = $name RET)
122 ──────────� ""
--- a/tests/test_v04_features.py
+++ b/tests/test_v04_features.py
@@ -0,0 +1,754 @@
1
+"""
2
+Tests for navegador v0.4 features:
3
+ #16 — Multi-repo support (MultiRepoManager)
4
+ #26 — Coordinated rename (SymbolRenamer)
5
+ #39 — CODEOWNERS integration (CodeownersIngester)
6
+ #40 — ADR ingestion (ADRIngester)
7
+ #41 — OpenAPI / GraphQL schema (APISchemaIngester)
8
+"""
9
+
10
+from __future__ import annotations
11
+
12
+import json
13
+import tempfile
14
+from pathlib import Path
15
+from unittest.mock import MagicMock, call, patch
16
+
17
+import pytest
18
+from click.testing import CliRunner
19
+
20
+from navegador.cli.commands import main
21
+
22
+
23
+# ── Shared helpers ────────────────────────────────────────────────────────────
24
+
25
+
26
+def _mock_store():
27
+ store = MagicMock()
28
+ store.query.return_value = MagicMock(result_set=[])
29
+ return store
30
+
31
+
32
+def _write(path: Path, content: str) -> None:
33
+ path.parent.mkdir(parents=True, exist_ok=True)
34
+ path.write_text(content, encoding="utf-8")
35
+
36
+
37
+# ═════════════════════════════════════════════════════════════════════════════
38
+# #16 — MultiRepoManager
39
+# ═════════════════════════════════════════════════════════════════════════════
40
+
41
+
42
+class TestMultiRepoManagerAddRepo:
43
+ def test_creates_repository_node(self, tmp_path):
44
+ from navegador.multirepo import MultiRepoManager
45
+
46
+ store = _mock_store()
47
+ mgr = MultiRepoManager(store)
48
+ mgr.add_repo("backend", str(tmp_path))
49
+ store.create_node.assert_called_once()
50
+ args = store.create_node.call_args[0]
51
+ assert args[0] == "Repository"
52
+ assert args[1]["name"] == "backend"
53
+
54
+ def test_resolves_path(self, tmp_path):
55
+ from navegador.multirepo import MultiRepoManager
56
+
57
+ store = _mock_store()
58
+ mgr = MultiRepoManager(store)
59
+ mgr.add_repo("x", str(tmp_path))
60
+ props = store.create_node.call_args[0][1]
61
+ assert Path(props["path"]).is_absolute()
62
+
63
+
64
+class TestMultiRepoManagerListRepos:
65
+ def test_returns_empty_list_when_no_repos(self):
66
+ from navegador.multirepo import MultiRepoManager
67
+
68
+ store = _mock_store()
69
+ store.query.return_value = MagicMock(result_set=[])
70
+ mgr = MultiRepoManager(store)
71
+ assert mgr.list_repos() == []
72
+
73
+ def test_parses_result_set(self):
74
+ from navegador.multirepo import MultiRepoManager
75
+
76
+ store = _mock_store()
77
+ store.query.return_value = MagicMock(
78
+ result_set=[["backend", "/repos/backend"], ["frontend", "/repos/frontend"]]
79
+ )
80
+ mgr = MultiRepoManager(store)
81
+ repos = mgr.list_repos()
82
+ assert len(repos) == 2
83
+ assert repos[0] == {"name": "backend", "path": "/repos/backend"}
84
+ assert repos[1] == {"name": "frontend", "path": "/repos/frontend"}
85
+
86
+
87
+class TestMultiRepoManagerIngestAll:
88
+ def test_calls_repo_ingester_for_each_repo(self, tmp_path):
89
+ from navegador.multirepo import MultiRepoManager
90
+
91
+ store = _mock_store()
92
+ # list_repos() is called first; return one repo
93
+ store.query.return_value = MagicMock(
94
+ result_set=[["svc", str(tmp_path)]]
95
+ )
96
+ mgr = MultiRepoManager(store)
97
+
98
+ mock_ingester_instance = MagicMock()
99
+ mock_ingester_instance.ingest.return_value = {"files": 3, "functions": 10}
100
+ mock_ingester_cls = MagicMock(return_value=mock_ingester_instance)
101
+
102
+ # Patch the lazy import inside ingest_all
103
+ with patch("navegador.ingestion.parser.RepoIngester", mock_ingester_cls):
104
+ # Also patch the name that is imported lazily inside the method
105
+ import navegador.multirepo as _m
106
+ import navegador.ingestion.parser as _p
107
+ original = getattr(_p, "RepoIngester", None)
108
+ _p.RepoIngester = mock_ingester_cls
109
+ try:
110
+ summary = mgr.ingest_all()
111
+ finally:
112
+ if original is not None:
113
+ _p.RepoIngester = original
114
+
115
+ assert "svc" in summary
116
+ assert summary["svc"]["files"] == 3
117
+
118
+ def test_returns_empty_when_no_repos(self):
119
+ from navegador.multirepo import MultiRepoManager
120
+
121
+ store = _mock_store()
122
+ store.query.return_value = MagicMock(result_set=[])
123
+ mgr = MultiRepoManager(store)
124
+ assert mgr.ingest_all() == {}
125
+
126
+ def test_clear_flag_calls_store_clear_when_repos_exist(self, tmp_path):
127
+ from navegador.multirepo import MultiRepoManager
128
+
129
+ store = _mock_store()
130
+ # Return one repo so ingest_all proceeds past the empty check
131
+ store.query.return_value = MagicMock(
132
+ result_set=[["svc", str(tmp_path)]]
133
+ )
134
+ mgr = MultiRepoManager(store)
135
+
136
+ mock_ingester_instance = MagicMock()
137
+ mock_ingester_instance.ingest.return_value = {"files": 1}
138
+ mock_ingester_cls = MagicMock(return_value=mock_ingester_instance)
139
+
140
+ import navegador.ingestion.parser as _p
141
+ original = getattr(_p, "RepoIngester", None)
142
+ _p.RepoIngester = mock_ingester_cls
143
+ try:
144
+ mgr.ingest_all(clear=True)
145
+ finally:
146
+ if original is not None:
147
+ _p.RepoIngester = original
148
+
149
+ store.clear.assert_called_once()
150
+
151
+
152
+class TestMultiRepoManagerCrossRepoSearch:
153
+ def test_returns_results(self):
154
+ from navegador.multirepo import MultiRepoManager
155
+
156
+ store = _mock_store()
157
+ store.query.return_value = MagicMock(
158
+ result_set=[["Function", "authenticate", "auth.py"]]
159
+ )
160
+ mgr = MultiRepoManager(store)
161
+ results = mgr.cross_repo_search("authenticate")
162
+ assert len(results) == 1
163
+ assert results[0]["name"] == "authenticate"
164
+
165
+ def test_empty_when_no_match(self):
166
+ from navegador.multirepo import MultiRepoManager
167
+
168
+ store = _mock_store()
169
+ store.query.return_value = MagicMock(result_set=[])
170
+ mgr = MultiRepoManager(store)
171
+ assert mgr.cross_repo_search("zzz_nonexistent") == []
172
+
173
+ def test_limit_is_applied(self):
174
+ from navegador.multirepo import MultiRepoManager
175
+
176
+ store = _mock_store()
177
+ store.query.return_value = MagicMock(result_set=[])
178
+ mgr = MultiRepoManager(store)
179
+ mgr.cross_repo_search("foo", limit=5)
180
+ cypher = store.query.call_args[0][0]
181
+ assert "LIMIT 5" in cypher
182
+
183
+
184
+# ── CLI: repo ──────────────────────────────────────────────────────────────
185
+
186
+
187
+class TestRepoCLI:
188
+ def test_repo_add(self, tmp_path):
189
+ runner = CliRunner()
190
+ store = _mock_store()
191
+ with patch("navegador.cli.commands._get_store", return_value=store):
192
+ result = runner.invoke(
193
+ main, ["repo", "add", "myapp", str(tmp_path)]
194
+ )
195
+ assert result.exit_code == 0
196
+ assert "myapp" in result.output
197
+
198
+ def test_repo_list_empty(self):
199
+ runner = CliRunner()
200
+ store = _mock_store()
201
+ store.query.return_value = MagicMock(result_set=[])
202
+ with patch("navegador.cli.commands._get_store", return_value=store):
203
+ result = runner.invoke(main, ["repo", "list"])
204
+ assert result.exit_code == 0
205
+
206
+ def test_repo_search(self):
207
+ runner = CliRunner()
208
+ store = _mock_store()
209
+ store.query.return_value = MagicMock(result_set=[])
210
+ with patch("navegador.cli.commands._get_store", return_value=store):
211
+ result = runner.invoke(main, ["repo", "search", "foo"])
212
+ assert result.exit_code == 0
213
+
214
+
215
+# ═════════════════════════════════════════════════════════════════════════════
216
+# #26 — SymbolRenamer
217
+# ═════════════════════════════════════════════════════════════════════════════
218
+
219
+
220
+class TestSymbolRenamerFindReferences:
221
+ def test_returns_references(self):
222
+ from navegador.refactor import SymbolRenamer
223
+
224
+ store = _mock_store()
225
+ store.query.return_value = MagicMock(
226
+ result_set=[["Function", "foo", "a.py", 10]]
227
+ )
228
+ renamer = SymbolRenamer(store)
229
+ refs = renamer.find_references("foo")
230
+ assert len(refs) == 1
231
+ assert refs[0]["name"] == "foo"
232
+ assert refs[0]["file_path"] == "a.py"
233
+
234
+ def test_filters_by_file_path(self):
235
+ from navegador.refactor import SymbolRenamer
236
+
237
+ store = _mock_store()
238
+ store.query.return_value = MagicMock(result_set=[])
239
+ renamer = SymbolRenamer(store)
240
+ renamer.find_references("foo", file_path="a.py")
241
+ cypher = store.query.call_args[0][0]
242
+ assert "file_path" in cypher
243
+
244
+ def test_returns_empty_list_when_no_matches(self):
245
+ from navegador.refactor import SymbolRenamer
246
+
247
+ store = _mock_store()
248
+ store.query.return_value = MagicMock(result_set=[])
249
+ renamer = SymbolRenamer(store)
250
+ assert renamer.find_references("nonexistent") == []
251
+
252
+
253
+class TestSymbolRenamerPreview:
254
+ def test_preview_does_not_update_graph(self):
255
+ from navegador.refactor import SymbolRenamer
256
+
257
+ store = _mock_store()
258
+ store.query.return_value = MagicMock(result_set=[])
259
+ renamer = SymbolRenamer(store)
260
+ preview = renamer.preview_rename("old", "new")
261
+ # No SET query should have been issued
262
+ for c in store.query.call_args_list:
263
+ assert "SET n.name" not in (c[0][0] if c[0] else "")
264
+
265
+ assert preview.old_name == "old"
266
+ assert preview.new_name == "new"
267
+
268
+ def test_preview_collects_affected_files(self):
269
+ from navegador.refactor import SymbolRenamer
270
+
271
+ store = _mock_store()
272
+
273
+ def _side(cypher, params=None):
274
+ if "SET" not in cypher:
275
+ return MagicMock(
276
+ result_set=[["Function", "old", "a.py", 1], ["Function", "old", "b.py", 5]]
277
+ )
278
+ return MagicMock(result_set=[])
279
+
280
+ store.query.side_effect = _side
281
+ renamer = SymbolRenamer(store)
282
+ preview = renamer.preview_rename("old", "new")
283
+ assert set(preview.affected_files) == {"a.py", "b.py"}
284
+
285
+
286
+class TestSymbolRenamerApply:
287
+ def test_apply_issues_set_query(self):
288
+ from navegador.refactor import SymbolRenamer
289
+
290
+ store = _mock_store()
291
+ store.query.return_value = MagicMock(result_set=[])
292
+ renamer = SymbolRenamer(store)
293
+ renamer.apply_rename("old", "new")
294
+ cypher_calls = [c[0][0] for c in store.query.call_args_list]
295
+ assert any("SET n.name" in c for c in cypher_calls)
296
+
297
+ def test_apply_returns_result_with_names(self):
298
+ from navegador.refactor import SymbolRenamer
299
+
300
+ store = _mock_store()
301
+ store.query.return_value = MagicMock(result_set=[])
302
+ renamer = SymbolRenamer(store)
303
+ result = renamer.apply_rename("alpha", "beta")
304
+ assert result.old_name == "alpha"
305
+ assert result.new_name == "beta"
306
+
307
+
308
+# ── CLI: rename ───────────────────────────────────────────────────────────────
309
+
310
+
311
+class TestRenameCLI:
312
+ def test_rename_preview(self):
313
+ runner = CliRunner()
314
+ store = _mock_store()
315
+ store.query.return_value = MagicMock(result_set=[])
316
+ with patch("navegador.cli.commands._get_store", return_value=store):
317
+ result = runner.invoke(main, ["rename", "old_fn", "new_fn", "--preview"])
318
+ assert result.exit_code == 0
319
+
320
+ def test_rename_apply(self):
321
+ runner = CliRunner()
322
+ store = _mock_store()
323
+ store.query.return_value = MagicMock(result_set=[])
324
+ with patch("navegador.cli.commands._get_store", return_value=store):
325
+ result = runner.invoke(main, ["rename", "old_fn", "new_fn"])
326
+ assert result.exit_code == 0
327
+
328
+
329
+# ═════════════════════════════════════════════════════════════════════════════
330
+# #39 — CodeownersIngester
331
+# ═════════════════════════════════════════════════════════════════════════════
332
+
333
+
334
+class TestCodeownersIngesterParseFile:
335
+ def test_parses_basic_entries(self, tmp_path):
336
+ from navegador.codeowners import CodeownersIngester
337
+
338
+ co = tmp_path / "CODEOWNERS"
339
+ co.write_text("*.py @alice @bob\ndocs/ @carol\n")
340
+ ingester = CodeownersIngester(_mock_store())
341
+ entries = ingester._parse_codeowners(co)
342
+ assert len(entries) == 2
343
+ assert entries[0] == ("*.py", ["@alice", "@bob"])
344
+ assert entries[1] == ("docs/", ["@carol"])
345
+
346
+ def test_ignores_comments(self, tmp_path):
347
+ from navegador.codeowners import CodeownersIngester
348
+
349
+ co = tmp_path / "CODEOWNERS"
350
+ co.write_text("# comment\n*.py @alice\n")
351
+ ingester = CodeownersIngester(_mock_store())
352
+ entries = ingester._parse_codeowners(co)
353
+ assert len(entries) == 1
354
+
355
+ def test_ignores_blank_lines(self, tmp_path):
356
+ from navegador.codeowners import CodeownersIngester
357
+
358
+ co = tmp_path / "CODEOWNERS"
359
+ co.write_text("\n\n*.py @alice\n\n")
360
+ ingester = CodeownersIngester(_mock_store())
361
+ entries = ingester._parse_codeowners(co)
362
+ assert len(entries) == 1
363
+
364
+ def test_handles_email_owner(self, tmp_path):
365
+ from navegador.codeowners import CodeownersIngester
366
+
367
+ co = tmp_path / "CODEOWNERS"
368
+ co.write_text("*.go [email protected]\n")
369
+ ingester = CodeownersIngester(_mock_store())
370
+ entries = ingester._parse_codeowners(co)
371
+ assert entries[0][1] == ["[email protected]"]
372
+
373
+
374
class TestCodeownersIngesterIngest:
    """ingest() should create owner/pattern nodes plus ownership edges."""

    def test_creates_person_nodes(self, tmp_path):
        from navegador.codeowners import CodeownersIngester

        (tmp_path / "CODEOWNERS").write_text("*.py @alice\n")
        stats = CodeownersIngester(_mock_store()).ingest(str(tmp_path))
        assert stats["owners"] == 1
        assert stats["patterns"] == 1
        assert stats["edges"] == 1

    def test_deduplicates_owners(self, tmp_path):
        from navegador.codeowners import CodeownersIngester

        (tmp_path / "CODEOWNERS").write_text("*.py @alice\ndocs/ @alice\n")
        stats = CodeownersIngester(_mock_store()).ingest(str(tmp_path))
        # alice appears in both patterns but should only be created once
        assert stats["owners"] == 1
        assert stats["patterns"] == 2

    def test_returns_zeros_when_no_codeowners(self, tmp_path):
        from navegador.codeowners import CodeownersIngester

        stats = CodeownersIngester(_mock_store()).ingest(str(tmp_path))
        assert stats == {"owners": 0, "patterns": 0, "edges": 0}

    def test_finds_github_codeowners(self, tmp_path):
        from navegador.codeowners import CodeownersIngester

        hidden = tmp_path / ".github"
        hidden.mkdir()
        (hidden / "CODEOWNERS").write_text("* @team\n")
        stats = CodeownersIngester(_mock_store()).ingest(str(tmp_path))
        assert stats["owners"] == 1
415
+
416
+
417
+# ── CLI: codeowners ───────────────────────────────────────────────────────────
418
+
419
+
420
class TestCodeownersCLI:
    """The `codeowners` CLI command should report ownership info."""

    def test_cli_codeowners(self, tmp_path):
        (tmp_path / "CODEOWNERS").write_text("*.py @alice\n")
        store = _mock_store()
        with patch("navegador.cli.commands._get_store", return_value=store):
            outcome = CliRunner().invoke(main, ["codeowners", str(tmp_path)])
        assert outcome.exit_code == 0
        assert "owner" in outcome.output
429
+
430
+
431
+# ═════════════════════════════════════════════════════════════════════════════
432
+# #40 — ADRIngester
433
+# ═════════════════════════════════════════════════════════════════════════════
434
+
435
+
436
# Minimal MADR-style ADR used by the ADRIngester tests below: one H1 title
# followed by Status / Context / Decision / Rationale / Date sections.
_SAMPLE_ADR = """\
# Use FalkorDB as the graph database

## Status

Accepted

## Context

We need a property graph DB.

## Decision

We will use FalkorDB.

## Rationale

Best performance for our use case. Supports Cypher.

## Date

2024-01-15
"""
459
+
460
+
461
class TestADRIngesterParse:
    """_parse_adr() should extract the MADR fields from a markdown file."""

    @staticmethod
    def _parse(adr_file):
        # Shared shorthand: parse one file through a fresh ingester.
        from navegador.adr import ADRIngester

        return ADRIngester(_mock_store())._parse_adr(adr_file)

    def test_parses_title(self, tmp_path):
        adr_file = tmp_path / "0001-use-falkordb.md"
        adr_file.write_text(_SAMPLE_ADR)
        parsed = self._parse(adr_file)
        assert parsed is not None
        assert "FalkorDB" in parsed["description"]

    def test_parses_status(self, tmp_path):
        adr_file = tmp_path / "0001-test.md"
        adr_file.write_text(_SAMPLE_ADR)
        assert self._parse(adr_file)["status"] == "accepted"

    def test_parses_rationale(self, tmp_path):
        adr_file = tmp_path / "0001-test.md"
        adr_file.write_text(_SAMPLE_ADR)
        assert "performance" in self._parse(adr_file)["rationale"].lower()

    def test_parses_date(self, tmp_path):
        adr_file = tmp_path / "0001-test.md"
        adr_file.write_text(_SAMPLE_ADR)
        assert self._parse(adr_file)["date"] == "2024-01-15"

    def test_uses_stem_as_name(self, tmp_path):
        adr_file = tmp_path / "0042-my-decision.md"
        adr_file.write_text(_SAMPLE_ADR)
        assert self._parse(adr_file)["name"] == "0042-my-decision"

    def test_returns_none_for_non_adr(self, tmp_path):
        adr_file = tmp_path / "readme.md"
        adr_file.write_text("No heading here.")
        assert self._parse(adr_file) is None
515
+
516
+
517
class TestADRIngesterIngest:
    """ingest() should create one Decision node per parseable ADR file."""

    def test_creates_decision_nodes(self, tmp_path):
        from navegador.adr import ADRIngester

        for filename in ("0001-first.md", "0002-second.md"):
            (tmp_path / filename).write_text(_SAMPLE_ADR)
        stats = ADRIngester(_mock_store()).ingest(str(tmp_path))
        assert stats["decisions"] == 2
        assert stats["skipped"] == 0

    def test_skips_files_without_h1(self, tmp_path):
        from navegador.adr import ADRIngester

        (tmp_path / "empty.md").write_text("no heading\n")
        stats = ADRIngester(_mock_store()).ingest(str(tmp_path))
        assert stats["skipped"] == 1

    def test_returns_zeros_for_empty_dir(self, tmp_path):
        from navegador.adr import ADRIngester

        stats = ADRIngester(_mock_store()).ingest(str(tmp_path))
        assert stats == {"decisions": 0, "skipped": 0}

    def test_nonexistent_dir_returns_zeros(self, tmp_path):
        from navegador.adr import ADRIngester

        stats = ADRIngester(_mock_store()).ingest(str(tmp_path / "no_such_dir"))
        assert stats == {"decisions": 0, "skipped": 0}
549
+
550
+
551
+# ── CLI: adr ─────────────────────────────────────────────────────────────────
552
+
553
+
554
class TestADRCLI:
    """`adr ingest` should load ADRs and mention decisions in its output."""

    def test_adr_ingest(self, tmp_path):
        (tmp_path / "0001-test.md").write_text(_SAMPLE_ADR)
        store = _mock_store()
        with patch("navegador.cli.commands._get_store", return_value=store):
            outcome = CliRunner().invoke(main, ["adr", "ingest", str(tmp_path)])
        assert outcome.exit_code == 0
        assert "decision" in outcome.output.lower()
563
+
564
+
565
+# ═════════════════════════════════════════════════════════════════════════════
566
+# #41 — APISchemaIngester
567
+# ═════════════════════════════════════════════════════════════════════════════
568
+
569
+
570
# OpenAPI 3.0 document as YAML text. NOTE(review): not referenced by any
# test visible in this file — presumably kept for YAML-ingestion coverage;
# confirm before removing.
_OPENAPI_YAML = """\
openapi: "3.0.0"
info:
  title: Test API
  version: "1.0"
paths:
  /users:
    get:
      operationId: listUsers
      summary: List all users
      tags:
        - users
    post:
      operationId: createUser
      summary: Create a user
components:
  schemas:
    User:
      description: A user object
      type: object
"""

# OpenAPI 3.0 document as a dict, serialised with json.dumps in the tests.
# /items has one operation with an operationId and one without.
_OPENAPI_JSON = {
    "openapi": "3.0.0",
    "info": {"title": "Test API", "version": "1.0"},
    "paths": {
        "/items": {
            "get": {"operationId": "listItems", "summary": "List items"},
            "post": {"summary": "Create item"},
        }
    },
    "components": {
        "schemas": {
            "Item": {"description": "An item", "type": "object"}
        }
    },
}

# GraphQL SDL fixture: Query/Mutation operation types, one object type and
# one input type.
_GRAPHQL_SCHEMA = """\
type Query {
  users: [User]
  user(id: ID!): User
}

type Mutation {
  createUser(name: String!): User
}

type User {
  id: ID!
  name: String!
  email: String
}

input CreateUserInput {
  name: String!
  email: String
}
"""
629
+
630
+
631
class TestAPISchemaIngesterOpenAPI:
    """ingest_openapi() should map paths→endpoints and schemas→classes."""

    def test_ingest_openapi_json(self, tmp_path):
        from navegador.api_schema import APISchemaIngester

        spec = tmp_path / "api.json"
        spec.write_text(json.dumps(_OPENAPI_JSON))
        stats = APISchemaIngester(_mock_store()).ingest_openapi(str(spec))
        assert stats["endpoints"] >= 2
        assert stats["schemas"] >= 1

    def test_ingest_creates_function_nodes(self, tmp_path):
        from navegador.api_schema import APISchemaIngester

        spec = tmp_path / "api.json"
        spec.write_text(json.dumps(_OPENAPI_JSON))
        store = _mock_store()
        APISchemaIngester(store).ingest_openapi(str(spec))
        created_labels = [c[0][0] for c in store.create_node.call_args_list]
        assert "Function" in created_labels

    def test_ingest_creates_class_nodes_for_schemas(self, tmp_path):
        from navegador.api_schema import APISchemaIngester

        spec = tmp_path / "api.json"
        spec.write_text(json.dumps(_OPENAPI_JSON))
        store = _mock_store()
        APISchemaIngester(store).ingest_openapi(str(spec))
        created_labels = [c[0][0] for c in store.create_node.call_args_list]
        assert "Class" in created_labels

    def test_missing_file_returns_zeros(self, tmp_path):
        from navegador.api_schema import APISchemaIngester

        missing = tmp_path / "no.yaml"
        stats = APISchemaIngester(_mock_store()).ingest_openapi(str(missing))
        assert stats == {"endpoints": 0, "schemas": 0}

    def test_empty_paths_returns_zeros(self, tmp_path):
        from navegador.api_schema import APISchemaIngester

        spec = tmp_path / "empty.json"
        spec.write_text(json.dumps({"openapi": "3.0.0", "info": {}}))
        stats = APISchemaIngester(_mock_store()).ingest_openapi(str(spec))
        assert stats == {"endpoints": 0, "schemas": 0}
677
+
678
+
679
class TestAPISchemaIngesterGraphQL:
    """ingest_graphql() should count type definitions and their fields."""

    def test_ingest_graphql_types(self, tmp_path):
        from navegador.api_schema import APISchemaIngester

        sdl = tmp_path / "schema.graphql"
        sdl.write_text(_GRAPHQL_SCHEMA)
        stats = APISchemaIngester(_mock_store()).ingest_graphql(str(sdl))
        # User + CreateUserInput → type nodes
        assert stats["types"] >= 1

    def test_ingest_graphql_query_fields(self, tmp_path):
        from navegador.api_schema import APISchemaIngester

        sdl = tmp_path / "schema.graphql"
        sdl.write_text(_GRAPHQL_SCHEMA)
        stats = APISchemaIngester(_mock_store()).ingest_graphql(str(sdl))
        # Query.users, Query.user, Mutation.createUser
        assert stats["fields"] >= 2

    def test_missing_file_returns_zeros(self, tmp_path):
        from navegador.api_schema import APISchemaIngester

        missing = tmp_path / "no.graphql"
        stats = APISchemaIngester(_mock_store()).ingest_graphql(str(missing))
        assert stats == {"types": 0, "fields": 0}
706
+
707
+
708
+# ── CLI: api ──────────────────────────────────────────────────────────────────
709
+
710
+
711
class TestAPICLI:
    """`api ingest` should handle both schema flavours plus --json output."""

    @staticmethod
    def _invoke(args, store):
        # Run the CLI against a mocked store and return the click result.
        with patch("navegador.cli.commands._get_store", return_value=store):
            return CliRunner().invoke(main, args)

    def test_api_ingest_openapi_json(self, tmp_path):
        spec = tmp_path / "api.json"
        spec.write_text(json.dumps(_OPENAPI_JSON))
        outcome = self._invoke(
            ["api", "ingest", str(spec), "--type", "openapi"], _mock_store()
        )
        assert outcome.exit_code == 0

    def test_api_ingest_graphql(self, tmp_path):
        sdl = tmp_path / "schema.graphql"
        sdl.write_text(_GRAPHQL_SCHEMA)
        outcome = self._invoke(
            ["api", "ingest", str(sdl), "--type", "graphql"], _mock_store()
        )
        assert outcome.exit_code == 0

    def test_api_ingest_auto_detects_graphql(self, tmp_path):
        sdl = tmp_path / "schema.graphql"
        sdl.write_text(_GRAPHQL_SCHEMA)
        outcome = self._invoke(["api", "ingest", str(sdl)], _mock_store())
        assert outcome.exit_code == 0

    def test_api_ingest_json_output(self, tmp_path):
        spec = tmp_path / "api.json"
        spec.write_text(json.dumps(_OPENAPI_JSON))
        outcome = self._invoke(
            ["api", "ingest", str(spec), "--type", "openapi", "--json"],
            _mock_store(),
        )
        assert outcome.exit_code == 0
        payload = json.loads(outcome.output)
        assert "endpoints" in payload
--- a/tests/test_v04_features.py
+++ b/tests/test_v04_features.py
@@ -0,0 +1,754 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/tests/test_v04_features.py
+++ b/tests/test_v04_features.py
@@ -0,0 +1,754 @@
1 """
2 Tests for navegador v0.4 features:
3 #16 — Multi-repo support (MultiRepoManager)
4 #26 — Coordinated rename (SymbolRenamer)
5 #39 — CODEOWNERS integration (CodeownersIngester)
6 #40 — ADR ingestion (ADRIngester)
7 #41 — OpenAPI / GraphQL schema (APISchemaIngester)
8 """
9
10 from __future__ import annotations
11
12 import json
13 import tempfile
14 from pathlib import Path
15 from unittest.mock import MagicMock, call, patch
16
17 import pytest
18 from click.testing import CliRunner
19
20 from navegador.cli.commands import main
21
22
23 # ── Shared helpers ────────────────────────────────────────────────────────────
24
25
26 def _mock_store():
27 store = MagicMock()
28 store.query.return_value = MagicMock(result_set=[])
29 return store
30
31
32 def _write(path: Path, content: str) -> None:
33 path.parent.mkdir(parents=True, exist_ok=True)
34 path.write_text(content, encoding="utf-8")
35
36
37 # ═════════════════════════════════════════════════════════════════════════════
38 # #16 — MultiRepoManager
39 # ═════════════════════════════════════════════════════════════════════════════
40
41
class TestMultiRepoManagerAddRepo:
    """add_repo() should register a Repository node with an absolute path."""

    def test_creates_repository_node(self, tmp_path):
        from navegador.multirepo import MultiRepoManager

        graph = _mock_store()
        MultiRepoManager(graph).add_repo("backend", str(tmp_path))
        graph.create_node.assert_called_once()
        positional = graph.create_node.call_args[0]
        assert positional[0] == "Repository"
        assert positional[1]["name"] == "backend"

    def test_resolves_path(self, tmp_path):
        from navegador.multirepo import MultiRepoManager

        graph = _mock_store()
        MultiRepoManager(graph).add_repo("x", str(tmp_path))
        node_props = graph.create_node.call_args[0][1]
        assert Path(node_props["path"]).is_absolute()
62
63
class TestMultiRepoManagerListRepos:
    """list_repos() should translate raw Cypher rows into dicts."""

    def test_returns_empty_list_when_no_repos(self):
        from navegador.multirepo import MultiRepoManager

        graph = _mock_store()
        graph.query.return_value = MagicMock(result_set=[])
        assert MultiRepoManager(graph).list_repos() == []

    def test_parses_result_set(self):
        from navegador.multirepo import MultiRepoManager

        graph = _mock_store()
        rows = [["backend", "/repos/backend"], ["frontend", "/repos/frontend"]]
        graph.query.return_value = MagicMock(result_set=rows)
        repos = MultiRepoManager(graph).list_repos()
        assert len(repos) == 2
        assert repos[0] == {"name": "backend", "path": "/repos/backend"}
        assert repos[1] == {"name": "frontend", "path": "/repos/frontend"}
85
86
class TestMultiRepoManagerIngestAll:
    """ingest_all() should run RepoIngester over every registered repo.

    ingest_all() imports RepoIngester lazily from navegador.ingestion.parser,
    so a single ``patch("navegador.ingestion.parser.RepoIngester", ...)`` is
    sufficient. (The previous version patched that attribute twice — once via
    ``patch`` and once via a manual swap/restore whose ``if original is not
    None`` guard would leak the mock if the attribute were absent — and held
    an unused ``import navegador.multirepo as _m``.)
    """

    def test_calls_repo_ingester_for_each_repo(self, tmp_path):
        from navegador.multirepo import MultiRepoManager

        store = _mock_store()
        # list_repos() is called first; return one repo
        store.query.return_value = MagicMock(result_set=[["svc", str(tmp_path)]])
        mgr = MultiRepoManager(store)

        mock_ingester_instance = MagicMock()
        mock_ingester_instance.ingest.return_value = {"files": 3, "functions": 10}
        mock_ingester_cls = MagicMock(return_value=mock_ingester_instance)

        with patch("navegador.ingestion.parser.RepoIngester", mock_ingester_cls):
            summary = mgr.ingest_all()

        assert "svc" in summary
        assert summary["svc"]["files"] == 3

    def test_returns_empty_when_no_repos(self):
        from navegador.multirepo import MultiRepoManager

        store = _mock_store()
        store.query.return_value = MagicMock(result_set=[])
        mgr = MultiRepoManager(store)
        assert mgr.ingest_all() == {}

    def test_clear_flag_calls_store_clear_when_repos_exist(self, tmp_path):
        from navegador.multirepo import MultiRepoManager

        store = _mock_store()
        # Return one repo so ingest_all proceeds past the empty check
        store.query.return_value = MagicMock(result_set=[["svc", str(tmp_path)]])
        mgr = MultiRepoManager(store)

        mock_ingester_instance = MagicMock()
        mock_ingester_instance.ingest.return_value = {"files": 1}
        mock_ingester_cls = MagicMock(return_value=mock_ingester_instance)

        with patch("navegador.ingestion.parser.RepoIngester", mock_ingester_cls):
            mgr.ingest_all(clear=True)

        store.clear.assert_called_once()
150
151
class TestMultiRepoManagerCrossRepoSearch:
    """cross_repo_search() should query the graph and honour the limit."""

    def test_returns_results(self):
        from navegador.multirepo import MultiRepoManager

        graph = _mock_store()
        graph.query.return_value = MagicMock(
            result_set=[["Function", "authenticate", "auth.py"]]
        )
        hits = MultiRepoManager(graph).cross_repo_search("authenticate")
        assert len(hits) == 1
        assert hits[0]["name"] == "authenticate"

    def test_empty_when_no_match(self):
        from navegador.multirepo import MultiRepoManager

        graph = _mock_store()
        graph.query.return_value = MagicMock(result_set=[])
        assert MultiRepoManager(graph).cross_repo_search("zzz_nonexistent") == []

    def test_limit_is_applied(self):
        from navegador.multirepo import MultiRepoManager

        graph = _mock_store()
        graph.query.return_value = MagicMock(result_set=[])
        MultiRepoManager(graph).cross_repo_search("foo", limit=5)
        issued_cypher = graph.query.call_args[0][0]
        assert "LIMIT 5" in issued_cypher
182
183
184 # ── CLI: repo ──────────────────────────────────────────────────────────────
185
186
class TestRepoCLI:
    """`repo` subcommands should exit cleanly against a mocked store."""

    def test_repo_add(self, tmp_path):
        store = _mock_store()
        with patch("navegador.cli.commands._get_store", return_value=store):
            outcome = CliRunner().invoke(
                main, ["repo", "add", "myapp", str(tmp_path)]
            )
        assert outcome.exit_code == 0
        assert "myapp" in outcome.output

    def test_repo_list_empty(self):
        store = _mock_store()
        store.query.return_value = MagicMock(result_set=[])
        with patch("navegador.cli.commands._get_store", return_value=store):
            outcome = CliRunner().invoke(main, ["repo", "list"])
        assert outcome.exit_code == 0

    def test_repo_search(self):
        store = _mock_store()
        store.query.return_value = MagicMock(result_set=[])
        with patch("navegador.cli.commands._get_store", return_value=store):
            outcome = CliRunner().invoke(main, ["repo", "search", "foo"])
        assert outcome.exit_code == 0
213
214
215 # ═════════════════════════════════════════════════════════════════════════════
216 # #26 — SymbolRenamer
217 # ═════════════════════════════════════════════════════════════════════════════
218
219
class TestSymbolRenamerFindReferences:
    """find_references() should map graph rows to reference dicts."""

    def test_returns_references(self):
        from navegador.refactor import SymbolRenamer

        graph = _mock_store()
        graph.query.return_value = MagicMock(
            result_set=[["Function", "foo", "a.py", 10]]
        )
        refs = SymbolRenamer(graph).find_references("foo")
        assert len(refs) == 1
        assert refs[0]["name"] == "foo"
        assert refs[0]["file_path"] == "a.py"

    def test_filters_by_file_path(self):
        from navegador.refactor import SymbolRenamer

        graph = _mock_store()
        graph.query.return_value = MagicMock(result_set=[])
        SymbolRenamer(graph).find_references("foo", file_path="a.py")
        issued_cypher = graph.query.call_args[0][0]
        assert "file_path" in issued_cypher

    def test_returns_empty_list_when_no_matches(self):
        from navegador.refactor import SymbolRenamer

        graph = _mock_store()
        graph.query.return_value = MagicMock(result_set=[])
        assert SymbolRenamer(graph).find_references("nonexistent") == []
251
252
class TestSymbolRenamerPreview:
    """preview_rename() must be read-only and report affected files."""

    def test_preview_does_not_update_graph(self):
        from navegador.refactor import SymbolRenamer

        graph = _mock_store()
        graph.query.return_value = MagicMock(result_set=[])
        preview = SymbolRenamer(graph).preview_rename("old", "new")
        # No SET query should have been issued
        for issued in graph.query.call_args_list:
            cypher = issued[0][0] if issued[0] else ""
            assert "SET n.name" not in cypher

        assert preview.old_name == "old"
        assert preview.new_name == "new"

    def test_preview_collects_affected_files(self):
        from navegador.refactor import SymbolRenamer

        graph = _mock_store()

        def _fake_query(cypher, params=None):
            # Read queries yield two matches; anything with SET yields nothing.
            if "SET" in cypher:
                return MagicMock(result_set=[])
            return MagicMock(
                result_set=[
                    ["Function", "old", "a.py", 1],
                    ["Function", "old", "b.py", 5],
                ]
            )

        graph.query.side_effect = _fake_query
        preview = SymbolRenamer(graph).preview_rename("old", "new")
        assert set(preview.affected_files) == {"a.py", "b.py"}
284
285
class TestSymbolRenamerApply:
    """apply_rename() should issue the SET query and echo the names back."""

    def test_apply_issues_set_query(self):
        from navegador.refactor import SymbolRenamer

        graph = _mock_store()
        graph.query.return_value = MagicMock(result_set=[])
        SymbolRenamer(graph).apply_rename("old", "new")
        issued = [c[0][0] for c in graph.query.call_args_list]
        assert any("SET n.name" in cypher for cypher in issued)

    def test_apply_returns_result_with_names(self):
        from navegador.refactor import SymbolRenamer

        graph = _mock_store()
        graph.query.return_value = MagicMock(result_set=[])
        outcome = SymbolRenamer(graph).apply_rename("alpha", "beta")
        assert outcome.old_name == "alpha"
        assert outcome.new_name == "beta"
306
307
308 # ── CLI: rename ───────────────────────────────────────────────────────────────
309
310
class TestRenameCLI:
    """`rename` should succeed in both preview and apply modes."""

    def test_rename_preview(self):
        store = _mock_store()
        store.query.return_value = MagicMock(result_set=[])
        with patch("navegador.cli.commands._get_store", return_value=store):
            outcome = CliRunner().invoke(
                main, ["rename", "old_fn", "new_fn", "--preview"]
            )
        assert outcome.exit_code == 0

    def test_rename_apply(self):
        store = _mock_store()
        store.query.return_value = MagicMock(result_set=[])
        with patch("navegador.cli.commands._get_store", return_value=store):
            outcome = CliRunner().invoke(main, ["rename", "old_fn", "new_fn"])
        assert outcome.exit_code == 0
327
328
329 # ═════════════════════════════════════════════════════════════════════════════
330 # #39 — CodeownersIngester
331 # ═════════════════════════════════════════════════════════════════════════════
332
333
class TestCodeownersIngesterParseFile:
    """_parse_codeowners() should yield (pattern, owners) tuples."""

    @staticmethod
    def _parse(owners_file):
        # Shared shorthand: parse one file through a fresh ingester.
        from navegador.codeowners import CodeownersIngester

        return CodeownersIngester(_mock_store())._parse_codeowners(owners_file)

    def test_parses_basic_entries(self, tmp_path):
        owners_file = tmp_path / "CODEOWNERS"
        owners_file.write_text("*.py @alice @bob\ndocs/ @carol\n")
        entries = self._parse(owners_file)
        assert len(entries) == 2
        assert entries[0] == ("*.py", ["@alice", "@bob"])
        assert entries[1] == ("docs/", ["@carol"])

    def test_ignores_comments(self, tmp_path):
        owners_file = tmp_path / "CODEOWNERS"
        owners_file.write_text("# comment\n*.py @alice\n")
        assert len(self._parse(owners_file)) == 1

    def test_ignores_blank_lines(self, tmp_path):
        owners_file = tmp_path / "CODEOWNERS"
        owners_file.write_text("\n\n*.py @alice\n\n")
        assert len(self._parse(owners_file)) == 1

    def test_handles_email_owner(self, tmp_path):
        owners_file = tmp_path / "CODEOWNERS"
        owners_file.write_text("*.go [email protected]\n")
        assert self._parse(owners_file)[0][1] == ["[email protected]"]
372
373
class TestCodeownersIngesterIngest:
    """ingest() should create owner/pattern nodes and ownership edges."""

    def test_creates_person_nodes(self, tmp_path):
        from navegador.codeowners import CodeownersIngester

        (tmp_path / "CODEOWNERS").write_text("*.py @alice\n")
        stats = CodeownersIngester(_mock_store()).ingest(str(tmp_path))
        assert stats["owners"] == 1
        assert stats["patterns"] == 1
        assert stats["edges"] == 1

    def test_deduplicates_owners(self, tmp_path):
        from navegador.codeowners import CodeownersIngester

        (tmp_path / "CODEOWNERS").write_text("*.py @alice\ndocs/ @alice\n")
        stats = CodeownersIngester(_mock_store()).ingest(str(tmp_path))
        # alice appears in both patterns but should only be created once
        assert stats["owners"] == 1
        assert stats["patterns"] == 2

    def test_returns_zeros_when_no_codeowners(self, tmp_path):
        from navegador.codeowners import CodeownersIngester

        stats = CodeownersIngester(_mock_store()).ingest(str(tmp_path))
        assert stats == {"owners": 0, "patterns": 0, "edges": 0}

    def test_finds_github_codeowners(self, tmp_path):
        from navegador.codeowners import CodeownersIngester

        hidden = tmp_path / ".github"
        hidden.mkdir()
        (hidden / "CODEOWNERS").write_text("* @team\n")
        stats = CodeownersIngester(_mock_store()).ingest(str(tmp_path))
        assert stats["owners"] == 1
415
416
417 # ── CLI: codeowners ───────────────────────────────────────────────────────────
418
419
class TestCodeownersCLI:
    """The `codeowners` CLI command should report ownership info."""

    def test_cli_codeowners(self, tmp_path):
        (tmp_path / "CODEOWNERS").write_text("*.py @alice\n")
        store = _mock_store()
        with patch("navegador.cli.commands._get_store", return_value=store):
            outcome = CliRunner().invoke(main, ["codeowners", str(tmp_path)])
        assert outcome.exit_code == 0
        assert "owner" in outcome.output
429
430
431 # ═════════════════════════════════════════════════════════════════════════════
432 # #40 — ADRIngester
433 # ═════════════════════════════════════════════════════════════════════════════
434
435
# Minimal MADR-style ADR used by the ADRIngester tests below: one H1 title
# followed by Status / Context / Decision / Rationale / Date sections.
_SAMPLE_ADR = """\
# Use FalkorDB as the graph database

## Status

Accepted

## Context

We need a property graph DB.

## Decision

We will use FalkorDB.

## Rationale

Best performance for our use case. Supports Cypher.

## Date

2024-01-15
"""
459
460
class TestADRIngesterParse:
    """_parse_adr() should extract the MADR fields from a markdown file."""

    @staticmethod
    def _parse(adr_file):
        # Shared shorthand: parse one file through a fresh ingester.
        from navegador.adr import ADRIngester

        return ADRIngester(_mock_store())._parse_adr(adr_file)

    def test_parses_title(self, tmp_path):
        adr_file = tmp_path / "0001-use-falkordb.md"
        adr_file.write_text(_SAMPLE_ADR)
        parsed = self._parse(adr_file)
        assert parsed is not None
        assert "FalkorDB" in parsed["description"]

    def test_parses_status(self, tmp_path):
        adr_file = tmp_path / "0001-test.md"
        adr_file.write_text(_SAMPLE_ADR)
        assert self._parse(adr_file)["status"] == "accepted"

    def test_parses_rationale(self, tmp_path):
        adr_file = tmp_path / "0001-test.md"
        adr_file.write_text(_SAMPLE_ADR)
        assert "performance" in self._parse(adr_file)["rationale"].lower()

    def test_parses_date(self, tmp_path):
        adr_file = tmp_path / "0001-test.md"
        adr_file.write_text(_SAMPLE_ADR)
        assert self._parse(adr_file)["date"] == "2024-01-15"

    def test_uses_stem_as_name(self, tmp_path):
        adr_file = tmp_path / "0042-my-decision.md"
        adr_file.write_text(_SAMPLE_ADR)
        assert self._parse(adr_file)["name"] == "0042-my-decision"

    def test_returns_none_for_non_adr(self, tmp_path):
        adr_file = tmp_path / "readme.md"
        adr_file.write_text("No heading here.")
        assert self._parse(adr_file) is None
515
516
class TestADRIngesterIngest:
    """ingest() should create one Decision node per parseable ADR file."""

    def test_creates_decision_nodes(self, tmp_path):
        from navegador.adr import ADRIngester

        for filename in ("0001-first.md", "0002-second.md"):
            (tmp_path / filename).write_text(_SAMPLE_ADR)
        stats = ADRIngester(_mock_store()).ingest(str(tmp_path))
        assert stats["decisions"] == 2
        assert stats["skipped"] == 0

    def test_skips_files_without_h1(self, tmp_path):
        from navegador.adr import ADRIngester

        (tmp_path / "empty.md").write_text("no heading\n")
        stats = ADRIngester(_mock_store()).ingest(str(tmp_path))
        assert stats["skipped"] == 1

    def test_returns_zeros_for_empty_dir(self, tmp_path):
        from navegador.adr import ADRIngester

        stats = ADRIngester(_mock_store()).ingest(str(tmp_path))
        assert stats == {"decisions": 0, "skipped": 0}

    def test_nonexistent_dir_returns_zeros(self, tmp_path):
        from navegador.adr import ADRIngester

        stats = ADRIngester(_mock_store()).ingest(str(tmp_path / "no_such_dir"))
        assert stats == {"decisions": 0, "skipped": 0}
549
550
551 # ── CLI: adr ─────────────────────────────────────────────────────────────────
552
553
class TestADRCLI:
    """`adr ingest` should load ADRs and mention decisions in its output."""

    def test_adr_ingest(self, tmp_path):
        (tmp_path / "0001-test.md").write_text(_SAMPLE_ADR)
        store = _mock_store()
        with patch("navegador.cli.commands._get_store", return_value=store):
            outcome = CliRunner().invoke(main, ["adr", "ingest", str(tmp_path)])
        assert outcome.exit_code == 0
        assert "decision" in outcome.output.lower()
563
564
565 # ═════════════════════════════════════════════════════════════════════════════
566 # #41 — APISchemaIngester
567 # ═════════════════════════════════════════════════════════════════════════════
568
569
# Minimal OpenAPI 3.0 document in YAML form: two operations on /users
# (one with tags, one without) and a single reusable component schema.
# NOTE: YAML is indentation-sensitive — keep this literal byte-stable.
_OPENAPI_YAML = """\
openapi: "3.0.0"
info:
  title: Test API
  version: "1.0"
paths:
  /users:
    get:
      operationId: listUsers
      summary: List all users
      tags:
        - users
    post:
      operationId: createUser
      summary: Create a user
components:
  schemas:
    User:
      description: A user object
      type: object
"""
591
# Minimal OpenAPI 3.0 document as a Python dict (serialized with json.dumps
# in the tests): two operations on /items — one WITHOUT an operationId, to
# exercise the ingester's fallback naming — plus one component schema.
_OPENAPI_JSON = {
    "openapi": "3.0.0",
    "info": {"title": "Test API", "version": "1.0"},
    "paths": {
        "/items": {
            "get": {"operationId": "listItems", "summary": "List items"},
            "post": {"summary": "Create item"},
        }
    },
    "components": {
        "schemas": {
            "Item": {"description": "An item", "type": "object"}
        }
    },
}
607
# Small GraphQL SDL fixture: operation roots (Query with two fields,
# Mutation with one) plus an object type and an input type.
_GRAPHQL_SCHEMA = """\
type Query {
  users: [User]
  user(id: ID!): User
}

type Mutation {
  createUser(name: String!): User
}

type User {
  id: ID!
  name: String!
  email: String
}

input CreateUserInput {
  name: String!
  email: String
}
"""
629
630
class TestAPISchemaIngesterOpenAPI:
    """OpenAPI ingestion: endpoints and component schemas become graph nodes."""

    @staticmethod
    def _write_spec(tmp_path):
        """Serialize the shared OpenAPI fixture to disk; return its path."""
        spec = tmp_path / "api.json"
        spec.write_text(json.dumps(_OPENAPI_JSON))
        return spec

    def test_ingest_openapi_json(self, tmp_path):
        """Both /items operations and the Item schema are counted."""
        from navegador.api_schema import APISchemaIngester

        graph = _mock_store()
        result = APISchemaIngester(graph).ingest_openapi(str(self._write_spec(tmp_path)))
        assert result["endpoints"] >= 2
        assert result["schemas"] >= 1

    def test_ingest_creates_function_nodes(self, tmp_path):
        """Each endpoint operation is stored under the Function label."""
        from navegador.api_schema import APISchemaIngester

        graph = _mock_store()
        APISchemaIngester(graph).ingest_openapi(str(self._write_spec(tmp_path)))
        used_labels = [call[0][0] for call in graph.create_node.call_args_list]
        assert "Function" in used_labels

    def test_ingest_creates_class_nodes_for_schemas(self, tmp_path):
        """Component schemas are stored under the Class label."""
        from navegador.api_schema import APISchemaIngester

        graph = _mock_store()
        APISchemaIngester(graph).ingest_openapi(str(self._write_spec(tmp_path)))
        used_labels = [call[0][0] for call in graph.create_node.call_args_list]
        assert "Class" in used_labels

    def test_missing_file_returns_zeros(self, tmp_path):
        """A nonexistent spec path is handled gracefully with zero counts."""
        from navegador.api_schema import APISchemaIngester

        result = APISchemaIngester(_mock_store()).ingest_openapi(str(tmp_path / "no.yaml"))
        assert result == {"endpoints": 0, "schemas": 0}

    def test_empty_paths_returns_zeros(self, tmp_path):
        """A spec with no paths/components yields zero counts."""
        from navegador.api_schema import APISchemaIngester

        bare = tmp_path / "empty.json"
        bare.write_text(json.dumps({"openapi": "3.0.0", "info": {}}))
        result = APISchemaIngester(_mock_store()).ingest_openapi(str(bare))
        assert result == {"endpoints": 0, "schemas": 0}
677
678
class TestAPISchemaIngesterGraphQL:
    """GraphQL SDL ingestion: type definitions and operation fields."""

    def test_ingest_graphql_types(self, tmp_path):
        """The fixture declares two non-root types, so expect at least 2.

        (Original assertion was ``>= 1`` despite its own comment listing
        both User and CreateUserInput — tightened to match the fixture.)
        """
        from navegador.api_schema import APISchemaIngester

        p = tmp_path / "schema.graphql"
        p.write_text(_GRAPHQL_SCHEMA)
        store = _mock_store()
        stats = APISchemaIngester(store).ingest_graphql(str(p))
        # User + CreateUserInput → type nodes
        assert stats["types"] >= 2

    def test_ingest_graphql_query_fields(self, tmp_path):
        """The fixture declares three operation fields, so expect at least 3.

        (Original assertion was ``>= 2`` despite its own comment listing all
        three fields — tightened to match the fixture.)
        """
        from navegador.api_schema import APISchemaIngester

        p = tmp_path / "schema.graphql"
        p.write_text(_GRAPHQL_SCHEMA)
        store = _mock_store()
        stats = APISchemaIngester(store).ingest_graphql(str(p))
        # Query.users, Query.user, Mutation.createUser
        assert stats["fields"] >= 3

    def test_missing_file_returns_zeros(self, tmp_path):
        """A nonexistent schema path is handled gracefully with zero counts."""
        from navegador.api_schema import APISchemaIngester

        store = _mock_store()
        stats = APISchemaIngester(store).ingest_graphql(str(tmp_path / "no.graphql"))
        assert stats == {"types": 0, "fields": 0}
706
707
708 # ── CLI: api ──────────────────────────────────────────────────────────────────
709
710
class TestAPICLI:
    """CLI coverage for the `api ingest` subcommand."""

    def test_api_ingest_openapi_json(self, tmp_path):
        """Explicit --type openapi on a JSON spec exits 0."""
        spec = tmp_path / "api.json"
        spec.write_text(json.dumps(_OPENAPI_JSON))
        cli = CliRunner()
        with patch("navegador.cli.commands._get_store", return_value=_mock_store()):
            outcome = cli.invoke(main, ["api", "ingest", str(spec), "--type", "openapi"])
        assert outcome.exit_code == 0

    def test_api_ingest_graphql(self, tmp_path):
        """Explicit --type graphql on an SDL file exits 0."""
        sdl = tmp_path / "schema.graphql"
        sdl.write_text(_GRAPHQL_SCHEMA)
        cli = CliRunner()
        with patch("navegador.cli.commands._get_store", return_value=_mock_store()):
            outcome = cli.invoke(main, ["api", "ingest", str(sdl), "--type", "graphql"])
        assert outcome.exit_code == 0

    def test_api_ingest_auto_detects_graphql(self, tmp_path):
        """Without --type, a .graphql extension is auto-detected."""
        sdl = tmp_path / "schema.graphql"
        sdl.write_text(_GRAPHQL_SCHEMA)
        cli = CliRunner()
        with patch("navegador.cli.commands._get_store", return_value=_mock_store()):
            outcome = cli.invoke(main, ["api", "ingest", str(sdl)])
        assert outcome.exit_code == 0

    def test_api_ingest_json_output(self, tmp_path):
        """--json emits machine-readable stats including endpoint counts."""
        spec = tmp_path / "api.json"
        spec.write_text(json.dumps(_OPENAPI_JSON))
        cli = CliRunner()
        with patch("navegador.cli.commands._get_store", return_value=_mock_store()):
            outcome = cli.invoke(
                main, ["api", "ingest", str(spec), "--type", "openapi", "--json"]
            )
        assert outcome.exit_code == 0
        payload = json.loads(outcome.output)
        assert "endpoints" in payload

Keyboard Shortcuts

Open search /
Next entry (timeline) j
Previous entry (timeline) k
Open focused entry Enter
Show this help ?
Toggle theme Top nav button