# Navegador — navegador/tests/test_v04_batch3.py
1
"""
2
Tests for navegador v0.4 batch 3 — issues #7, #18, #53, #55, #58, #61, #62.
3
4
Covers:
5
#7 / #18 — PlanopticonPipeline (pipeline, action items, decision timeline, auto-link)
6
#53 — TicketIngester (GitHub, Linear stub, Jira stub)
7
#55 — FossilAdapter (current_branch, changed_files, file_history, blame)
8
#58 — DependencyIngester (npm, pip/requirements.txt, pip/pyproject.toml, cargo)
9
#61 — SubmoduleIngester (detect_submodules, ingest_with_submodules)
10
#62 — WorkspaceMode enum, WorkspaceManager (unified + federated)
11
"""
12
13
from __future__ import annotations
14
15
import json
16
import subprocess
17
import tempfile
18
from pathlib import Path
19
from unittest.mock import MagicMock, patch
20
21
import pytest
22
23
24
# ── Shared mock store factory ─────────────────────────────────────────────────
25
26
27
def _make_store():
28
store = MagicMock()
29
store.query.return_value = MagicMock(result_set=[])
30
return store
31
32
33
# =============================================================================
# #7 / #18 — PlanopticonPipeline
# =============================================================================


class TestPlanopticonPipelineDetectInput:
    """_detect_input correctly identifies file types from path.

    The module under test is imported lazily inside each test, matching the
    convention used throughout this file.  A previous class-level
    ``from navegador.planopticon_pipeline import PlanopticonPipeline as _Pipeline``
    executed at collection time (defeating the lazy-import convention) and the
    resulting ``_Pipeline`` attribute was never used, so it has been removed.
    """

    def test_manifest_file(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        f = tmp_path / "manifest.json"
        f.write_text("{}")
        itype, resolved = PlanopticonPipeline._detect_input(f)
        assert itype == "manifest"
        assert resolved == f

    def test_interchange_file(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        f = tmp_path / "interchange.json"
        f.write_text("{}")
        itype, _ = PlanopticonPipeline._detect_input(f)
        assert itype == "interchange"

    def test_batch_file(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        f = tmp_path / "batch_manifest.json"
        f.write_text("{}")
        itype, _ = PlanopticonPipeline._detect_input(f)
        assert itype == "batch"

    def test_kg_file_default(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        f = tmp_path / "knowledge_graph.json"
        f.write_text("{}")
        itype, _ = PlanopticonPipeline._detect_input(f)
        assert itype == "kg"

    def test_directory_with_manifest(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        # A directory containing a known file resolves to that file's type.
        (tmp_path / "manifest.json").write_text("{}")
        itype, resolved = PlanopticonPipeline._detect_input(tmp_path)
        assert itype == "manifest"

    def test_directory_without_known_files_raises(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        with pytest.raises(FileNotFoundError):
            PlanopticonPipeline._detect_input(tmp_path)
88
89
90
class TestPlanopticonPipelineRun:
    """PlanopticonPipeline.run delegates to PlanopticonIngester and auto-links."""

    def test_run_returns_stats_with_linked_key(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph_path = tmp_path / "knowledge_graph.json"
        graph_path.write_text(json.dumps({"nodes": [], "relationships": [], "sources": []}))

        graph = _make_store()
        stats = PlanopticonPipeline(graph, source_tag="test").run(str(graph_path))

        assert "nodes" in stats
        assert "linked" in stats

    def test_run_calls_ingester(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        payload = {
            "nodes": [{"id": "n1", "type": "concept", "name": "Auth"}],
            "relationships": [],
            "sources": [],
        }
        graph_path = tmp_path / "knowledge_graph.json"
        graph_path.write_text(json.dumps(payload))

        graph = _make_store()
        stats = PlanopticonPipeline(graph).run(str(graph_path), source_tag="Meeting")

        assert isinstance(stats, dict)
        # create_node should have been called at least once for the concept node
        graph.create_node.assert_called()
125
126
127
class TestExtractActionItems:
    """extract_action_items pulls action items from various KG data formats."""

    def test_action_items_list(self):
        from navegador.planopticon_pipeline import ActionItem, PlanopticonPipeline

        payload = {
            "action_items": [
                {"action": "Write tests", "assignee": "Alice", "priority": "high"},
                {"action": "Deploy service", "assignee": "", "priority": "info"},
            ]
        }
        items = PlanopticonPipeline.extract_action_items(payload)
        assert len(items) == 2
        assert all(isinstance(entry, ActionItem) for entry in items)
        assert items[0].action == "Write tests"
        assert items[0].assignee == "Alice"
        assert items[1].action == "Deploy service"

    def test_blank_actions_skipped(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        # Whitespace-only actions are dropped.
        payload = {"action_items": [{"action": " ", "assignee": "Bob"}]}
        assert PlanopticonPipeline.extract_action_items(payload) == []

    def test_entities_with_task_type(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        payload = {
            "entities": [
                {"planning_type": "task", "name": "Refactor auth module"},
                {"planning_type": "decision", "name": "Use PostgreSQL"},
            ]
        }
        items = PlanopticonPipeline.extract_action_items(payload)
        # Only the "task" entity becomes an action item.
        assert len(items) == 1
        assert items[0].action == "Refactor auth module"

    def test_nodes_with_action_item_type(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        payload = {"nodes": [{"type": "action_item", "name": "Update documentation"}]}
        items = PlanopticonPipeline.extract_action_items(payload)
        assert len(items) == 1
        assert items[0].action == "Update documentation"

    def test_empty_data_returns_empty_list(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        assert PlanopticonPipeline.extract_action_items({}) == []

    def test_action_item_to_dict(self):
        from navegador.planopticon_pipeline import ActionItem

        as_dict = ActionItem(action="Do thing", assignee="Carol", priority="critical").to_dict()
        assert as_dict["action"] == "Do thing"
        assert as_dict["assignee"] == "Carol"
        assert as_dict["priority"] == "critical"
191
192
193
class TestBuildDecisionTimeline:
    """build_decision_timeline queries the store and returns chronological list."""

    def test_returns_list_from_store(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph = _make_store()
        graph.query.return_value = MagicMock(
            result_set=[
                ["Use microservices", "Split monolith", "arch", "accepted", "Scalability", "2024-01-10"],
                ["Use PostgreSQL", "Relational DB", "data", "accepted", "ACID", "2024-02-01"],
            ]
        )
        timeline = PlanopticonPipeline.build_decision_timeline(graph)
        assert len(timeline) == 2
        assert timeline[0]["name"] == "Use microservices"
        assert timeline[0]["date"] == "2024-01-10"

    def test_returns_empty_on_query_failure(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph = _make_store()
        graph.query.side_effect = Exception("DB error")
        # Query failures degrade to an empty timeline instead of raising.
        assert PlanopticonPipeline.build_decision_timeline(graph) == []

    def test_entry_has_required_keys(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph = _make_store()
        graph.query.return_value = MagicMock(
            result_set=[["D1", "Desc", "domain", "accepted", "rationale", "2024-01-01"]]
        )
        timeline = PlanopticonPipeline.build_decision_timeline(graph)
        expected_keys = {"name", "description", "domain", "status", "rationale", "date"}
        assert expected_keys.issubset(timeline[0].keys())
229
230
231
class TestAutoLinkToCode:
    """auto_link_to_code matches knowledge nodes to code by name similarity."""

    def test_returns_zero_when_no_nodes(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph = _make_store()
        graph.query.return_value = MagicMock(result_set=[])
        assert PlanopticonPipeline.auto_link_to_code(graph) == 0

    def test_links_matching_nodes(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph = _make_store()

        # First call: knowledge nodes; second: code nodes; subsequent: merge queries
        seen = []

        def fake_query(cypher, params=None):
            seen.append(cypher)
            if len(seen) == 1:
                # knowledge nodes — use "authenticate" (12 chars) which IS in "authenticate_user"
                return MagicMock(result_set=[["Concept", "authenticate handler"]])
            if len(seen) == 2:
                # code nodes
                return MagicMock(result_set=[["Function", "authenticate_user"]])
            # MERGE query — no result needed
            return MagicMock(result_set=[])

        graph.query.side_effect = fake_query
        # "authenticate" (12 chars, ≥4) is contained in "authenticate_user"
        assert PlanopticonPipeline.auto_link_to_code(graph) >= 1

    def test_short_tokens_skipped(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph = _make_store()

        seen = []

        def fake_query(cypher, params=None):
            seen.append(cypher)
            if len(seen) == 1:
                return MagicMock(result_set=[["Concept", "API"]])  # all tokens < 4 chars
            if len(seen) == 2:
                return MagicMock(result_set=[["Function", "api_handler"]])
            return MagicMock(result_set=[])

        graph.query.side_effect = fake_query
        # "api" is only 3 chars — should not match
        assert PlanopticonPipeline.auto_link_to_code(graph) == 0

    def test_returns_zero_on_query_failure(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph = _make_store()
        graph.query.side_effect = Exception("boom")
        assert PlanopticonPipeline.auto_link_to_code(graph) == 0
293
294
295
# =============================================================================
# #53 — TicketIngester
# =============================================================================


class TestTicketIngesterGitHub:
    """TicketIngester.ingest_github_issues fetches and ingests GitHub issues."""

    def _make_issue(self, number=1, title="Fix bug", body="Details", labels=None, assignees=None):
        # Minimal GitHub REST issue payload shape.
        return {
            "number": number,
            "title": title,
            "body": body,
            "html_url": f"https://github.com/owner/repo/issues/{number}",
            "labels": [{"name": lbl} for lbl in (labels or [])],
            "assignees": [{"login": login} for login in (assignees or [])],
        }

    @staticmethod
    def _fake_response(payload):
        # Context-manager mock standing in for urllib.request.urlopen's result.
        resp = MagicMock()
        resp.__enter__ = MagicMock(return_value=resp)
        resp.__exit__ = MagicMock(return_value=False)
        resp.read.return_value = json.dumps(payload).encode()
        return resp

    def test_ingest_creates_ticket_nodes(self):
        from navegador.pm import TicketIngester

        graph = _make_store()
        graph.query.return_value = MagicMock(result_set=[])
        ingester = TicketIngester(graph)

        issues = [self._make_issue(1, "Bug report"), self._make_issue(2, "Feature request")]
        with patch("urllib.request.urlopen") as fake_urlopen:
            fake_urlopen.return_value = self._fake_response(issues)
            stats = ingester.ingest_github_issues("owner/repo", token="test_token")

        assert stats["tickets"] == 2
        assert "linked" in stats

    def test_pull_requests_filtered_out(self):
        from navegador.pm import TicketIngester

        graph = _make_store()
        graph.query.return_value = MagicMock(result_set=[])
        ingester = TicketIngester(graph)

        # Mix of issue and PR
        real_issue = self._make_issue(1, "Real issue")
        pull = {**self._make_issue(2, "A PR"), "pull_request": {"url": "..."}}

        with patch("urllib.request.urlopen") as fake_urlopen:
            fake_urlopen.return_value = self._fake_response([real_issue, pull])
            stats = ingester.ingest_github_issues("owner/repo")

        assert stats["tickets"] == 1  # PR filtered out

    def test_assignees_become_person_nodes(self):
        from navegador.pm import TicketIngester

        graph = _make_store()
        graph.query.return_value = MagicMock(result_set=[])
        ingester = TicketIngester(graph)

        with patch("urllib.request.urlopen") as fake_urlopen:
            fake_urlopen.return_value = self._fake_response(
                [self._make_issue(1, "Assign me", assignees=["alice"])]
            )
            ingester.ingest_github_issues("owner/repo")

        # Person node created for alice
        person_calls = [
            call for call in graph.create_node.call_args_list
            if call.args and hasattr(call.args[0], "value") and call.args[0].value == "Person"
        ]
        assert len(person_calls) >= 1

    def test_network_error_raises_runtime_error(self):
        from navegador.pm import TicketIngester

        ingester = TicketIngester(_make_store())

        with patch("urllib.request.urlopen", side_effect=Exception("network error")):
            with pytest.raises(RuntimeError, match="Failed to fetch GitHub issues"):
                ingester.ingest_github_issues("owner/repo")
389
390
391
class TestTicketIngesterSeverity:
    """_github_severity maps label names to severity levels."""

    def test_critical_label(self):
        from navegador.pm import TicketIngester

        for label in ("critical", "blocker"):
            assert TicketIngester._github_severity([label]) == "critical"

    def test_warning_label(self):
        from navegador.pm import TicketIngester

        for label in ("bug", "high"):
            assert TicketIngester._github_severity([label]) == "warning"

    def test_default_info(self):
        from navegador.pm import TicketIngester

        # No labels, or only unrecognized ones, fall back to "info".
        assert TicketIngester._github_severity([]) == "info"
        assert TicketIngester._github_severity(["enhancement"]) == "info"
411
412
413
class TestTicketIngesterStubs:
    """Linear and Jira raise NotImplementedError with helpful messages."""

    def test_linear_raises_not_implemented(self):
        from navegador.pm import TicketIngester

        ingester = TicketIngester(_make_store())
        with pytest.raises(NotImplementedError, match="Linear"):
            ingester.ingest_linear("lin_apikey")

    def test_jira_raises_not_implemented(self):
        from navegador.pm import TicketIngester

        ingester = TicketIngester(_make_store())
        with pytest.raises(NotImplementedError, match="Jira"):
            ingester.ingest_jira("https://company.atlassian.net", token="tok")

    def test_linear_message_contains_guidance(self):
        from navegador.pm import TicketIngester

        ingester = TicketIngester(_make_store())
        with pytest.raises(NotImplementedError) as exc_info:
            ingester.ingest_linear("lin_key", project="MyProject")
        message = str(exc_info.value)
        assert "53" in message or "Linear" in message

    def test_jira_message_contains_guidance(self):
        from navegador.pm import TicketIngester

        ingester = TicketIngester(_make_store())
        with pytest.raises(NotImplementedError) as exc_info:
            ingester.ingest_jira("https://x.atlassian.net")
        message = str(exc_info.value)
        assert "Jira" in message or "jira" in message.lower()
445
446
447
# =============================================================================
# #55 — FossilAdapter
# =============================================================================


@pytest.fixture()
def fossil_dir(tmp_path):
    """A fake Fossil checkout: a directory holding a `.fslckout` marker file."""
    checkout = tmp_path / "fossil_repo"
    checkout.mkdir()
    (checkout / ".fslckout").touch()
    return checkout
458
459
460
class TestFossilAdapterCurrentBranch:
    """current_branch calls 'fossil branch current' and returns stripped output."""

    def test_returns_branch_name(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="trunk\n")):
            assert adapter.current_branch() == "trunk"

    def test_strips_whitespace(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout=" feature-branch \n")):
            assert adapter.current_branch() == "feature-branch"

    def test_calls_fossil_branch_current(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="main\n")) as run_mock:
            adapter.current_branch()

        # First positional argument of subprocess.run is the command vector.
        assert run_mock.call_args[0][0] == ["fossil", "branch", "current"]
499
500
501
class TestFossilAdapterChangedFiles:
    """changed_files calls 'fossil changes --differ' and parses output."""

    def test_returns_changed_file_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        out = MagicMock(stdout="EDITED src/main.py\nADDED tests/test_new.py\n")
        with patch("subprocess.run", return_value=out):
            files = adapter.changed_files()

        assert "src/main.py" in files
        assert "tests/test_new.py" in files

    def test_empty_output_returns_empty_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="")):
            assert adapter.changed_files() == []

    def test_calls_fossil_changes_differ(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="")) as run_mock:
            adapter.changed_files()

        assert run_mock.call_args[0][0] == ["fossil", "changes", "--differ"]

    def test_returns_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="EDITED foo.py\n")):
            assert isinstance(adapter.changed_files(), list)
553
554
555
class TestFossilAdapterFileHistory:
    """file_history calls 'fossil timeline' and parses output into entry dicts."""

    # Representative `fossil timeline` output: date headers followed by
    # per-checkin lines of the form "HH:MM:SS [hash] message (user: ..., tags: ...)".
    SAMPLE_TIMELINE = """\
=== 2024-01-15 ===
14:23:07 [abc123def456] Add feature. (user: alice, tags: trunk)
09:00:00 [deadbeef1234] Fix typo. (user: bob, tags: trunk)
=== 2024-01-14 ===
22:10:00 [cafe0000abcd] Initial commit. (user: alice, tags: initial)
"""

    def test_returns_list_of_dicts(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout=self.SAMPLE_TIMELINE)):
            history = adapter.file_history("src/main.py")

        assert isinstance(history, list)
        assert len(history) >= 1

    def test_entry_has_required_keys(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout=self.SAMPLE_TIMELINE)):
            history = adapter.file_history("src/main.py")

        for entry in history:
            for key in ("hash", "author", "date", "message"):
                assert key in entry

    def test_limit_passed_to_fossil(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="")) as run_mock:
            adapter.file_history("src/main.py", limit=5)

        # The limit must show up in the command vector passed to fossil.
        assert "5" in run_mock.call_args[0][0]

    def test_empty_output_returns_empty_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="")):
            assert adapter.file_history("nonexistent.py") == []
619
620
621
class TestFossilAdapterBlame:
    """blame calls 'fossil annotate --log' and parses per-line output."""

    # Representative `fossil annotate` output: "version author date: content".
    SAMPLE_ANNOTATE = """\
1.1 alice 2024-01-15: def main():
1.1 alice 2024-01-15: pass
1.2 bob 2024-01-20: # added comment
"""

    def test_returns_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout=self.SAMPLE_ANNOTATE)):
            result = adapter.blame("src/main.py")

        assert isinstance(result, list)
        assert len(result) >= 1

    def test_entry_has_required_keys(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout=self.SAMPLE_ANNOTATE)):
            result = adapter.blame("src/main.py")

        for entry in result:
            for key in ("line", "hash", "author", "content"):
                assert key in entry

    def test_line_numbers_sequential(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout=self.SAMPLE_ANNOTATE)):
            result = adapter.blame("src/main.py")

        if len(result) >= 2:
            assert result[1]["line"] > result[0]["line"]

    def test_calls_fossil_annotate(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="")) as run_mock:
            adapter.blame("src/main.py")

        cmd = run_mock.call_args[0][0]
        assert "fossil" in cmd
        assert "annotate" in cmd
685
686
687
# =============================================================================
# #58 — DependencyIngester
# =============================================================================


class TestDependencyIngesterNPM:
    """ingest_npm parses package.json and creates dependency nodes."""

    def test_ingests_dependencies(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        manifest = {
            "name": "myapp",
            "dependencies": {"react": "^18.0.0", "lodash": "4.17.21"},
            "devDependencies": {"jest": "^29.0.0"},
        }
        manifest_path = tmp_path / "package.json"
        manifest_path.write_text(json.dumps(manifest))

        graph = _make_store()
        stats = DependencyIngester(graph).ingest_npm(str(manifest_path))

        assert stats["packages"] == 3  # 2 deps + 1 devDep
        assert graph.create_node.call_count >= 3

    def test_empty_dependencies(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        manifest_path = tmp_path / "package.json"
        manifest_path.write_text(json.dumps({"name": "empty", "dependencies": {}}))

        stats = DependencyIngester(_make_store()).ingest_npm(str(manifest_path))
        assert stats["packages"] == 0

    def test_peer_dependencies_included(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        manifest_path = tmp_path / "package.json"
        manifest_path.write_text(json.dumps({"peerDependencies": {"react": ">=17"}}))

        stats = DependencyIngester(_make_store()).ingest_npm(str(manifest_path))
        assert stats["packages"] == 1

    def test_creates_depends_on_edge(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        manifest_path = tmp_path / "package.json"
        manifest_path.write_text(json.dumps({"dependencies": {"axios": "^1.0.0"}}))

        graph = _make_store()
        DependencyIngester(graph).ingest_npm(str(manifest_path))
        graph.create_edge.assert_called()
747
748
749
class TestDependencyIngesterPip:
    """ingest_pip parses requirements.txt and creates dependency nodes."""

    def test_requirements_txt(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        req_path = tmp_path / "requirements.txt"
        req_path.write_text(
            "requests>=2.28.0\n"
            "flask[async]==2.3.0\n"
            "# a comment\n"
            "\n"
            "pytest>=7.0 # dev\n"
        )

        stats = DependencyIngester(_make_store()).ingest_pip(str(req_path))
        # Comment and blank lines are skipped; 3 real requirements remain.
        assert stats["packages"] == 3

    def test_skips_comments_and_blanks(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        req_path = tmp_path / "requirements.txt"
        req_path.write_text("# comment\n\n-r other.txt\n")

        stats = DependencyIngester(_make_store()).ingest_pip(str(req_path))
        assert stats["packages"] == 0

    def test_pyproject_toml(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        pyproject = tmp_path / "pyproject.toml"
        pyproject.write_text(
            """\
[project]
name = "myproject"
dependencies = [
"click>=8.0",
"rich>=12.0",
"pydantic>=2.0",
]
"""
        )

        stats = DependencyIngester(_make_store()).ingest_pip(str(pyproject))
        assert stats["packages"] >= 3
796
797
798
class TestDependencyIngesterCargo:
    """ingest_cargo parses Cargo.toml and creates dependency nodes."""

    def test_basic_cargo_toml(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        cargo_path = tmp_path / "Cargo.toml"
        cargo_path.write_text(
            """\
[package]
name = "myapp"

[dependencies]
serde = "1.0"
tokio = { version = "1.0", features = ["full"] }

[dev-dependencies]
criterion = "0.4"
"""
        )

        stats = DependencyIngester(_make_store()).ingest_cargo(str(cargo_path))
        assert stats["packages"] == 3  # serde, tokio, criterion

    def test_empty_cargo_toml(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        cargo_path = tmp_path / "Cargo.toml"
        cargo_path.write_text("[package]\nname = \"empty\"\n")

        stats = DependencyIngester(_make_store()).ingest_cargo(str(cargo_path))
        assert stats["packages"] == 0

    def test_build_dependencies_included(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        cargo_path = tmp_path / "Cargo.toml"
        cargo_path.write_text("[build-dependencies]\nbuild-helper = \"0.3\"\n")

        stats = DependencyIngester(_make_store()).ingest_cargo(str(cargo_path))
        assert stats["packages"] == 1
842
843
844
# =============================================================================
# #61 — SubmoduleIngester
# =============================================================================


class TestDetectSubmodules:
    """detect_submodules parses .gitmodules into structured dicts."""

    def test_no_gitmodules_returns_empty(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        assert SubmoduleIngester(_make_store()).detect_submodules(tmp_path) == []

    def test_single_submodule(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        (tmp_path / ".gitmodules").write_text(
            '[submodule "vendor/lib"]\n'
            " path = vendor/lib\n"
            " url = https://github.com/org/lib.git\n"
        )

        result = SubmoduleIngester(_make_store()).detect_submodules(tmp_path)
        assert len(result) == 1
        entry = result[0]
        assert entry["name"] == "vendor/lib"
        assert entry["path"] == "vendor/lib"
        assert entry["url"] == "https://github.com/org/lib.git"
        assert entry["abs_path"] == str(tmp_path / "vendor/lib")

    def test_multiple_submodules(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        (tmp_path / ".gitmodules").write_text(
            '[submodule "a"]\n path = sub/a\n url = https://example.com/a.git\n'
            '[submodule "b"]\n path = sub/b\n url = https://example.com/b.git\n'
        )

        result = SubmoduleIngester(_make_store()).detect_submodules(tmp_path)
        assert len(result) == 2
        assert {entry["name"] for entry in result} == {"a", "b"}

    def test_missing_url_returns_empty_string(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        (tmp_path / ".gitmodules").write_text('[submodule "x"]\n path = sub/x\n')

        result = SubmoduleIngester(_make_store()).detect_submodules(tmp_path)
        assert result[0]["url"] == ""
897
898
899
class TestIngestWithSubmodules:
    """ingest_with_submodules ingests parent + submodules, creates DEPENDS_ON edges."""

    def test_no_gitmodules_ingests_parent_only(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        graph = _make_store()
        ingester = SubmoduleIngester(graph)

        with patch("navegador.ingestion.parser.RepoIngester") as RepoIngesterMock:
            fake_repo_ingester = MagicMock()
            fake_repo_ingester.ingest.return_value = {"files": 5, "nodes": 10}
            RepoIngesterMock.return_value = fake_repo_ingester

            stats = ingester.ingest_with_submodules(str(tmp_path))

        assert stats["parent"]["files"] == 5
        assert stats["submodules"] == {}
        assert stats["total_files"] == 5

    def test_missing_submodule_path_recorded_as_error(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        (tmp_path / ".gitmodules").write_text(
            '[submodule "missing"]\n path = does/not/exist\n url = https://x.com/r.git\n'
        )

        graph = _make_store()
        ingester = SubmoduleIngester(graph)

        with patch("navegador.ingestion.parser.RepoIngester") as RepoIngesterMock:
            fake_repo_ingester = MagicMock()
            fake_repo_ingester.ingest.return_value = {"files": 3, "nodes": 6}
            RepoIngesterMock.return_value = fake_repo_ingester

            stats = ingester.ingest_with_submodules(str(tmp_path))

        assert "missing" in stats["submodules"]
        assert "error" in stats["submodules"]["missing"]

    def test_existing_submodule_ingested(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        (tmp_path / "libs" / "core").mkdir(parents=True)
        (tmp_path / ".gitmodules").write_text(
            '[submodule "core"]\n path = libs/core\n url = https://x.com/core.git\n'
        )

        graph = _make_store()
        ingester = SubmoduleIngester(graph)

        with patch("navegador.ingestion.parser.RepoIngester") as RepoIngesterMock:
            fake_repo_ingester = MagicMock()
            fake_repo_ingester.ingest.return_value = {"files": 4, "nodes": 8}
            RepoIngesterMock.return_value = fake_repo_ingester

            stats = ingester.ingest_with_submodules(str(tmp_path))

        assert "core" in stats["submodules"]
        assert stats["submodules"]["core"]["files"] == 4
        assert stats["total_files"] == 8  # parent 4 + submodule 4

        # DEPENDS_ON edge from parent → submodule
        graph.create_edge.assert_called()
967
968
969
# =============================================================================
970
# #62 — WorkspaceMode + WorkspaceManager
971
# =============================================================================
972
973
974
class TestWorkspaceMode:
    """The WorkspaceMode enum exposes UNIFIED and FEDERATED string members."""

    def test_has_unified(self):
        from navegador.multirepo import WorkspaceMode

        assert WorkspaceMode.UNIFIED == "unified"

    def test_has_federated(self):
        from navegador.multirepo import WorkspaceMode

        assert WorkspaceMode.FEDERATED == "federated"

    def test_is_str_enum(self):
        from navegador.multirepo import WorkspaceMode

        # Both members must behave as plain strings (str-based enum).
        for member in (WorkspaceMode.UNIFIED, WorkspaceMode.FEDERATED):
            assert isinstance(member, str)

    def test_from_string(self):
        from navegador.multirepo import WorkspaceMode

        # Round-trip: constructing from the raw value hits the right member.
        assert WorkspaceMode("unified") is WorkspaceMode.UNIFIED
        assert WorkspaceMode("federated") is WorkspaceMode.FEDERATED
999
1000
class TestWorkspaceManagerUnified:
    """In UNIFIED mode the manager writes everything to one shared graph."""

    def test_add_repo_creates_repository_node(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        graph = _make_store()
        manager = WorkspaceManager(graph, mode=WorkspaceMode.UNIFIED)
        manager.add_repo("backend", str(tmp_path))

        graph.create_node.assert_called()

    def test_list_repos(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        for repo_name in ("backend", "frontend"):
            manager.add_repo(repo_name, str(tmp_path))

        listed = {entry["name"] for entry in manager.list_repos()}
        assert listed == {"backend", "frontend"}

    def test_ingest_all_calls_repo_ingester(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        manager.add_repo("repo1", str(tmp_path))

        with patch("navegador.ingestion.parser.RepoIngester") as MockIngester:
            fake_ingester = MagicMock()
            fake_ingester.ingest.return_value = {"files": 2, "nodes": 5}
            MockIngester.return_value = fake_ingester

            summary = manager.ingest_all()

        assert "repo1" in summary
        assert summary["repo1"]["files"] == 2

    def test_ingest_all_no_repos_returns_empty(self):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        assert manager.ingest_all() == {}

    def test_search_unified_queries_single_store(self):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        graph = _make_store()
        graph.query.return_value = MagicMock(
            result_set=[["Function", "authenticate", "/src/auth.py"]]
        )
        manager = WorkspaceManager(graph, mode=WorkspaceMode.UNIFIED)
        manager.add_repo("repo", "/tmp/repo")

        hits = manager.search("authenticate")
        assert len(hits) >= 1
        assert hits[0]["name"] == "authenticate"

    def test_ingest_error_recorded_in_summary(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        manager.add_repo("broken", str(tmp_path))

        with patch("navegador.ingestion.parser.RepoIngester") as MockIngester:
            MockIngester.return_value.ingest.side_effect = RuntimeError("parse error")
            summary = manager.ingest_all()

        # Ingest failures are captured per-repo rather than raised.
        assert "broken" in summary
        assert "error" in summary["broken"]
1076
class TestWorkspaceManagerFederated:
    """In FEDERATED mode each repo gets its own navegador_<name> graph."""

    def test_add_repo_sets_federated_graph_name(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.FEDERATED)
        manager.add_repo("api", str(tmp_path))

        assert manager.list_repos()[0]["graph_name"] == "navegador_api"

    def test_unified_graph_name_is_navegador(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        manager.add_repo("api", str(tmp_path))

        assert manager.list_repos()[0]["graph_name"] == "navegador"

    def test_federated_ingest_uses_per_repo_store(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        graph = _make_store()
        # select_graph returns a different mock each time
        graph._client.select_graph.return_value = MagicMock()

        manager = WorkspaceManager(graph, mode=WorkspaceMode.FEDERATED)
        manager.add_repo("svc", str(tmp_path))

        with patch("navegador.ingestion.parser.RepoIngester") as MockIngester:
            fake_ingester = MagicMock()
            fake_ingester.ingest.return_value = {"files": 1, "nodes": 3}
            MockIngester.return_value = fake_ingester

            summary = manager.ingest_all()

        assert "svc" in summary
        # select_graph should have been called with "navegador_svc"
        selected = [call.args[0] for call in graph._client.select_graph.call_args_list]
        assert any("navegador_svc" in name for name in selected)

    def test_federated_search_merges_results(self):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        graph = _make_store()

        # Each per-repo graph returns a result
        per_repo_graph = MagicMock()
        per_repo_graph.query.return_value = MagicMock(
            result_set=[["Function", "auth_check", "/src/auth.py"]]
        )
        graph._client.select_graph.return_value = per_repo_graph

        manager = WorkspaceManager(graph, mode=WorkspaceMode.FEDERATED)
        manager._repos = {
            "backend": {"path": "/tmp/backend", "graph_name": "navegador_backend"},
            "frontend": {"path": "/tmp/frontend", "graph_name": "navegador_frontend"},
        }

        # Two repos each return one result → 2 total (deduplicated to 1 because same name)
        assert len(manager.search("auth")) >= 1
1146
# =============================================================================
# CLI smoke tests
# =============================================================================
1151
class TestCLIPMGroup:
    """The `pm` command group is registered on the main CLI."""

    def test_pm_group_exists(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        outcome = CliRunner().invoke(main, ["pm", "--help"])
        assert outcome.exit_code == 0
        assert "ingest" in outcome.output

    def test_pm_ingest_requires_github(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        # No arguments supplied → the command must refuse to run.
        outcome = CliRunner().invoke(main, ["pm", "ingest"])
        assert outcome.exit_code != 0
1174
class TestCLIDepsGroup:
    """The `deps` command group is registered on the main CLI."""

    def test_deps_group_exists(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        outcome = CliRunner().invoke(main, ["deps", "--help"])
        assert outcome.exit_code == 0
        assert "ingest" in outcome.output
1188
class TestCLISubmodulesGroup:
    """The `submodules` command group is registered on the main CLI."""

    def test_submodules_group_exists(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        outcome = CliRunner().invoke(main, ["submodules", "--help"])
        assert outcome.exit_code == 0

    def test_submodules_list_empty(self, tmp_path):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        # An empty directory has no .gitmodules, so the listing is empty.
        outcome = CliRunner().invoke(main, ["submodules", "list", str(tmp_path)])
        assert outcome.exit_code == 0
        assert "No submodules" in outcome.output
1210
1211
class TestCLIWorkspaceGroup:
    """The `workspace` command group is registered on the main CLI."""

    def test_workspace_group_exists(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        outcome = CliRunner().invoke(main, ["workspace", "--help"])
        assert outcome.exit_code == 0
        assert "ingest" in outcome.output

    def test_workspace_ingest_requires_repos(self, tmp_path):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        # A --db alone is not enough: at least one repo must be given.
        cli_args = ["workspace", "ingest", "--db", str(tmp_path / "g.db")]
        outcome = CliRunner().invoke(main, cli_args)
        assert outcome.exit_code != 0

Keyboard Shortcuts

Open search /
Next entry (timeline) j
Previous entry (timeline) k
Open focused entry Enter
Show this help ?
Toggle theme Top nav button