Navegador

feat: PlanOpticon pipeline, PM integration, dependencies, Fossil, submodules, multi-repo workspace PlanopticonPipeline: end-to-end meeting→knowledge with auto-linking. TicketIngester: GitHub issues (Linear/Jira stubs). DependencyIngester: npm/pip/cargo package tracking. FossilAdapter: full VCS implementation. SubmoduleIngester: .gitmodules parsing and linked ingestion. WorkspaceManager: unified/federated multi-repo KG modes. Closes #7, closes #18, closes #53, closes #55, closes #58, closes #61, closes #62

lmata 2026-03-23 05:32 trunk
Commit 1d4baaf7c830b3c2810ed60c71dec1ebbc27d98d4fc5a766975696204ad49750
--- navegador/cli/commands.py
+++ navegador/cli/commands.py
@@ -1260,5 +1260,742 @@
12601260
async def _run():
12611261
async with stdio_server() as (read_stream, write_stream):
12621262
await server.run(read_stream, write_stream, server.create_initialization_options())
12631263
12641264
asyncio.run(_run())
1265
+
1266
+
1267
+# ── ANALYSIS: impact ──────────────────────────────────────────────────────────
1268
+
1269
+
1270
@main.command()
@click.argument("name")
@click.option("--file", "file_path", default="", help="Narrow to a specific file.")
@click.option("--depth", default=3, show_default=True, help="Traversal depth.")
@DB_OPTION
@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
def impact(name: str, file_path: str, depth: int, db: str, as_json: bool):
    """Blast-radius analysis — what does changing NAME affect?

    Traverses CALLS, REFERENCES, INHERITS, IMPLEMENTS, ANNOTATES edges
    outward to find all downstream symbols and files affected by a change.
    """
    from navegador.analysis.impact import ImpactAnalyzer

    analyzer = ImpactAnalyzer(_get_store(db))
    result = analyzer.blast_radius(name, file_path=file_path, depth=depth)

    if as_json:
        click.echo(json.dumps(result.to_dict(), indent=2))
        return

    console.print(
        f"[bold]Blast radius:[/bold] [cyan]{name}[/cyan] (depth={depth})"
    )
    if not result.affected_nodes:
        console.print("[yellow]No affected nodes found.[/yellow]")
        return

    # One row per downstream symbol reachable from NAME within the depth limit.
    table = Table(title=f"Affected nodes ({len(result.affected_nodes)})")
    table.add_column("Type", style="cyan")
    table.add_column("Name", style="bold")
    table.add_column("File")
    table.add_column("Line", justify="right")
    for node in result.affected_nodes:
        line = str(node["line_start"] or "")
        table.add_row(node["type"], node["name"], node["file_path"], line)
    console.print(table)

    if result.affected_files:
        console.print(f"\n[bold]Affected files ({len(result.affected_files)}):[/bold]")
        for affected in result.affected_files:
            console.print(f"  {affected}")

    if result.affected_knowledge:
        console.print(f"\n[bold]Affected knowledge ({len(result.affected_knowledge)}):[/bold]")
        for kn in result.affected_knowledge:
            console.print(f"  [{kn['type']}] {kn['name']}")
1317
+
1318
+
1319
+# ── ANALYSIS: flow trace ──────────────────────────────────────────────────────
1320
+
1321
+
1322
@main.command()
@click.argument("name")
@click.option("--file", "file_path", default="", help="Narrow to a specific file.")
@click.option("--depth", default=10, show_default=True, help="Maximum call depth.")
@DB_OPTION
@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
def trace(name: str, file_path: str, depth: int, db: str, as_json: bool):
    """Execution flow trace — follow call chains from an entry point.

    Traverses CALLS edges forward from NAME, returning all execution paths
    up to the given depth.
    """
    from navegador.analysis.flow import FlowTracer

    tracer = FlowTracer(_get_store(db))
    chains = tracer.trace(name, file_path=file_path, max_depth=depth)

    if as_json:
        click.echo(json.dumps([chain.to_list() for chain in chains], indent=2))
        return

    if not chains:
        console.print(f"[yellow]No call chains found from[/yellow] [cyan]{name}[/cyan].")
        return

    console.print(
        f"[bold]Call chains from[/bold] [cyan]{name}[/cyan] — {len(chains)} path(s)"
    )
    for idx, chain in enumerate(chains, 1):
        steps = chain.to_list()
        if steps:
            # The first step contributes its caller; every step contributes its callee.
            symbols = [steps[0]["caller"]]
            symbols.extend(step["callee"] for step in steps)
            path_str = " → ".join(symbols)
        else:
            path_str = name
        console.print(f"  {idx}. {path_str}")
1355
+
1356
+
1357
+# ── ANALYSIS: dead code ───────────────────────────────────────────────────────
1358
+
1359
+
1360
@main.command()
@DB_OPTION
@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
def deadcode(db: str, as_json: bool):
    """Detect dead code — unreachable functions, classes, and orphan files.

    A function/class is dead if nothing calls, references, or imports it.
    An orphan file is one that no other file imports.
    """
    from navegador.analysis.deadcode import DeadCodeDetector

    report = DeadCodeDetector(_get_store(db)).detect()

    if as_json:
        click.echo(json.dumps(report.to_dict(), indent=2))
        return

    summary = report.to_dict()["summary"]
    console.print(
        f"[bold]Dead code report:[/bold] "
        f"{summary['unreachable_functions']} dead functions, "
        f"{summary['unreachable_classes']} dead classes, "
        f"{summary['orphan_files']} orphan files"
    )

    dead_fns = report.unreachable_functions
    if dead_fns:
        fn_table = Table(title=f"Unreachable functions/methods ({len(dead_fns)})")
        fn_table.add_column("Type", style="cyan")
        fn_table.add_column("Name", style="bold")
        fn_table.add_column("File")
        fn_table.add_column("Line", justify="right")
        for entry in dead_fns:
            fn_table.add_row(
                entry["type"], entry["name"], entry["file_path"], str(entry["line_start"] or "")
            )
        console.print(fn_table)

    dead_classes = report.unreachable_classes
    if dead_classes:
        cls_table = Table(title=f"Unreachable classes ({len(dead_classes)})")
        cls_table.add_column("Name", style="bold")
        cls_table.add_column("File")
        cls_table.add_column("Line", justify="right")
        for entry in dead_classes:
            cls_table.add_row(entry["name"], entry["file_path"], str(entry["line_start"] or ""))
        console.print(cls_table)

    if report.orphan_files:
        console.print(f"\n[bold]Orphan files ({len(report.orphan_files)}):[/bold]")
        for orphan in report.orphan_files:
            console.print(f"  {orphan}")

    # All-clear message only when every category came back empty.
    if not any([dead_fns, dead_classes, report.orphan_files]):
        console.print("[green]No dead code found.[/green]")
1411
+
1412
+
1413
+# ── ANALYSIS: test mapping ────────────────────────────────────────────────────
1414
+
1415
+
1416
@main.command()
@DB_OPTION
@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
def testmap(db: str, as_json: bool):
    """Map test functions to production code via TESTS edges.

    Finds functions starting with test_, resolves the production symbol
    via CALLS edges and name heuristics, then writes TESTS edges to the graph.
    """
    from navegador.analysis.testmap import TestMapper

    result = TestMapper(_get_store(db)).map_tests()

    if as_json:
        click.echo(json.dumps(result.to_dict(), indent=2))
        return

    console.print(
        f"[bold]Test map:[/bold] {len(result.links)} linked, "
        f"{len(result.unmatched_tests)} unmatched, "
        f"{result.edges_created} TESTS edges created"
    )

    if result.links:
        link_table = Table(title=f"Test -> production links ({len(result.links)})")
        link_table.add_column("Test", style="cyan")
        link_table.add_column("Production symbol", style="bold")
        link_table.add_column("File")
        link_table.add_column("Source")
        for link in result.links:
            link_table.add_row(link.test_name, link.prod_name, link.prod_file, link.source)
        console.print(link_table)

    if result.unmatched_tests:
        console.print(f"\n[yellow]Unmatched tests ({len(result.unmatched_tests)}):[/yellow]")
        for unmatched in result.unmatched_tests:
            console.print(f"  {unmatched['name']} ({unmatched['file_path']})")
1453
+
1454
+
1455
+# ── ANALYSIS: cycles ──────────────────────────────────────────────────────────
1456
+
1457
+
1458
@main.command()
@DB_OPTION
@click.option("--imports", "check_imports", is_flag=True, default=False,
              help="Check import cycles only.")
@click.option("--calls", "check_calls", is_flag=True, default=False,
              help="Check call cycles only.")
@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
def cycles(db: str, check_imports: bool, check_calls: bool, as_json: bool):
    """Detect circular dependencies in import and call graphs.

    By default checks both import cycles and call cycles.
    Use --imports or --calls to restrict to one graph.
    """
    from navegador.analysis.cycles import CycleDetector

    detector = CycleDetector(_get_store(db))
    # A graph is checked when its own flag is set OR neither flag is set
    # (default = check both). `A or (not A and not B)` reduces to `A or not B`.
    run_imports = check_imports or not check_calls
    run_calls = check_calls or not check_imports

    import_cycles = detector.detect_import_cycles() if run_imports else []
    call_cycles = detector.detect_call_cycles() if run_calls else []

    if as_json:
        click.echo(
            json.dumps(
                {"import_cycles": import_cycles, "call_cycles": call_cycles}, indent=2
            )
        )
        return

    if not import_cycles and not call_cycles:
        console.print("[green]No circular dependencies found.[/green]")
        return

    # Both tables have identical shape; render each non-empty group once,
    # closing every cycle back to its first node (a -> b -> ... -> a).
    for title, found in (("Import cycles", import_cycles), ("Call cycles", call_cycles)):
        if not found:
            continue
        table = Table(title=f"{title} ({len(found)})")
        table.add_column("#", justify="right")
        table.add_column("Cycle")
        for i, cycle in enumerate(found, 1):
            table.add_row(str(i), " -> ".join(cycle) + f" -> {cycle[0]}")
        console.print(table)
1507
+
1508
+
1509
+# ── Multi-repo (#16) ─────────────────────────────────────────────────────────
1510
+
1511
+
1512
@main.group()
def repo():
    """Manage and query across multiple repositories."""
    # Click group stub: subcommands attach themselves via @repo.command(...).
1515
+
1516
+
1517
@repo.command("add")
@click.argument("name")
@click.argument("path", type=click.Path())
@DB_OPTION
def repo_add(name: str, path: str, db: str):
    """Register a repository by NAME and PATH."""
    from navegador.multirepo import MultiRepoManager

    manager = MultiRepoManager(_get_store(db))
    manager.add_repo(name, path)
    console.print(f"[green]Repo registered:[/green] {name} → {path}")
1528
+
1529
+
1530
@repo.command("list")
@DB_OPTION
@click.option("--json", "as_json", is_flag=True)
def repo_list(db: str, as_json: bool):
    """List all registered repositories."""
    from navegador.multirepo import MultiRepoManager

    registered = MultiRepoManager(_get_store(db)).list_repos()
    if as_json:
        click.echo(json.dumps(registered, indent=2))
        return
    if not registered:
        console.print("[yellow]No repositories registered.[/yellow]")
        return
    table = Table(title="Registered repositories")
    table.add_column("Name", style="cyan")
    table.add_column("Path")
    for entry in registered:
        table.add_row(entry["name"], entry["path"])
    console.print(table)
1550
+
1551
+
1552
@repo.command("ingest-all")
@DB_OPTION
@click.option("--clear", is_flag=True, help="Clear graph before ingesting.")
@click.option("--json", "as_json", is_flag=True)
def repo_ingest_all(db: str, clear: bool, as_json: bool):
    """Ingest all registered repositories."""
    from navegador.multirepo import MultiRepoManager

    manager = MultiRepoManager(_get_store(db))
    with console.status("[bold]Ingesting all repos…[/bold]"):
        summary = manager.ingest_all(clear=clear)
    if as_json:
        click.echo(json.dumps(summary, indent=2))
        return
    # One metrics table per repo, mirroring the single-repo ingest output.
    for repo_name, repo_stats in summary.items():
        table = Table(title=f"Repo: {repo_name}")
        table.add_column("Metric", style="cyan")
        table.add_column("Count", justify="right", style="green")
        for metric, count in repo_stats.items():
            table.add_row(str(metric).capitalize(), str(count))
        console.print(table)
1573
+
1574
+
1575
@repo.command("search")
@click.argument("query")
@DB_OPTION
@click.option("--limit", default=20, show_default=True)
@click.option("--json", "as_json", is_flag=True)
def repo_search(query: str, db: str, limit: int, as_json: bool):
    """Search across all registered repositories."""
    from navegador.multirepo import MultiRepoManager

    hits = MultiRepoManager(_get_store(db)).cross_repo_search(query, limit=limit)
    if as_json:
        click.echo(json.dumps(hits, indent=2))
        return
    if not hits:
        console.print("[yellow]No results.[/yellow]")
        return
    table = Table(title=f"Cross-repo search: {query!r}")
    table.add_column("Label", style="cyan")
    table.add_column("Name", style="bold")
    table.add_column("File/Path")
    for hit in hits:
        table.add_row(hit["label"], hit["name"], hit["file_path"])
    console.print(table)
1598
+
1599
+
1600
+# ── Rename (#26) ──────────────────────────────────────────────────────────────
1601
+
1602
+
1603
@main.command()
@click.argument("old_name")
@click.argument("new_name")
@DB_OPTION
@click.option("--preview", is_flag=True, help="Show what would change without applying.")
@click.option("--json", "as_json", is_flag=True)
def rename(old_name: str, new_name: str, db: str, preview: bool, as_json: bool):
    """Rename a symbol across the graph (coordinated rename).

    \b
    Examples:
      navegador rename old_func new_func --preview
      navegador rename MyClass RenamedClass
    """
    from navegador.refactor import SymbolRenamer

    renamer = SymbolRenamer(_get_store(db))
    # preview_rename and apply_rename return result objects with identical
    # attributes, so the summary dict is built once instead of being
    # duplicated verbatim in each branch (as the original did).
    if preview:
        result = renamer.preview_rename(old_name, new_name)
    else:
        result = renamer.apply_rename(old_name, new_name)
    data = {
        "old_name": result.old_name,
        "new_name": result.new_name,
        "affected_files": result.affected_files,
        "affected_nodes": len(result.affected_nodes),
        "edges_updated": result.edges_updated,
    }

    if as_json:
        click.echo(json.dumps(data, indent=2))
        return

    action = "Preview" if preview else "Renamed"
    console.print(f"[green]{action}:[/green] {old_name!r} → {new_name!r}")
    console.print(f"  Nodes affected : {data['affected_nodes']}")
    console.print(f"  Edges updated  : {data['edges_updated']}")
    if data["affected_files"]:
        console.print("  Files:")
        for f in data["affected_files"]:
            console.print(f"    {f}")
1651
+
1652
+
1653
+# ── CODEOWNERS (#39) ──────────────────────────────────────────────────────────
1654
+
1655
+
1656
@main.command()
@click.argument("repo_path", type=click.Path(exists=True))
@DB_OPTION
@click.option("--json", "as_json", is_flag=True)
def codeowners(repo_path: str, db: str, as_json: bool):
    """Parse CODEOWNERS and map ownership to Person nodes."""
    from navegador.codeowners import CodeownersIngester

    stats = CodeownersIngester(_get_store(db)).ingest(repo_path)
    if as_json:
        click.echo(json.dumps(stats, indent=2))
        return
    summary = (
        f"[green]CODEOWNERS ingested:[/green] "
        f"{stats['owners']} owners, {stats['patterns']} patterns, {stats['edges']} edges"
    )
    console.print(summary)
1672
+
1673
+
1674
+# ── ADR (#40) ─────────────────────────────────────────────────────────────────
1675
+
1676
+
1677
@main.group()
def adr():
    """Ingest Architecture Decision Records (ADRs) into the knowledge graph."""
    # Click group stub: subcommands attach themselves via @adr.command(...).
1680
+
1681
+
1682
@adr.command("ingest")
@click.argument("adr_dir", type=click.Path(exists=True))
@DB_OPTION
@click.option("--json", "as_json", is_flag=True)
def adr_ingest(adr_dir: str, db: str, as_json: bool):
    """Parse ADR markdown files and create Decision nodes."""
    from navegador.adr import ADRIngester

    stats = ADRIngester(_get_store(db)).ingest(adr_dir)
    if as_json:
        click.echo(json.dumps(stats, indent=2))
        return
    summary = (
        f"[green]ADRs ingested:[/green] {stats['decisions']} decisions, {stats['skipped']} skipped"
    )
    console.print(summary)
1697
+
1698
+
1699
+# ── API schema (#41) ─────────────────────────────────────────────────────────
1700
+
1701
+
1702
@main.group()
def api():
    """Ingest API schema files (OpenAPI, GraphQL) into the graph."""
    # Click group stub: subcommands attach themselves via @api.command(...).
1705
+
1706
+
1707
@api.command("ingest")
@click.argument("path", type=click.Path(exists=True))
@DB_OPTION
@click.option(
    "--type",
    "schema_type",
    type=click.Choice(["openapi", "graphql", "auto"]),
    default="auto",
    show_default=True,
    help="Schema type. auto detects from file extension.",
)
@click.option("--json", "as_json", is_flag=True)
def api_ingest(path: str, db: str, schema_type: str, as_json: bool):
    """Parse an OpenAPI or GraphQL schema and create API endpoint nodes.

    \b
    Examples:
      navegador api ingest openapi.yaml
      navegador api ingest schema.graphql --type graphql
      navegador api ingest swagger.json --type openapi
    """
    from pathlib import Path

    from navegador.api_schema import APISchemaIngester

    ingester = APISchemaIngester(_get_store(db))

    if schema_type == "auto":
        # .graphql/.gql extensions mean GraphQL; everything else is OpenAPI.
        suffix = Path(path).suffix.lower()
        schema_type = "graphql" if suffix in (".graphql", ".gql") else "openapi"

    if schema_type == "graphql":
        label, stats = "GraphQL", ingester.ingest_graphql(path)
    else:
        label, stats = "OpenAPI", ingester.ingest_openapi(path)

    if as_json:
        click.echo(json.dumps(stats, indent=2))
        return

    table = Table(title=f"{label} schema ingested")
    table.add_column("Metric", style="cyan")
    table.add_column("Count", justify="right", style="green")
    for metric, count in stats.items():
        table.add_row(metric.replace("_", " ").capitalize(), str(count))
    console.print(table)
1758
+
1759
+
1760
+# ── PM: project management ticket ingestion (#53) ─────────────────────────────
1761
+
1762
+
1763
@main.group()
def pm():
    """Ingest project management tickets (GitHub Issues, Linear, Jira)."""
    # Click group stub: subcommands attach themselves via @pm.command(...).
1766
+
1767
+
1768
@pm.command("ingest")
@click.option("--github", "github_repo", default="", metavar="OWNER/REPO",
              help="GitHub repository in owner/repo format.")
@click.option("--token", default="", envvar="GITHUB_TOKEN",
              help="GitHub personal access token.")
@click.option("--state", default="open",
              type=click.Choice(["open", "closed", "all"]),
              show_default=True,
              help="GitHub issue state filter.")
@click.option("--limit", default=100, show_default=True,
              help="Maximum number of issues to fetch.")
@DB_OPTION
@click.option("--json", "as_json", is_flag=True)
def pm_ingest(github_repo: str, token: str, state: str, limit: int, db: str, as_json: bool):
    """Ingest tickets from a PM tool into the knowledge graph.

    \b
    Examples:
      navegador pm ingest --github owner/repo
      navegador pm ingest --github owner/repo --token ghp_...
      navegador pm ingest --github owner/repo --state all --limit 200
    """
    # GitHub is currently the only implemented backend.
    if not github_repo:
        raise click.UsageError("Provide --github <owner/repo> (more backends coming in a future release).")

    from navegador.pm import TicketIngester

    ingester = TicketIngester(_get_store(db))
    stats = ingester.ingest_github_issues(github_repo, token=token, state=state, limit=limit)

    if as_json:
        click.echo(json.dumps(stats, indent=2))
        return

    table = Table(title=f"PM import: {github_repo}")
    table.add_column("Metric", style="cyan")
    table.add_column("Count", justify="right", style="green")
    for metric, count in stats.items():
        table.add_row(metric.capitalize(), str(count))
    console.print(table)
1807
+
1808
+
1809
+# ── Dependencies: external package ingestion (#58) ────────────────────────────
1810
+
1811
+
1812
@main.group()
def deps():
    """Ingest external package dependencies (npm, pip, cargo)."""
    # Click group stub: subcommands attach themselves via @deps.command(...).
1815
+
1816
+
1817
@deps.command("ingest")
@click.argument("path", type=click.Path(exists=True))
@click.option(
    "--type", "dep_type",
    type=click.Choice(["auto", "npm", "pip", "cargo"]),
    default="auto",
    show_default=True,
    help="Manifest type. auto detects from filename.",
)
@DB_OPTION
@click.option("--json", "as_json", is_flag=True)
def deps_ingest(path: str, dep_type: str, db: str, as_json: bool):
    """Ingest external dependencies from a package manifest.

    \b
    PATH can be:
      package.json      (npm)
      requirements.txt  (pip)
      pyproject.toml    (pip)
      Cargo.toml        (cargo)

    \b
    Examples:
      navegador deps ingest package.json
      navegador deps ingest requirements.txt
      navegador deps ingest Cargo.toml --type cargo
    """
    from pathlib import Path

    from navegador.dependencies import DependencyIngester

    ingester = DependencyIngester(_get_store(db))
    manifest = Path(path)

    if dep_type == "auto":
        # Case-insensitive filename-based detection.
        detected = {
            "package.json": "npm",
            "requirements.txt": "pip",
            "pyproject.toml": "pip",
            "cargo.toml": "cargo",
        }.get(manifest.name.lower())
        if detected is None:
            raise click.UsageError(
                f"Cannot auto-detect type for {manifest.name!r}. Use --type npm|pip|cargo."
            )
        dep_type = detected

    handlers = {
        "npm": ingester.ingest_npm,
        "pip": ingester.ingest_pip,
        "cargo": ingester.ingest_cargo,
    }
    stats = handlers[dep_type](path)

    if as_json:
        click.echo(json.dumps(stats, indent=2))
        return

    console.print(
        f"[green]Dependencies ingested[/green] ({dep_type}): "
        f"{stats['packages']} packages"
    )
1878
+
1879
+
1880
+# ── Submodules: ingest parent + submodules (#61) ──────────────────────────────
1881
+
1882
+
1883
@main.group()
def submodules():
    """Ingest a parent repository and all its git submodules."""
    # Click group stub: subcommands attach themselves via @submodules.command(...).
1886
+
1887
+
1888
@submodules.command("ingest")
@click.argument("repo_path", type=click.Path(exists=True))
@DB_OPTION
@click.option("--clear", is_flag=True, help="Clear existing graph before ingesting.")
@click.option("--json", "as_json", is_flag=True)
def submodules_ingest(repo_path: str, db: str, clear: bool, as_json: bool):
    """Ingest a repository and all its git submodules as linked nodes.

    \b
    Examples:
      navegador submodules ingest .
      navegador submodules ingest /path/to/repo --clear
    """
    from navegador.submodules import SubmoduleIngester

    stats = SubmoduleIngester(_get_store(db)).ingest_with_submodules(repo_path, clear=clear)

    if as_json:
        click.echo(json.dumps(stats, indent=2))
        return

    submodule_names = list(stats.get("submodules", {}).keys())
    console.print(
        f"[green]Submodule ingestion complete[/green]: "
        f"{stats.get('total_files', 0)} total files, "
        f"{len(submodule_names)} submodule(s)"
    )
    if submodule_names:
        console.print("  Submodules: " + ", ".join(submodule_names))
1917
+
1918
+
1919
@submodules.command("list")
@click.argument("repo_path", type=click.Path(exists=True), default=".")
def submodules_list(repo_path: str):
    """List git submodules found in REPO_PATH."""
    from navegador.submodules import SubmoduleIngester

    # NOTE(review): __new__ bypasses SubmoduleIngester.__init__ so no graph
    # store is needed just to read .gitmodules — this presumably relies on
    # detect_submodules never touching self.store; confirm against the
    # ingester implementation.
    ingester = SubmoduleIngester.__new__(SubmoduleIngester)
    ingester.store = None  # type: ignore[assignment]
    entries = ingester.detect_submodules(repo_path)

    if not entries:
        console.print("[yellow]No submodules found (no .gitmodules).[/yellow]")
        return

    table = Table(title=f"Submodules in {repo_path}")
    table.add_column("Name", style="cyan")
    table.add_column("Path")
    table.add_column("URL")
    for entry in entries:
        table.add_row(entry["name"], entry["path"], entry.get("url", ""))
    console.print(table)
1940
+
1941
+
1942
+# ── Workspace: multi-repo (#62) ────────────────────────────────────────────────
1943
+
1944
+
1945
@main.group()
def workspace():
    """Manage a multi-repo workspace (unified or federated graph)."""
    # Click group stub: subcommands attach themselves via @workspace.command(...).
1948
+
1949
+
1950
@workspace.command("ingest")
@click.argument("repos", nargs=-1, metavar="NAME=PATH ...")
@click.option(
    "--mode",
    type=click.Choice(["unified", "federated"]),
    default="unified",
    show_default=True,
    help="Graph mode: unified (shared graph) or federated (per-repo graphs).",
)
@DB_OPTION
@click.option("--clear", is_flag=True)
@click.option("--json", "as_json", is_flag=True)
def workspace_ingest(repos: tuple, mode: str, db: str, clear: bool, as_json: bool):
    """Ingest multiple repositories as a workspace.

    \b
    REPOS is a list of NAME=PATH pairs, e.g.:
      navegador workspace ingest backend=/path/to/backend frontend=/path/to/frontend

    \b
    Examples:
      navegador workspace ingest backend=. frontend=../frontend --mode unified
      navegador workspace ingest api=./api worker=./worker --mode federated
    """
    from navegador.multirepo import WorkspaceManager, WorkspaceMode

    if not repos:
        raise click.UsageError("Provide at least one NAME=PATH repo.")

    manager = WorkspaceManager(_get_store(db), mode=WorkspaceMode(mode))
    for spec in repos:
        # Each positional argument must look like NAME=PATH; partition splits
        # on the first '=' and leaves sep empty when none is present.
        repo_name, sep, repo_path = spec.partition("=")
        if not sep:
            raise click.UsageError(
                f"Invalid repo spec {spec!r}. Expected NAME=PATH format."
            )
        manager.add_repo(repo_name.strip(), repo_path.strip())

    stats = manager.ingest_all(clear=clear)

    if as_json:
        click.echo(json.dumps(stats, indent=2))
        return

    for repo_name, repo_stats in stats.items():
        if "error" in repo_stats:
            console.print(f"[red]Error ingesting {repo_name}:[/red] {repo_stats['error']}")
            continue
        console.print(
            f"[green]{repo_name}[/green]: "
            f"{repo_stats.get('files', 0)} files, "
            f"{repo_stats.get('nodes', 0)} nodes"
        )
12652002
12662003
ADDED navegador/dependencies.py
12672004
ADDED navegador/planopticon_pipeline.py
12682005
ADDED navegador/pm.py
12692006
ADDED navegador/submodules.py
--- navegador/cli/commands.py
+++ navegador/cli/commands.py
@@ -1260,5 +1260,742 @@
1260 async def _run():
1261 async with stdio_server() as (read_stream, write_stream):
1262 await server.run(read_stream, write_stream, server.create_initialization_options())
1263
1264 asyncio.run(_run())
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1265
1266 ADDED navegador/dependencies.py
1267 ADDED navegador/planopticon_pipeline.py
1268 ADDED navegador/pm.py
1269 ADDED navegador/submodules.py
--- navegador/cli/commands.py
+++ navegador/cli/commands.py
@@ -1260,5 +1260,742 @@
1260 async def _run():
1261 async with stdio_server() as (read_stream, write_stream):
1262 await server.run(read_stream, write_stream, server.create_initialization_options())
1263
1264 asyncio.run(_run())
1265
1266
1267 # ── ANALYSIS: impact ──────────────────────────────────────────────────────────
1268
1269
@main.command()
@click.argument("name")
@click.option("--file", "file_path", default="", help="Narrow to a specific file.")
@click.option("--depth", default=3, show_default=True, help="Traversal depth.")
@DB_OPTION
@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
def impact(name: str, file_path: str, depth: int, db: str, as_json: bool):
    """Blast-radius analysis — what does changing NAME affect?

    Traverses CALLS, REFERENCES, INHERITS, IMPLEMENTS, ANNOTATES edges
    outward to find all downstream symbols and files affected by a change.
    """
    # Imported lazily so the CLI starts fast when this command isn't used.
    from navegador.analysis.impact import ImpactAnalyzer

    analyzer = ImpactAnalyzer(_get_store(db))
    result = analyzer.blast_radius(name, file_path=file_path, depth=depth)

    # Machine-readable output bypasses all rich rendering.
    if as_json:
        click.echo(json.dumps(result.to_dict(), indent=2))
        return

    console.print(f"[bold]Blast radius:[/bold] [cyan]{name}[/cyan] (depth={depth})")
    if not result.affected_nodes:
        console.print("[yellow]No affected nodes found.[/yellow]")
        return

    node_table = Table(title=f"Affected nodes ({len(result.affected_nodes)})")
    node_table.add_column("Type", style="cyan")
    node_table.add_column("Name", style="bold")
    node_table.add_column("File")
    node_table.add_column("Line", justify="right")
    for entry in result.affected_nodes:
        node_table.add_row(
            entry["type"],
            entry["name"],
            entry["file_path"],
            str(entry["line_start"] or ""),
        )
    console.print(node_table)

    if result.affected_files:
        console.print(f"\n[bold]Affected files ({len(result.affected_files)}):[/bold]")
        for affected_path in result.affected_files:
            console.print(f"  {affected_path}")

    if result.affected_knowledge:
        console.print(f"\n[bold]Affected knowledge ({len(result.affected_knowledge)}):[/bold]")
        for item in result.affected_knowledge:
            console.print(f"  [{item['type']}] {item['name']}")
1317
1318
1319 # ── ANALYSIS: flow trace ──────────────────────────────────────────────────────
1320
1321
@main.command()
@click.argument("name")
@click.option("--file", "file_path", default="", help="Narrow to a specific file.")
@click.option("--depth", default=10, show_default=True, help="Maximum call depth.")
@DB_OPTION
@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
def trace(name: str, file_path: str, depth: int, db: str, as_json: bool):
    """Execution flow trace — follow call chains from an entry point.

    Traverses CALLS edges forward from NAME, returning all execution paths
    up to the given depth.
    """
    from navegador.analysis.flow import FlowTracer

    tracer = FlowTracer(_get_store(db))
    chains = tracer.trace(name, file_path=file_path, max_depth=depth)

    if as_json:
        click.echo(json.dumps([chain.to_list() for chain in chains], indent=2))
        return

    if not chains:
        console.print(f"[yellow]No call chains found from[/yellow] [cyan]{name}[/cyan].")
        return

    console.print(f"[bold]Call chains from[/bold] [cyan]{name}[/cyan] — {len(chains)} path(s)")
    for idx, chain in enumerate(chains, start=1):
        steps = chain.to_list()
        if steps:
            # First hop contributes its caller; every hop contributes its callee.
            labels = [steps[0]["caller"]]
            labels.extend(step["callee"] for step in steps)
            rendered = " → ".join(labels)
        else:
            rendered = name
        console.print(f"  {idx}. {rendered}")
1355
1356
1357 # ── ANALYSIS: dead code ───────────────────────────────────────────────────────
1358
1359
@main.command()
@DB_OPTION
@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
def deadcode(db: str, as_json: bool):
    """Detect dead code — unreachable functions, classes, and orphan files.

    A function/class is dead if nothing calls, references, or imports it.
    An orphan file is one that no other file imports.
    """
    from navegador.analysis.deadcode import DeadCodeDetector

    report = DeadCodeDetector(_get_store(db)).detect()

    if as_json:
        click.echo(json.dumps(report.to_dict(), indent=2))
        return

    counts = report.to_dict()["summary"]
    console.print(
        f"[bold]Dead code report:[/bold] "
        f"{counts['unreachable_functions']} dead functions, "
        f"{counts['unreachable_classes']} dead classes, "
        f"{counts['orphan_files']} orphan files"
    )

    dead_funcs = report.unreachable_functions
    if dead_funcs:
        func_table = Table(title=f"Unreachable functions/methods ({len(dead_funcs)})")
        func_table.add_column("Type", style="cyan")
        func_table.add_column("Name", style="bold")
        func_table.add_column("File")
        func_table.add_column("Line", justify="right")
        for entry in dead_funcs:
            func_table.add_row(
                entry["type"], entry["name"], entry["file_path"], str(entry["line_start"] or "")
            )
        console.print(func_table)

    dead_classes = report.unreachable_classes
    if dead_classes:
        class_table = Table(title=f"Unreachable classes ({len(dead_classes)})")
        class_table.add_column("Name", style="bold")
        class_table.add_column("File")
        class_table.add_column("Line", justify="right")
        for entry in dead_classes:
            class_table.add_row(entry["name"], entry["file_path"], str(entry["line_start"] or ""))
        console.print(class_table)

    if report.orphan_files:
        console.print(f"\n[bold]Orphan files ({len(report.orphan_files)}):[/bold]")
        for orphan in report.orphan_files:
            console.print(f"  {orphan}")

    # Celebrate only when every category came back empty.
    if not (dead_funcs or dead_classes or report.orphan_files):
        console.print("[green]No dead code found.[/green]")
1411
1412
1413 # ── ANALYSIS: test mapping ────────────────────────────────────────────────────
1414
1415
@main.command()
@DB_OPTION
@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
def testmap(db: str, as_json: bool):
    """Map test functions to production code via TESTS edges.

    Finds functions starting with test_, resolves the production symbol
    via CALLS edges and name heuristics, then writes TESTS edges to the graph.
    """
    from navegador.analysis.testmap import TestMapper

    result = TestMapper(_get_store(db)).map_tests()

    if as_json:
        click.echo(json.dumps(result.to_dict(), indent=2))
        return

    console.print(
        f"[bold]Test map:[/bold] {len(result.links)} linked, "
        f"{len(result.unmatched_tests)} unmatched, "
        f"{result.edges_created} TESTS edges created"
    )

    if result.links:
        link_table = Table(title=f"Test -> production links ({len(result.links)})")
        link_table.add_column("Test", style="cyan")
        link_table.add_column("Production symbol", style="bold")
        link_table.add_column("File")
        link_table.add_column("Source")
        for link in result.links:
            link_table.add_row(link.test_name, link.prod_name, link.prod_file, link.source)
        console.print(link_table)

    if result.unmatched_tests:
        console.print(f"\n[yellow]Unmatched tests ({len(result.unmatched_tests)}):[/yellow]")
        for unmatched in result.unmatched_tests:
            console.print(f"  {unmatched['name']} ({unmatched['file_path']})")
1453
1454
1455 # ── ANALYSIS: cycles ──────────────────────────────────────────────────────────
1456
1457
@main.command()
@DB_OPTION
@click.option("--imports", "check_imports", is_flag=True, default=False,
              help="Check import cycles only.")
@click.option("--calls", "check_calls", is_flag=True, default=False,
              help="Check call cycles only.")
@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
def cycles(db: str, check_imports: bool, check_calls: bool, as_json: bool):
    """Detect circular dependencies in import and call graphs.

    By default checks both import cycles and call cycles.
    Use --imports or --calls to restrict to one graph.
    """
    from navegador.analysis.cycles import CycleDetector

    detector = CycleDetector(_get_store(db))
    # When neither restriction flag is given, inspect both graphs.
    neither_flag = not (check_imports or check_calls)
    import_cycles = detector.detect_import_cycles() if (check_imports or neither_flag) else []
    call_cycles = detector.detect_call_cycles() if (check_calls or neither_flag) else []

    if as_json:
        payload = {"import_cycles": import_cycles, "call_cycles": call_cycles}
        click.echo(json.dumps(payload, indent=2))
        return

    if not import_cycles and not call_cycles:
        console.print("[green]No circular dependencies found.[/green]")
        return

    def _render(kind_label: str, found: list) -> None:
        # Shared renderer: both graphs use an identical two-column table.
        tbl = Table(title=f"{kind_label} ({len(found)})")
        tbl.add_column("#", justify="right")
        tbl.add_column("Cycle")
        for num, cycle in enumerate(found, 1):
            # Append the first node again so the loop visually closes.
            tbl.add_row(str(num), " -> ".join(cycle) + f" -> {cycle[0]}")
        console.print(tbl)

    if import_cycles:
        _render("Import cycles", import_cycles)
    if call_cycles:
        _render("Call cycles", call_cycles)
1507
1508
1509 # ── Multi-repo (#16) ─────────────────────────────────────────────────────────
1510
1511
# Click sub-command group: `navegador repo <add|list|ingest-all|search>`.
@main.group()
def repo():
    """Manage and query across multiple repositories."""
    # Intentionally empty: subcommands attach via @repo.command(...).
1515
1516
@repo.command("add")
@click.argument("name")
@click.argument("path", type=click.Path())
@DB_OPTION
def repo_add(name: str, path: str, db: str):
    """Register a repository by NAME and PATH."""
    from navegador.multirepo import MultiRepoManager

    manager = MultiRepoManager(_get_store(db))
    manager.add_repo(name, path)
    console.print(f"[green]Repo registered:[/green] {name} → {path}")
1528
1529
@repo.command("list")
@DB_OPTION
@click.option("--json", "as_json", is_flag=True)
def repo_list(db: str, as_json: bool):
    """List all registered repositories."""
    from navegador.multirepo import MultiRepoManager

    registered = MultiRepoManager(_get_store(db)).list_repos()
    if as_json:
        click.echo(json.dumps(registered, indent=2))
        return
    if not registered:
        console.print("[yellow]No repositories registered.[/yellow]")
        return
    repo_table = Table(title="Registered repositories")
    repo_table.add_column("Name", style="cyan")
    repo_table.add_column("Path")
    for entry in registered:
        repo_table.add_row(entry["name"], entry["path"])
    console.print(repo_table)
1550
1551
@repo.command("ingest-all")
@DB_OPTION
@click.option("--clear", is_flag=True, help="Clear graph before ingesting.")
@click.option("--json", "as_json", is_flag=True)
def repo_ingest_all(db: str, clear: bool, as_json: bool):
    """Ingest all registered repositories."""
    from navegador.multirepo import MultiRepoManager

    manager = MultiRepoManager(_get_store(db))
    # Spinner while the (potentially long) multi-repo ingest runs.
    with console.status("[bold]Ingesting all repos…[/bold]"):
        summary = manager.ingest_all(clear=clear)
    if as_json:
        click.echo(json.dumps(summary, indent=2))
        return
    # One table per repository, metric rows capitalized for display.
    for repo_name, repo_stats in summary.items():
        stats_table = Table(title=f"Repo: {repo_name}")
        stats_table.add_column("Metric", style="cyan")
        stats_table.add_column("Count", justify="right", style="green")
        for metric, value in repo_stats.items():
            stats_table.add_row(str(metric).capitalize(), str(value))
        console.print(stats_table)
1573
1574
@repo.command("search")
@click.argument("query")
@DB_OPTION
@click.option("--limit", default=20, show_default=True)
@click.option("--json", "as_json", is_flag=True)
def repo_search(query: str, db: str, limit: int, as_json: bool):
    """Search across all registered repositories."""
    from navegador.multirepo import MultiRepoManager

    manager = MultiRepoManager(_get_store(db))
    hits = manager.cross_repo_search(query, limit=limit)
    if as_json:
        click.echo(json.dumps(hits, indent=2))
        return
    if not hits:
        console.print("[yellow]No results.[/yellow]")
        return
    hit_table = Table(title=f"Cross-repo search: {query!r}")
    hit_table.add_column("Label", style="cyan")
    hit_table.add_column("Name", style="bold")
    hit_table.add_column("File/Path")
    for hit in hits:
        hit_table.add_row(hit["label"], hit["name"], hit["file_path"])
    console.print(hit_table)
1598
1599
1600 # ── Rename (#26) ──────────────────────────────────────────────────────────────
1601
1602
@main.command()
@click.argument("old_name")
@click.argument("new_name")
@DB_OPTION
@click.option("--preview", is_flag=True, help="Show what would change without applying.")
@click.option("--json", "as_json", is_flag=True)
def rename(old_name: str, new_name: str, db: str, preview: bool, as_json: bool):
    """Rename a symbol across the graph (coordinated rename).

    \b
    Examples:
        navegador rename old_func new_func --preview
        navegador rename MyClass RenamedClass
    """
    from navegador.refactor import SymbolRenamer

    renamer = SymbolRenamer(_get_store(db))
    # preview_rename and apply_rename return the same result shape, so select
    # the operation once instead of duplicating the dict construction per branch.
    operation = renamer.preview_rename if preview else renamer.apply_rename
    result = operation(old_name, new_name)
    data = {
        "old_name": result.old_name,
        "new_name": result.new_name,
        "affected_files": result.affected_files,
        "affected_nodes": len(result.affected_nodes),
        "edges_updated": result.edges_updated,
    }

    if as_json:
        click.echo(json.dumps(data, indent=2))
        return

    action = "Preview" if preview else "Renamed"
    console.print(f"[green]{action}:[/green] {old_name!r} → {new_name!r}")
    console.print(f"  Nodes affected : {data['affected_nodes']}")
    console.print(f"  Edges updated  : {data['edges_updated']}")
    if data["affected_files"]:
        console.print("  Files:")
        for affected in data["affected_files"]:
            console.print(f"    {affected}")
1651
1652
1653 # ── CODEOWNERS (#39) ──────────────────────────────────────────────────────────
1654
1655
@main.command()
@click.argument("repo_path", type=click.Path(exists=True))
@DB_OPTION
@click.option("--json", "as_json", is_flag=True)
def codeowners(repo_path: str, db: str, as_json: bool):
    """Parse CODEOWNERS and map ownership to Person nodes."""
    from navegador.codeowners import CodeownersIngester

    ingester = CodeownersIngester(_get_store(db))
    stats = ingester.ingest(repo_path)
    if as_json:
        click.echo(json.dumps(stats, indent=2))
        return
    console.print(
        f"[green]CODEOWNERS ingested:[/green] "
        f"{stats['owners']} owners, {stats['patterns']} patterns, {stats['edges']} edges"
    )
1672
1673
1674 # ── ADR (#40) ─────────────────────────────────────────────────────────────────
1675
1676
# Click sub-command group: `navegador adr <ingest>`.
@main.group()
def adr():
    """Ingest Architecture Decision Records (ADRs) into the knowledge graph."""
    # Intentionally empty: subcommands attach via @adr.command(...).
1680
1681
@adr.command("ingest")
@click.argument("adr_dir", type=click.Path(exists=True))
@DB_OPTION
@click.option("--json", "as_json", is_flag=True)
def adr_ingest(adr_dir: str, db: str, as_json: bool):
    """Parse ADR markdown files and create Decision nodes."""
    from navegador.adr import ADRIngester

    ingester = ADRIngester(_get_store(db))
    stats = ingester.ingest(adr_dir)
    if as_json:
        click.echo(json.dumps(stats, indent=2))
        return
    console.print(
        f"[green]ADRs ingested:[/green] {stats['decisions']} decisions, {stats['skipped']} skipped"
    )
1697
1698
1699 # ── API schema (#41) ─────────────────────────────────────────────────────────
1700
1701
# Click sub-command group: `navegador api <ingest>`.
@main.group()
def api():
    """Ingest API schema files (OpenAPI, GraphQL) into the graph."""
    # Intentionally empty: subcommands attach via @api.command(...).
1705
1706
@api.command("ingest")
@click.argument("path", type=click.Path(exists=True))
@DB_OPTION
@click.option(
    "--type",
    "schema_type",
    type=click.Choice(["openapi", "graphql", "auto"]),
    default="auto",
    show_default=True,
    help="Schema type. auto detects from file extension.",
)
@click.option("--json", "as_json", is_flag=True)
def api_ingest(path: str, db: str, schema_type: str, as_json: bool):
    """Parse an OpenAPI or GraphQL schema and create API endpoint nodes.

    \b
    Examples:
        navegador api ingest openapi.yaml
        navegador api ingest schema.graphql --type graphql
        navegador api ingest swagger.json --type openapi
    """
    from pathlib import Path as P

    from navegador.api_schema import APISchemaIngester

    ingester = APISchemaIngester(_get_store(db))

    # Auto-detection keys off the file extension; everything that is not
    # obviously GraphQL is treated as OpenAPI.
    if schema_type == "auto":
        suffix = P(path).suffix.lower()
        schema_type = "graphql" if suffix in (".graphql", ".gql") else "openapi"

    if schema_type == "graphql":
        label, stats = "GraphQL", ingester.ingest_graphql(path)
    else:
        label, stats = "OpenAPI", ingester.ingest_openapi(path)

    if as_json:
        click.echo(json.dumps(stats, indent=2))
        return

    result_table = Table(title=f"{label} schema ingested")
    result_table.add_column("Metric", style="cyan")
    result_table.add_column("Count", justify="right", style="green")
    for metric, value in stats.items():
        result_table.add_row(metric.replace("_", " ").capitalize(), str(value))
    console.print(result_table)
1758
1759
1760 # ── PM: project management ticket ingestion (#53) ─────────────────────────────
1761
1762
# Click sub-command group: `navegador pm <ingest>`.
@main.group()
def pm():
    """Ingest project management tickets (GitHub Issues, Linear, Jira)."""
    # Intentionally empty: subcommands attach via @pm.command(...).
1766
1767
@pm.command("ingest")
@click.option("--github", "github_repo", default="", metavar="OWNER/REPO",
              help="GitHub repository in owner/repo format.")
@click.option("--token", default="", envvar="GITHUB_TOKEN",
              help="GitHub personal access token.")
@click.option("--state", default="open",
              type=click.Choice(["open", "closed", "all"]),
              show_default=True,
              help="GitHub issue state filter.")
@click.option("--limit", default=100, show_default=True,
              help="Maximum number of issues to fetch.")
@DB_OPTION
@click.option("--json", "as_json", is_flag=True)
def pm_ingest(github_repo: str, token: str, state: str, limit: int, db: str, as_json: bool):
    """Ingest tickets from a PM tool into the knowledge graph.

    \b
    Examples:
        navegador pm ingest --github owner/repo
        navegador pm ingest --github owner/repo --token ghp_...
        navegador pm ingest --github owner/repo --state all --limit 200
    """
    # GitHub is currently the only supported backend.
    if not github_repo:
        raise click.UsageError("Provide --github <owner/repo> (more backends coming in a future release).")

    from navegador.pm import TicketIngester

    ingester = TicketIngester(_get_store(db))
    stats = ingester.ingest_github_issues(github_repo, token=token, state=state, limit=limit)

    if as_json:
        click.echo(json.dumps(stats, indent=2))
        return

    summary_table = Table(title=f"PM import: {github_repo}")
    summary_table.add_column("Metric", style="cyan")
    summary_table.add_column("Count", justify="right", style="green")
    for metric, value in stats.items():
        summary_table.add_row(metric.capitalize(), str(value))
    console.print(summary_table)
1807
1808
1809 # ── Dependencies: external package ingestion (#58) ────────────────────────────
1810
1811
# Click sub-command group: `navegador deps <ingest>`.
@main.group()
def deps():
    """Ingest external package dependencies (npm, pip, cargo)."""
    # Intentionally empty: subcommands attach via @deps.command(...).
1815
1816
@deps.command("ingest")
@click.argument("path", type=click.Path(exists=True))
@click.option(
    "--type", "dep_type",
    type=click.Choice(["auto", "npm", "pip", "cargo"]),
    default="auto",
    show_default=True,
    help="Manifest type. auto detects from filename.",
)
@DB_OPTION
@click.option("--json", "as_json", is_flag=True)
def deps_ingest(path: str, dep_type: str, db: str, as_json: bool):
    """Ingest external dependencies from a package manifest.

    \b
    PATH can be:
        package.json      (npm)
        requirements.txt  (pip)
        pyproject.toml    (pip)
        Cargo.toml        (cargo)

    \b
    Examples:
        navegador deps ingest package.json
        navegador deps ingest requirements.txt
        navegador deps ingest Cargo.toml --type cargo
    """
    from pathlib import Path as P

    from navegador.dependencies import DependencyIngester

    ingester = DependencyIngester(_get_store(db))
    manifest = P(path)

    if dep_type == "auto":
        # Known manifest filenames map directly to a package manager.
        detected = {
            "package.json": "npm",
            "requirements.txt": "pip",
            "pyproject.toml": "pip",
            "cargo.toml": "cargo",
        }.get(manifest.name.lower())
        if detected is None:
            raise click.UsageError(
                f"Cannot auto-detect type for {manifest.name!r}. Use --type npm|pip|cargo."
            )
        dep_type = detected

    handlers = {
        "npm": ingester.ingest_npm,
        "pip": ingester.ingest_pip,
        "cargo": ingester.ingest_cargo,
    }
    stats = handlers[dep_type](path)

    if as_json:
        click.echo(json.dumps(stats, indent=2))
        return
    console.print(
        f"[green]Dependencies ingested[/green] ({dep_type}): "
        f"{stats['packages']} packages"
    )
1878
1879
1880 # ── Submodules: ingest parent + submodules (#61) ──────────────────────────────
1881
1882
# Click sub-command group: `navegador submodules <ingest|list>`.
@main.group()
def submodules():
    """Ingest a parent repository and all its git submodules."""
    # Intentionally empty: subcommands attach via @submodules.command(...).
1886
1887
@submodules.command("ingest")
@click.argument("repo_path", type=click.Path(exists=True))
@DB_OPTION
@click.option("--clear", is_flag=True, help="Clear existing graph before ingesting.")
@click.option("--json", "as_json", is_flag=True)
def submodules_ingest(repo_path: str, db: str, clear: bool, as_json: bool):
    """Ingest a repository and all its git submodules as linked nodes.

    \b
    Examples:
        navegador submodules ingest .
        navegador submodules ingest /path/to/repo --clear
    """
    from navegador.submodules import SubmoduleIngester

    ingester = SubmoduleIngester(_get_store(db))
    stats = ingester.ingest_with_submodules(repo_path, clear=clear)

    if as_json:
        click.echo(json.dumps(stats, indent=2))
        return

    # Defensive .get(): tolerate a stats payload missing optional keys.
    submodule_names = list(stats.get("submodules", {}).keys())
    console.print(
        f"[green]Submodule ingestion complete[/green]: "
        f"{stats.get('total_files', 0)} total files, "
        f"{len(submodule_names)} submodule(s)"
    )
    if submodule_names:
        console.print("  Submodules: " + ", ".join(submodule_names))
1917
1918
@submodules.command("list")
@click.argument("repo_path", type=click.Path(exists=True), default=".")
def submodules_list(repo_path: str):
    """List git submodules found in REPO_PATH."""
    from navegador.submodules import SubmoduleIngester

    # HACK: build an instance via __new__ to skip __init__ — listing only needs
    # detect_submodules(), which presumably never touches the graph store.
    # TODO(review): confirm detect_submodules() really ignores self.store, or
    # expose it as a classmethod/staticmethod so no instance is needed.
    subs = SubmoduleIngester.__new__(SubmoduleIngester)
    subs.store = None  # type: ignore[assignment]
    items = subs.detect_submodules(repo_path)

    if not items:
        console.print("[yellow]No submodules found (no .gitmodules).[/yellow]")
        return

    table = Table(title=f"Submodules in {repo_path}")
    table.add_column("Name", style="cyan")
    table.add_column("Path")
    table.add_column("URL")
    for item in items:
        # URL may be absent in a malformed .gitmodules entry; render blank.
        table.add_row(item["name"], item["path"], item.get("url", ""))
    console.print(table)
1940
1941
1942 # ── Workspace: multi-repo (#62) ────────────────────────────────────────────────
1943
1944
# Click sub-command group: `navegador workspace <ingest>`.
@main.group()
def workspace():
    """Manage a multi-repo workspace (unified or federated graph)."""
    # Intentionally empty: subcommands attach via @workspace.command(...).
1948
1949
@workspace.command("ingest")
@click.argument("repos", nargs=-1, metavar="NAME=PATH ...")
@click.option(
    "--mode",
    type=click.Choice(["unified", "federated"]),
    default="unified",
    show_default=True,
    help="Graph mode: unified (shared graph) or federated (per-repo graphs).",
)
@DB_OPTION
@click.option("--clear", is_flag=True)
@click.option("--json", "as_json", is_flag=True)
def workspace_ingest(repos: tuple, mode: str, db: str, clear: bool, as_json: bool):
    """Ingest multiple repositories as a workspace.

    \b
    REPOS is a list of NAME=PATH pairs, e.g.:
        navegador workspace ingest backend=/path/to/backend frontend=/path/to/frontend

    \b
    Examples:
        navegador workspace ingest backend=. frontend=../frontend --mode unified
        navegador workspace ingest api=./api worker=./worker --mode federated
    """
    from navegador.multirepo import WorkspaceManager, WorkspaceMode

    if not repos:
        raise click.UsageError("Provide at least one NAME=PATH repo.")

    wm = WorkspaceManager(_get_store(db), mode=WorkspaceMode(mode))
    for repo_spec in repos:
        if "=" not in repo_spec:
            raise click.UsageError(
                f"Invalid repo spec {repo_spec!r}. Expected NAME=PATH format."
            )
        name, path = (part.strip() for part in repo_spec.split("=", 1))
        # Reject degenerate specs like "=path" or "name=" that would otherwise
        # register a repo with an empty name or path.
        if not name or not path:
            raise click.UsageError(
                f"Invalid repo spec {repo_spec!r}. Expected NAME=PATH format."
            )
        wm.add_repo(name, path)

    stats = wm.ingest_all(clear=clear)

    if as_json:
        click.echo(json.dumps(stats, indent=2))
        return

    for repo_name, repo_stats in stats.items():
        # Per-repo errors are reported inline rather than aborting the run.
        if "error" in repo_stats:
            console.print(f"[red]Error ingesting {repo_name}:[/red] {repo_stats['error']}")
        else:
            console.print(
                f"[green]{repo_name}[/green]: "
                f"{repo_stats.get('files', 0)} files, "
                f"{repo_stats.get('nodes', 0)} nodes"
            )
2002
2003 ADDED navegador/dependencies.py
2004 ADDED navegador/planopticon_pipeline.py
2005 ADDED navegador/pm.py
2006 ADDED navegador/submodules.py
--- a/navegador/dependencies.py
+++ b/navegador/dependencies.py
@@ -0,0 +1,264 @@
1
+"""
2
+External dependency nodes — track npm/pip/cargo packages in the knowledge graph.
3
+
4
+Issue: #58
5
+
6
+Parses package manifests and creates ExternalDependency nodes with DEPENDS_ON
7
+edges to the repository, enabling queries like "what packages does this repo
8
+depend on?" and cross-repo dependency analysis.
9
+
10
+Since ExternalDependency is not (yet) a first-class NodeLabel, we use
11
+NodeLabel.Concept with domain="external_dependency" and a "package_manager"
12
+property encoded in the description.
13
+
14
+Usage::
15
+
16
+ from navegador.dependencies import DependencyIngester
17
+
18
+ ing = DependencyIngester(store)
19
+
20
+ stats = ing.ingest_npm("package.json")
21
+ stats = ing.ingest_pip("requirements.txt") # or pyproject.toml
22
+ stats = ing.ingest_cargo("Cargo.toml")
23
+"""
24
+
25
+from __future__ import annotations
26
+
27
+import logging
28
+import re
29
+from pathlib import Path
30
+from typing import Any
31
+
32
+from navegador.graph.schema import EdgeType, NodeLabel
33
+from navegador.graph.store import GraphStore
34
+
35
+logger = logging.getLogger(__name__)
36
+
37
+# We represent external dependencies as Concept nodes with this domain tag
38
+_DOMAIN = "external_dependency"
39
+
40
+
41
+class DependencyIngester:
42
+ """
43
+ Parses package manifests and writes ExternalDependency nodes into the graph.
44
+
45
+ Each dependency becomes a Concept node::
46
+
47
+ name — "<package>@<version>" (e.g. "react@^18.0.0")
48
+ description — "<package_manager>:<package>"
49
+ domain — "external_dependency"
50
+ status — version specifier
51
+
52
+ A DEPENDS_ON edge is created from the source File node (the manifest path)
53
+ to each dependency Concept node.
54
+ """
55
+
56
+ def __init__(self, store: GraphStore) -> None:
57
+ self.store = store
58
+
59
+ # ── npm / package.json ────────────────────────────────────────────────────
60
+
61
+ def ingest_npm(self, package_json_path: str | Path) -> dict[str, Any]:
62
+ """
63
+ Parse a ``package.json`` and ingest all dependency entries.
64
+
65
+ Reads ``dependencies``, ``devDependencies``, and
66
+ ``peerDependencies``.
67
+
68
+ Parameters
69
+ ----------
70
+ package_json_path:
71
+ Absolute or relative path to ``package.json``.
72
+
73
+ Returns
74
+ -------
75
+ dict with key ``packages`` (int count ingested)
76
+ """
77
+ import json
78
+
79
+ p = Path(package_json_path).resolve()
80
+ data = json.loads(p.read_text(encoding="utf-8"))
81
+
82
+ dep_sections = ["dependencies", "devDependencies", "peerDependencies"]
83
+ packages: dict[str, str] = {}
84
+ for section in dep_sections:
85
+ packages.update(data.get(section, {}) or {})
86
+
87
+ count = 0
88
+ for pkg_name, version in packages.items():
89
+ self._upsert_dep("npm", pkg_name, version, str(p))
90
+ count += 1
91
+
92
+ logger.info("DependencyIngester.ingest_npm(%s): %d packages", p, count)
93
+ return {"packages": count}
94
+
95
+ # ── pip / requirements.txt / pyproject.toml ───────────────────────────────
96
+
97
+ def ingest_pip(self, requirements_path: str | Path) -> dict[str, Any]:
98
+ """
99
+ Parse a ``requirements.txt`` or ``pyproject.toml`` and ingest all
100
+ Python dependency entries.
101
+
102
+ For ``pyproject.toml`` reads ``[project].dependencies`` and
103
+ ``[project.optional-dependencies]``.
104
+
105
+ Parameters
106
+ ----------
107
+ requirements_path:
108
+ Absolute or relative path to ``requirements.txt`` or
109
+ ``pyproject.toml``.
110
+
111
+ Returns
112
+ -------
113
+ dict with key ``packages`` (int count ingested)
114
+ """
115
+ p = Path(requirements_path).resolve()
116
+ name_lower = p.name.lower()
117
+
118
+ if name_lower == "pyproject.toml":
119
+ packages = self._parse_pyproject(p)
120
+ else:
121
+ packages = self._parse_requirements_txt(p)
122
+
123
+ count = 0
124
+ for pkg_name, version in packages.items():
125
+ self._upsert_dep("pip", pkg_name, version, str(p))
126
+ count += 1
127
+
128
+ logger.info("DependencyIngester.ingest_pip(%s): %d packages", p, count)
129
+ return {"packages": count}
130
+
131
+ # ── cargo / Cargo.toml ────────────────────────────────────────────────────
132
+
133
+ def ingest_cargo(self, cargo_toml_path: str | Path) -> dict[str, Any]:
134
+ """
135
+ Parse a ``Cargo.toml`` and ingest all Rust crate dependencies.
136
+
137
+ Reads ``[dependencies]``, ``[dev-dependencies]``, and
138
+ ``[build-dependencies]``.
139
+
140
+ Parameters
141
+ ----------
142
+ cargo_toml_path:
143
+ Absolute or relative path to ``Cargo.toml``.
144
+
145
+ Returns
146
+ -------
147
+ dict with key ``packages`` (int count ingested)
148
+ """
149
+ p = Path(cargo_toml_path).resolve()
150
+ packages = self._parse_cargo_toml(p)
151
+
152
+ count = 0
153
+ for pkg_name, version in packages.items():
154
+ self._upsert_dep("cargo", pkg_name, version, str(p))
155
+ count += 1
156
+
157
+ logger.info("DependencyIngester.ingest_cargo(%s): %d packages", p, count)
158
+ return {"packaoding="utf-8")
159
+
160
+ count = 0
161
+ in_require = False
162
+
163
+ for raw_line in text.splitlines():
164
+ line = raw_line.strip()
165
+
166
+ # Module declaration
167
+ if line.startswith("module "):
168
+ mod_name = line.removeprefix("module").strip()
169
+ self.store.create_node(
170
+ NodeLabel.Concept,
171
+ {
172
+ "name": mod_name,
173
+ "description": f"go:{mod_name}",
174
+ "domain": _DOMAIN,
175
+ "status": "module",
176
+ },
177
+ )
178
+ continue
179
+
180
+ # Require block boundaries
181
+ if line == "require (":
182
+ in_require = True
183
+ continue
184
+ if line == ")" and in_require:
185
+ in_require = False
186
+ continue
187
+
188
+ # Single-line require
189
+ if line.startswith("require ") and "(" not in line:
190
+ parts = line.removeprefix("require").strip().split()
191
+ if len(parts) >= 2:
192
+ pkg_name, version = parts[0], parts[1]
193
+ self._upsert_dep("go", pkg_name, version, str(p))
194
+ count += 1
195
+ continue
196
+
197
+ # Inside require block
198
+ if in_require and line and not line.startswith("//"):
199
+ parts = line.split()
200
+ if len(parts) >= 2:
201
+ pkg_name, version = parts[0], parts[1]
202
+ self._upsert_dep("go", pkg_name, version, str(p))
203
+ count += 1
204
+
205
+ logger.info("DependencyIngester.ingest_gomod(%s): %d packages", p, count)
206
+ return {"packages": count}
207
+
208
+ # ── Core helpers ──────────────────────────────────────────────────────────
209
+
210
+ def _upsert_dep(
211
+ self,
212
+ package_manager: str,
213
+ pkg_name: str,
214
+ version: str,
215
+ source_path: str,
216
+ ) -> None:
217
+ """Write a single dependency node and a DEPENDS_ON edge from the manifest."""
218
+ node_name = f"{pkg_name}@{version}" if version else pkg_name
219
+ self.store.create_node(
220
+ NodeLabel.Concept,
221
+ {
222
+ "name": node_name,
223
+ "description": f"{package_manager}:{pkg_name}",
224
+ "domain": _DOMAIN,
225
+ "status": version,
226
+ },
227
+ )
228
+ # Ensure domain node exists
229
+ self.store.create_node(
230
+ NodeLabel.Domain, {"name": _DOMAIN, "description": "External package dependencies"}
231
+ )
232
+ # Ensure the manifest File node exists (minimal representation)
233
+ self.store.create_node(
234
+ NodeLabel.File,
235
+ {
236
+ "name": Path(source_path).name,
237
+ "path": source_path,
238
+ "language": package_manager,
239
+ "size": 0,
240
+ "line_count": 0,
241
+ "content_hash": "",
242
+ },
243
+ )
244
+ # File -DEPENDS_ON-> ExternalDependency concept
245
+ self.store.create_edge(
246
+ NodeLabel.File,
247
+ {"name": Path(source_path).name},
248
+ EdgeType.DEPENDS_ON,
249
+ NodeLabel.Concept,
250
+ {"name": node_name},
251
+ )
252
+
253
+ # ── Parsers ───────────────────────────────────────────────────────────────
254
+
255
+ @staticmethod
256
+ def _parse_requirements_txt(path: Path) -> dict[str, str]:
257
+ """Parse requirements.txt into {package_name: version_spec}."""
258
+ packages: dict[str, str] = {}
259
+ for raw_line in path.read_text(encoding="utf-8").splitlines():
260
+ line = raw_line.strip()
261
+ # Skip blanks, comments, options
262
+ if not line or line.startswith("#") or line.startswith("-"):
263
+ continue
264
+ # Stri
--- a/navegador/dependencies.py
+++ b/navegador/dependencies.py
@@ -0,0 +1,264 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/navegador/dependencies.py
+++ b/navegador/dependencies.py
@@ -0,0 +1,264 @@
1 """
2 External dependency nodes — track npm/pip/cargo packages in the knowledge graph.
3
4 Issue: #58
5
6 Parses package manifests and creates ExternalDependency nodes with DEPENDS_ON
7 edges to the repository, enabling queries like "what packages does this repo
8 depend on?" and cross-repo dependency analysis.
9
10 Since ExternalDependency is not (yet) a first-class NodeLabel, we use
11 NodeLabel.Concept with domain="external_dependency" and a "package_manager"
12 property encoded in the description.
13
14 Usage::
15
16 from navegador.dependencies import DependencyIngester
17
18 ing = DependencyIngester(store)
19
20 stats = ing.ingest_npm("package.json")
21 stats = ing.ingest_pip("requirements.txt") # or pyproject.toml
22 stats = ing.ingest_cargo("Cargo.toml")
23 """
24
25 from __future__ import annotations
26
27 import logging
28 import re
29 from pathlib import Path
30 from typing import Any
31
32 from navegador.graph.schema import EdgeType, NodeLabel
33 from navegador.graph.store import GraphStore
34
35 logger = logging.getLogger(__name__)
36
37 # We represent external dependencies as Concept nodes with this domain tag
38 _DOMAIN = "external_dependency"
39
40
41 class DependencyIngester:
42 """
43 Parses package manifests and writes ExternalDependency nodes into the graph.
44
45 Each dependency becomes a Concept node::
46
47 name — "<package>@<version>" (e.g. "react@^18.0.0")
48 description — "<package_manager>:<package>"
49 domain — "external_dependency"
50 status — version specifier
51
52 A DEPENDS_ON edge is created from the source File node (the manifest path)
53 to each dependency Concept node.
54 """
55
56 def __init__(self, store: GraphStore) -> None:
57 self.store = store
58
59 # ── npm / package.json ────────────────────────────────────────────────────
60
61 def ingest_npm(self, package_json_path: str | Path) -> dict[str, Any]:
62 """
63 Parse a ``package.json`` and ingest all dependency entries.
64
65 Reads ``dependencies``, ``devDependencies``, and
66 ``peerDependencies``.
67
68 Parameters
69 ----------
70 package_json_path:
71 Absolute or relative path to ``package.json``.
72
73 Returns
74 -------
75 dict with key ``packages`` (int count ingested)
76 """
77 import json
78
79 p = Path(package_json_path).resolve()
80 data = json.loads(p.read_text(encoding="utf-8"))
81
82 dep_sections = ["dependencies", "devDependencies", "peerDependencies"]
83 packages: dict[str, str] = {}
84 for section in dep_sections:
85 packages.update(data.get(section, {}) or {})
86
87 count = 0
88 for pkg_name, version in packages.items():
89 self._upsert_dep("npm", pkg_name, version, str(p))
90 count += 1
91
92 logger.info("DependencyIngester.ingest_npm(%s): %d packages", p, count)
93 return {"packages": count}
94
95 # ── pip / requirements.txt / pyproject.toml ───────────────────────────────
96
97 def ingest_pip(self, requirements_path: str | Path) -> dict[str, Any]:
98 """
99 Parse a ``requirements.txt`` or ``pyproject.toml`` and ingest all
100 Python dependency entries.
101
102 For ``pyproject.toml`` reads ``[project].dependencies`` and
103 ``[project.optional-dependencies]``.
104
105 Parameters
106 ----------
107 requirements_path:
108 Absolute or relative path to ``requirements.txt`` or
109 ``pyproject.toml``.
110
111 Returns
112 -------
113 dict with key ``packages`` (int count ingested)
114 """
115 p = Path(requirements_path).resolve()
116 name_lower = p.name.lower()
117
118 if name_lower == "pyproject.toml":
119 packages = self._parse_pyproject(p)
120 else:
121 packages = self._parse_requirements_txt(p)
122
123 count = 0
124 for pkg_name, version in packages.items():
125 self._upsert_dep("pip", pkg_name, version, str(p))
126 count += 1
127
128 logger.info("DependencyIngester.ingest_pip(%s): %d packages", p, count)
129 return {"packages": count}
130
131 # ── cargo / Cargo.toml ────────────────────────────────────────────────────
132
133 def ingest_cargo(self, cargo_toml_path: str | Path) -> dict[str, Any]:
134 """
135 Parse a ``Cargo.toml`` and ingest all Rust crate dependencies.
136
137 Reads ``[dependencies]``, ``[dev-dependencies]``, and
138 ``[build-dependencies]``.
139
140 Parameters
141 ----------
142 cargo_toml_path:
143 Absolute or relative path to ``Cargo.toml``.
144
145 Returns
146 -------
147 dict with key ``packages`` (int count ingested)
148 """
149 p = Path(cargo_toml_path).resolve()
150 packages = self._parse_cargo_toml(p)
151
152 count = 0
153 for pkg_name, version in packages.items():
154 self._upsert_dep("cargo", pkg_name, version, str(p))
155 count += 1
156
157 logger.info("DependencyIngester.ingest_cargo(%s): %d packages", p, count)
158 return {"packaoding="utf-8")
159
160 count = 0
161 in_require = False
162
163 for raw_line in text.splitlines():
164 line = raw_line.strip()
165
166 # Module declaration
167 if line.startswith("module "):
168 mod_name = line.removeprefix("module").strip()
169 self.store.create_node(
170 NodeLabel.Concept,
171 {
172 "name": mod_name,
173 "description": f"go:{mod_name}",
174 "domain": _DOMAIN,
175 "status": "module",
176 },
177 )
178 continue
179
180 # Require block boundaries
181 if line == "require (":
182 in_require = True
183 continue
184 if line == ")" and in_require:
185 in_require = False
186 continue
187
188 # Single-line require
189 if line.startswith("require ") and "(" not in line:
190 parts = line.removeprefix("require").strip().split()
191 if len(parts) >= 2:
192 pkg_name, version = parts[0], parts[1]
193 self._upsert_dep("go", pkg_name, version, str(p))
194 count += 1
195 continue
196
197 # Inside require block
198 if in_require and line and not line.startswith("//"):
199 parts = line.split()
200 if len(parts) >= 2:
201 pkg_name, version = parts[0], parts[1]
202 self._upsert_dep("go", pkg_name, version, str(p))
203 count += 1
204
205 logger.info("DependencyIngester.ingest_gomod(%s): %d packages", p, count)
206 return {"packages": count}
207
208 # ── Core helpers ──────────────────────────────────────────────────────────
209
210 def _upsert_dep(
211 self,
212 package_manager: str,
213 pkg_name: str,
214 version: str,
215 source_path: str,
216 ) -> None:
217 """Write a single dependency node and a DEPENDS_ON edge from the manifest."""
218 node_name = f"{pkg_name}@{version}" if version else pkg_name
219 self.store.create_node(
220 NodeLabel.Concept,
221 {
222 "name": node_name,
223 "description": f"{package_manager}:{pkg_name}",
224 "domain": _DOMAIN,
225 "status": version,
226 },
227 )
228 # Ensure domain node exists
229 self.store.create_node(
230 NodeLabel.Domain, {"name": _DOMAIN, "description": "External package dependencies"}
231 )
232 # Ensure the manifest File node exists (minimal representation)
233 self.store.create_node(
234 NodeLabel.File,
235 {
236 "name": Path(source_path).name,
237 "path": source_path,
238 "language": package_manager,
239 "size": 0,
240 "line_count": 0,
241 "content_hash": "",
242 },
243 )
244 # File -DEPENDS_ON-> ExternalDependency concept
245 self.store.create_edge(
246 NodeLabel.File,
247 {"name": Path(source_path).name},
248 EdgeType.DEPENDS_ON,
249 NodeLabel.Concept,
250 {"name": node_name},
251 )
252
253 # ── Parsers ───────────────────────────────────────────────────────────────
254
255 @staticmethod
256 def _parse_requirements_txt(path: Path) -> dict[str, str]:
257 """Parse requirements.txt into {package_name: version_spec}."""
258 packages: dict[str, str] = {}
259 for raw_line in path.read_text(encoding="utf-8").splitlines():
260 line = raw_line.strip()
261 # Skip blanks, comments, options
262 if not line or line.startswith("#") or line.startswith("-"):
263 continue
264 # Stri
--- a/navegador/planopticon_pipeline.py
+++ b/navegador/planopticon_pipeline.py
@@ -0,0 +1,174 @@
1
+"""
2
+PlanopticonPipeline — first-class pipeline for meeting recordings → knowledge graph.
3
+
4
+Orchestrates: detect input type → ingest → link to code → return stats.
5
+
6
+Issues: #7 (first-class pipeline), #18 (action items, decision timeline, auto-linking)
7
+
8
+Usage::
9
+
10
+ from navegador.planopticon_pipeline import PlanopticonPipeline
11
+
12
+ pipeline = PlanopticonPipeline(store)
13
+ stats = pipeline.run("planopticon-output/", source_tag="Q4 Planning")
14
+
15
+ items = pipeline.extract_action_items(kg_data)
16
+ timeline = pipeline.build_decision_timeline(store)
17
+ linked = pipeline.auto_link_to_code(store)
18
+"""
19
+
20
+from __future__ import annotations
21
+
22
+import logging
23
+from d, fieldataclasses import dataclass
24
+from pathlib import Path
25
+from typing import chema import EdgeType, NodeLabelath
26
+from ty
27
+ "project", {}
28
+ ret).get("name", "")
29
+��──�# Explicit action_items list (manifest format)
30
+ for raw in kg_data.get("action_items", []):
31
+ action = (raw.get("action") or "").strip()
32
+ if not action:
33
+ continue
34
+ items.append(
35
+ ActionItem(
36
+ action=action,
37
+ assignee=(raw.get("assignee") or "").strip(),
38
+ context=raw.get("context", ""),
39
+ priority=raw.get("priority", "info"),
40
+ source=source,
41
+ )
42
+ )
43
+
44
+ # Entities / nodes with task/action_item planning type
45
+ for entity in kg_data.get("entitie{
46
+ _data.get("", "") or entity.gtype not in ( cont
47
+ namname:
48
+ continue
49
+ }end(
50
+ ActionItem(
51
+ action=name,
52
+ assignee=(entity.get("assignee") or "").strip(),
53
+ context=entity.get("description", ""),
54
+ priority=entity.get("priority", "info"),
55
+ source=source,
56
+ )
57
+ )
58
+
59
+ return items
60
+
61
+ # ── Decision timeline ─────────────────────────────────────────────────────
62
+
63
+ @staticmethod
64
+ def build_decision_timeline(store: GraphStore) -> list[dict[str, Any]]:
65
+ """
66
+ Return all Decision nodes ordered chronologically by their ``date`` property.
67
+
68
+ Nodes without a date are placed last, sorted by name.
69
+
70
+ Returns
71
+ -------
72
+ lis"
73
+" + k_labe "WHERE k:Conce
74
+ items.applabels(k)[0], k.name"
75
+ )
76
+ code_cypher = (
77
+ "MATCH (c) "
78
+ "WHERE c:Funct
79
+ items.applabels(c)[0], c.name"
80
+ )
81
+
82
+ try:
83
+ k_result = store.query(knowledge_cypher)
84
+ c_result = store.query(code_cypher)
85
+ except Exception:
86
+ logger.warning("auto_link_to_code: initial queries failed", exc_info=True)
87
+ return 0
88
+
89
+ knowledge_nodes: list[tuple[str, str]] = [
90
+
91
+ nning type
92
+ � link to code → return s" if row[0] and row[1]
93
+ ]
94
+ code_nodes: list[tuple[str, str]] = [
95
+
96
+ nning type
97
+ (c_result.result_set or [])
98
+ if row[0] and row[1]
99
+ ]
100
+
101
+ if not knowledge_nodes or not code_nodes:
102
+ return 0
103
+
104
+ linked = 0
105
+ for k_label, k_name in knowledge_nodes:
106
+ # Extract significant tokens (length >= 4) from the knowledge name
107
+ tokens = [
108
+ w.lower()
109
+ for w in k_name.replace("_", " "
110
+ if len(w) >= 4
111
+ ]
112
+ if not tokens:
113
+ continue
114
+
115
+ for c_label, c_name in code_nodes:
116
+ c_lower = c_name.lower()
117
+ if any(tok in c_lower for tok in tokens):
118
+ # Create ANNOTATES edge from knowledge node to code node
119
+ cypher = (
120
+
121
+ + k_label
122
+ + " {name: $kn}), (c:"
123
+ + c_label
124
+ name: $kn}), (c:" + c_label + " {name: $cn}) "
125
+ "MERGE (k)-[r:ANNOTATES]->(c)"
126
+ )
127
+ try:
128
+ store.query(cypher, {"kn": k_name, "cn": c_name})
129
+ linked += 1
130
+ e
131
+ logger.debug("auto_l
132
+ )
133
+
134
+ return e, c_name)
135
+
136
+ return linked
137
+
138
+ # ── Helpers ───────────────────────────────────────────────────────────────
139
+
140
+ @staticmethod
141
+ def _detect_input(p: Path) -> tuple[str, Path]:
142
+ """
143
+ Detect input type and resolve to a concrete file path.
144
+
145
+ Returns
146
+ -------
147
+ (input_type, resolved_path) where input_type is one of:
148
+ "manifest", "interchange", "batch", "kg"
149
+ """
150
+ if p.is_dir():
151
+ candidates = [
152
+ ("manifest", p / "manifest.json"),
153
+ ("interchange", p / "interchange.json"),
154
+ ("batch", p / "batch_manifest.json"),
155
+ ("kg", p / "results" / "knowledge_graph.json"),
156
+ ("kg", p / "knowledge_graph.json"),
157
+ ]
158
+ for itype, candidate in candidates:
159
+ if candidate.exists():
160
+ return itype, candidate
161
+ raise FileNotFoundError(
162
+ f"No recognised planopticon file found in {p}. "
163
+ "Expected manifest.json, interchange.json, "
164
+ "batch_manifest.json, or knowledge_graph.json."
165
+ )
166
+
167
+ name = p.name.lower()
168
+ if "manifest" in name and "batch" not in name:
169
+ return "manifest", p
170
+ if "interchange" in name:
171
+ return "interchange", p
172
+ if "batch" in name:
173
+ return "batch", p
174
+ # Default: treat as knowledge_graph.j
--- a/navegador/planopticon_pipeline.py
+++ b/navegador/planopticon_pipeline.py
@@ -0,0 +1,174 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/navegador/planopticon_pipeline.py
+++ b/navegador/planopticon_pipeline.py
@@ -0,0 +1,174 @@
1 """
2 PlanopticonPipeline — first-class pipeline for meeting recordings → knowledge graph.
3
4 Orchestrates: detect input type → ingest → link to code → return stats.
5
6 Issues: #7 (first-class pipeline), #18 (action items, decision timeline, auto-linking)
7
8 Usage::
9
10 from navegador.planopticon_pipeline import PlanopticonPipeline
11
12 pipeline = PlanopticonPipeline(store)
13 stats = pipeline.run("planopticon-output/", source_tag="Q4 Planning")
14
15 items = pipeline.extract_action_items(kg_data)
16 timeline = pipeline.build_decision_timeline(store)
17 linked = pipeline.auto_link_to_code(store)
18 """
19
20 from __future__ import annotations
21
22 import logging
23 from d, fieldataclasses import dataclass
24 from pathlib import Path
25 from typing import chema import EdgeType, NodeLabelath
26 from ty
27 "project", {}
28 ret).get("name", "")
29 ��──�# Explicit action_items list (manifest format)
30 for raw in kg_data.get("action_items", []):
31 action = (raw.get("action") or "").strip()
32 if not action:
33 continue
34 items.append(
35 ActionItem(
36 action=action,
37 assignee=(raw.get("assignee") or "").strip(),
38 context=raw.get("context", ""),
39 priority=raw.get("priority", "info"),
40 source=source,
41 )
42 )
43
44 # Entities / nodes with task/action_item planning type
45 for entity in kg_data.get("entitie{
46 _data.get("", "") or entity.gtype not in ( cont
47 namname:
48 continue
49 }end(
50 ActionItem(
51 action=name,
52 assignee=(entity.get("assignee") or "").strip(),
53 context=entity.get("description", ""),
54 priority=entity.get("priority", "info"),
55 source=source,
56 )
57 )
58
59 return items
60
61 # ── Decision timeline ─────────────────────────────────────────────────────
62
63 @staticmethod
64 def build_decision_timeline(store: GraphStore) -> list[dict[str, Any]]:
65 """
66 Return all Decision nodes ordered chronologically by their ``date`` property.
67
68 Nodes without a date are placed last, sorted by name.
69
70 Returns
71 -------
72 lis"
73 " + k_labe "WHERE k:Conce
74 items.applabels(k)[0], k.name"
75 )
76 code_cypher = (
77 "MATCH (c) "
78 "WHERE c:Funct
79 items.applabels(c)[0], c.name"
80 )
81
82 try:
83 k_result = store.query(knowledge_cypher)
84 c_result = store.query(code_cypher)
85 except Exception:
86 logger.warning("auto_link_to_code: initial queries failed", exc_info=True)
87 return 0
88
89 knowledge_nodes: list[tuple[str, str]] = [
90
91 nning type
92 � link to code → return s" if row[0] and row[1]
93 ]
94 code_nodes: list[tuple[str, str]] = [
95
96 nning type
97 (c_result.result_set or [])
98 if row[0] and row[1]
99 ]
100
101 if not knowledge_nodes or not code_nodes:
102 return 0
103
104 linked = 0
105 for k_label, k_name in knowledge_nodes:
106 # Extract significant tokens (length >= 4) from the knowledge name
107 tokens = [
108 w.lower()
109 for w in k_name.replace("_", " "
110 if len(w) >= 4
111 ]
112 if not tokens:
113 continue
114
115 for c_label, c_name in code_nodes:
116 c_lower = c_name.lower()
117 if any(tok in c_lower for tok in tokens):
118 # Create ANNOTATES edge from knowledge node to code node
119 cypher = (
120
121 + k_label
122 + " {name: $kn}), (c:"
123 + c_label
124 name: $kn}), (c:" + c_label + " {name: $cn}) "
125 "MERGE (k)-[r:ANNOTATES]->(c)"
126 )
127 try:
128 store.query(cypher, {"kn": k_name, "cn": c_name})
129 linked += 1
130 e
131 logger.debug("auto_l
132 )
133
134 return e, c_name)
135
136 return linked
137
138 # ── Helpers ───────────────────────────────────────────────────────────────
139
140 @staticmethod
141 def _detect_input(p: Path) -> tuple[str, Path]:
142 """
143 Detect input type and resolve to a concrete file path.
144
145 Returns
146 -------
147 (input_type, resolved_path) where input_type is one of:
148 "manifest", "interchange", "batch", "kg"
149 """
150 if p.is_dir():
151 candidates = [
152 ("manifest", p / "manifest.json"),
153 ("interchange", p / "interchange.json"),
154 ("batch", p / "batch_manifest.json"),
155 ("kg", p / "results" / "knowledge_graph.json"),
156 ("kg", p / "knowledge_graph.json"),
157 ]
158 for itype, candidate in candidates:
159 if candidate.exists():
160 return itype, candidate
161 raise FileNotFoundError(
162 f"No recognised planopticon file found in {p}. "
163 "Expected manifest.json, interchange.json, "
164 "batch_manifest.json, or knowledge_graph.json."
165 )
166
167 name = p.name.lower()
168 if "manifest" in name and "batch" not in name:
169 return "manifest", p
170 if "interchange" in name:
171 return "interchange", p
172 if "batch" in name:
173 return "batch", p
174 # Default: treat as knowledge_graph.j
--- a/navegador/pm.py
+++ b/navegador/pm.py
@@ -0,0 +1,234 @@
1
+"""
2
+PM tool integration — ingest project management tickets and cross-link to code.
3
+
4
+Issue: #53
5
+
6
+Supports:
7
+ - GitHub Issues (fully implemented)
8
+ - Linear (stub — raises NotImplementedError)
9
+ - Jira (stub — raises NotImplementedError)
10
+
11
+Tickets are stored as Rule nodes (they represent commitments/requirements)
12
+and linked to code symbols by name mention similarity.
13
+
14
+Usage::
15
+
16
+ from navegador.pm import TicketIngester
17
+
18
+ ing = TicketIngester(store)
19
+
20
+ # GitHub
21
+ stats = ing.ingest_github_issues("owner/repo", token="ghp_...")
22
+
23
+ # Linear (stub)
24
+ stats = ing.ingest_linear(api_key="lin_...", project="MyProject")
25
+
26
+ # Jira (stub)
27
+ stats = ing.ingest_jira(url="https://company.atlassian.net", token="...")
28
+"""
29
+
30
+from __future__ import annotations
31
+
32
+import logging
33
+import re
34
+from typing import Any
35
+
36
+from navegador.graph.schema import EdgeType, NodeLabel
37
+from navegador.graph.store import GraphStore
38
+
39
+logger = logging.getLogger(__name__)
40
+
41
+
42
+# ── Ticket node label ─────────────────────────────────────────────────────────
43
+# Tickets are stored under a synthetic "Ticket" label that maps to Rule in the
44
+# schema — they represent requirements and commitments from the PM tool.
45
+_TICKET_LABEL = NodeLabel.Rule
46
+
47
+
48
+class TicketIngester:
49
+ """
50
+ Ingests project management tickets into the knowledge graph.
51
+
52
+ Each ticket becomes a Rule node with::
53
+
54
+ name — "#<number>: <title>" (GitHub) or the ticket ID
55
+ description — ticket body / description
56
+ domain created,
57
+ — repo name"code`` runs af"verity — "i )
58
+
59
+ c.name"
60
+ )bel
61
+ rationale — ticket URL for traceability
62
+
63
+ After ingestion, ``_link_to_code`` runs a lightweight name-match pass to
64
+ create ANNOTATES edges from each ticket to code symbols it mentions.
65
+ """
66
+
67
+ def __init__(self, store: GraphStore) -> None:
68
+ self.store = store
69
+
70
+ # ── GitHub Issues ─────────────────────────────────────────────────────────
71
+
72
+ def ingest_github_issues(
73
+ self,
74
+ repo: str,
75
+ token: str = "",
76
+ state: str = "open",
77
+ limit: int = 100,
78
+ ) -> dict[str, Any]:
79
+ """
80
+ Fetch GitHub issues for *repo* and ingest them into the graph.
81
+
82
+ Parameters
83
+ ----------
84
+ repo:
85
+ Repository in ``"owner/repo"`` format.
86
+ token:
87
+ GitHub personal access token (or ``GITHUB_TOKEN`` env var value).
88
+ If empty, unauthenticated requests are used (60 req/h rate limit).
89
+ state:
90
+ ``"open"``, ``"closed"``, or ``"all"``.
91
+ limit:
92
+ Maximum number of issues to fetch (GitHub paginates at 100/page).
93
+
94
+ Returns
95
+ -------
96
+ dict with keys: tickets, linked
97
+ """
98
+ import urllib.request
99
+
100
+ headers: dict[str, str] = {
101
+ "Accept": "application/vnd.github+json",
102
+ "X-GitHub-Api-Version": "2022-11-28",
103
+ "User-Agent": "navegador/0.4",
104
+ }
105
+ if token:
106
+ headers["Authorization"] = f"Bearer {token}"
107
+
108
+ per_page = min(limit, 100)
109
+ url = f"https://api.github.com/repos/{repo}/issues?state={state}&per_page={per_page}&page=1"
110
+
111
+ try:
112
+ req = urllib.request.Request(url, headers=headers)
113
+ with urllib.request.urlopen(req, timeout=15) as resp:
114
+ import json
115
+
116
+ issues: list[dict] = json.loads(resp.read().decode())
117
+ except Exception as exc:
118
+ raise RuntimeError(f"Failed to fetch GitHub issues for {repo!r}: {exc}") from exc
119
+
120
+ # Filter out pull requests (GitHub issues API returns both)
121
+ issues = [i for i in issues if "pull_request" not in i]
122
+
123
+ domain = repo.split("/")[-1] if "/" in repo else repo
124
+ tickets_created = 0
125
+
126
+ for issue in issues[:limit]:
127
+ number = issue.get("number", 0)
128
+ title = (issue.get("title") or "").strip()
129
+ body = (issue.get("body") or "").strip()
130
+ html_url = issue.get("html_url", "")
131
+ labels = [lbl.get("name", "") for lbl in issue.get("labels", [])]
132
+ severity = self._github_severity(labels)
133
+
134
+ node_name = f"#{number}: {title}"[:200]
135
+ self.store.create_node(
136
+ _TICKET_LABEL,
137
+ {
138
+ "name": node_name,
139
+ "description": body[:2000],
140
+ "domain": domain,
141
+ "severity": severity,
142
+ "rationale": html_url,
143
+ "examples": "",
144
+ },
145
+ )
146
+ tickets_created += 1
147
+
148
+ # Assignees → Person nodes + ASSIGNED_TO edges
149
+ for assignee in issue.get("assignees", []) or []:
150
+ login = (assignee.get("login") or "").strip()
151
+ if login:
152
+ self.store.create_node(
153
+ NodeLabel.Person,
154
+ {"name": login, "email": "", "role": "", "team": ""},
155
+ )
156
+ self.store.create_edge(
157
+ _TICKET_LABEL,
158
+ {"name": node_name},
159
+ EdgeType.ASSIGNED_TO,
160
+ NodeLabel.Person,
161
+ {"name": login},
162
+ )
163
+
164
+ linked = self._link_to_code(domain)
165
+ logger.info(
166
+ "TicketIngester.ingest_github_issues(%s): tickets=%d linked=%d",
167
+ repo,
168
+ tickets_created,
169
+ linked,
170
+ )
171
+ return {"tickets": tickets_created, "linked": linked}
172
+
173
+ # ── Linear (stub) ─────────────────────────────────────────────────────────
174
+
175
+ def ingest_linear(self, api_key: str, project: str = "") -> dict[str, Any]:
176
+ """
177
+ Ingest Linear issues into the knowledge graph.
178
+
179
+ .. note::
180
+ Not yet implemented. Linear GraphQL API support is planned
181
+ for a future release. Track progress at:
182
+ https://github.com/weareconflict/navegador/issues/53
183
+
184
+ Raises
185
+ ------
186
+ NotImplementedError
187
+ """
188
+ raise NotImplementedError(
189
+ "Linear ingestion is not yet implemented. "
190
+ "Planned for a future release — see GitHub issue #53. "
191
+ "To contributly implemented)
192
+ - Li"""
193
+PM tool integration — inges"
194
+ s): tickets"RETURN t.na)TURN t.name, t.description"
195
+ code_cypher = (
196
+ "MATCH (c) WHERE c:Funct"
197
+ "
198
+ namefor row in
199
+ t_result.result_set or []) if row[0]
200
+ ]
201
+ code_nodes = [
202
+
203
+ name(str(row[0]
204
+ if row[0] and row[1]
205
+ ]
206
+
207
+ if not tickets or not code_nodes:
208
+ return 0
209
+
210
+ linked = 0
211
+ for t_name, t_desc in tickets:
212
+ combined = f"{t_name} {t_desc}"
213
+ tokens = {
214
+ w.lower()
215
+ for w in re.
216
+ if len(w) >= 4ANNOTATES edg EdgeType.ASSIGNED_T+ c_label EdgeType.ASSIGNED_T+ " {name: $cn}) "
217
+ "MERGE (t)-[r:ANNOTATES]->(c)"
218
+ )
219
+ try:
220
+ self.store.query(cypher, {"tn": t_name, "cn": c_name})
221
+ linked += 1
222
+ except Exception:
223
+ EdgeType.ASSIGNED_T logger.debug("Tic EdgeType.ASSIGNED_T)
224
+ return linked
225
+
226
+ @staticmethod
227
+ def _github_severity(labels: list[str]) -> str:
228
+ """Map GitHub label names to navegador severity levels."""
229
+ label_lower = {lbl.lower() for lbl in labels}
230
+ if label_lower & {"critical", "blocker", "urgent", "p0"}:
231
+ return "critical"
232
+ if label_lower & {"bug", "high", "p1", "important"}:
233
+ return "warning"
234
+ return "info"
--- a/navegador/pm.py
+++ b/navegador/pm.py
@@ -0,0 +1,234 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/navegador/pm.py
+++ b/navegador/pm.py
@@ -0,0 +1,234 @@
1 """
2 PM tool integration — ingest project management tickets and cross-link to code.
3
4 Issue: #53
5
6 Supports:
7 - GitHub Issues (fully implemented)
8 - Linear (stub — raises NotImplementedError)
9 - Jira (stub — raises NotImplementedError)
10
11 Tickets are stored as Rule nodes (they represent commitments/requirements)
12 and linked to code symbols by name mention similarity.
13
14 Usage::
15
16 from navegador.pm import TicketIngester
17
18 ing = TicketIngester(store)
19
20 # GitHub
21 stats = ing.ingest_github_issues("owner/repo", token="ghp_...")
22
23 # Linear (stub)
24 stats = ing.ingest_linear(api_key="lin_...", project="MyProject")
25
26 # Jira (stub)
27 stats = ing.ingest_jira(url="https://company.atlassian.net", token="...")
28 """
29
30 from __future__ import annotations
31
32 import logging
33 import re
34 from typing import Any
35
36 from navegador.graph.schema import EdgeType, NodeLabel
37 from navegador.graph.store import GraphStore
38
39 logger = logging.getLogger(__name__)
40
41
42 # ── Ticket node label ─────────────────────────────────────────────────────────
43 # Tickets are stored under a synthetic "Ticket" label that maps to Rule in the
44 # schema — they represent requirements and commitments from the PM tool.
45 _TICKET_LABEL = NodeLabel.Rule
46
47
48 class TicketIngester:
49 """
50 Ingests project management tickets into the knowledge graph.
51
52 Each ticket becomes a Rule node with::
53
54 name — "#<number>: <title>" (GitHub) or the ticket ID
55 description — ticket body / description
56 domain created,
57 — repo name"code`` runs af"verity — "i )
58
59 c.name"
60 )bel
61 rationale — ticket URL for traceability
62
63 After ingestion, ``_link_to_code`` runs a lightweight name-match pass to
64 create ANNOTATES edges from each ticket to code symbols it mentions.
65 """
66
67 def __init__(self, store: GraphStore) -> None:
68 self.store = store
69
70 # ── GitHub Issues ─────────────────────────────────────────────────────────
71
72 def ingest_github_issues(
73 self,
74 repo: str,
75 token: str = "",
76 state: str = "open",
77 limit: int = 100,
78 ) -> dict[str, Any]:
79 """
80 Fetch GitHub issues for *repo* and ingest them into the graph.
81
82 Parameters
83 ----------
84 repo:
85 Repository in ``"owner/repo"`` format.
86 token:
87 GitHub personal access token (or ``GITHUB_TOKEN`` env var value).
88 If empty, unauthenticated requests are used (60 req/h rate limit).
89 state:
90 ``"open"``, ``"closed"``, or ``"all"``.
91 limit:
92 Maximum number of issues to fetch (GitHub paginates at 100/page).
93
94 Returns
95 -------
96 dict with keys: tickets, linked
97 """
98 import urllib.request
99
100 headers: dict[str, str] = {
101 "Accept": "application/vnd.github+json",
102 "X-GitHub-Api-Version": "2022-11-28",
103 "User-Agent": "navegador/0.4",
104 }
105 if token:
106 headers["Authorization"] = f"Bearer {token}"
107
108 per_page = min(limit, 100)
109 url = f"https://api.github.com/repos/{repo}/issues?state={state}&per_page={per_page}&page=1"
110
111 try:
112 req = urllib.request.Request(url, headers=headers)
113 with urllib.request.urlopen(req, timeout=15) as resp:
114 import json
115
116 issues: list[dict] = json.loads(resp.read().decode())
117 except Exception as exc:
118 raise RuntimeError(f"Failed to fetch GitHub issues for {repo!r}: {exc}") from exc
119
120 # Filter out pull requests (GitHub issues API returns both)
121 issues = [i for i in issues if "pull_request" not in i]
122
123 domain = repo.split("/")[-1] if "/" in repo else repo
124 tickets_created = 0
125
126 for issue in issues[:limit]:
127 number = issue.get("number", 0)
128 title = (issue.get("title") or "").strip()
129 body = (issue.get("body") or "").strip()
130 html_url = issue.get("html_url", "")
131 labels = [lbl.get("name", "") for lbl in issue.get("labels", [])]
132 severity = self._github_severity(labels)
133
134 node_name = f"#{number}: {title}"[:200]
135 self.store.create_node(
136 _TICKET_LABEL,
137 {
138 "name": node_name,
139 "description": body[:2000],
140 "domain": domain,
141 "severity": severity,
142 "rationale": html_url,
143 "examples": "",
144 },
145 )
146 tickets_created += 1
147
148 # Assignees → Person nodes + ASSIGNED_TO edges
149 for assignee in issue.get("assignees", []) or []:
150 login = (assignee.get("login") or "").strip()
151 if login:
152 self.store.create_node(
153 NodeLabel.Person,
154 {"name": login, "email": "", "role": "", "team": ""},
155 )
156 self.store.create_edge(
157 _TICKET_LABEL,
158 {"name": node_name},
159 EdgeType.ASSIGNED_TO,
160 NodeLabel.Person,
161 {"name": login},
162 )
163
164 linked = self._link_to_code(domain)
165 logger.info(
166 "TicketIngester.ingest_github_issues(%s): tickets=%d linked=%d",
167 repo,
168 tickets_created,
169 linked,
170 )
171 return {"tickets": tickets_created, "linked": linked}
172
173 # ── Linear (stub) ─────────────────────────────────────────────────────────
174
175 def ingest_linear(self, api_key: str, project: str = "") -> dict[str, Any]:
176 """
177 Ingest Linear issues into the knowledge graph.
178
179 .. note::
180 Not yet implemented. Linear GraphQL API support is planned
181 for a future release. Track progress at:
182 https://github.com/weareconflict/navegador/issues/53
183
184 Raises
185 ------
186 NotImplementedError
187 """
188 raise NotImplementedError(
189 "Linear ingestion is not yet implemented. "
190 "Planned for a future release — see GitHub issue #53. "
191 "To contributly implemented)
192 - Li"""
193 PM tool integration — inges"
194 s): tickets"RETURN t.na)TURN t.name, t.description"
195 code_cypher = (
196 "MATCH (c) WHERE c:Funct"
197 "
198 namefor row in
199 t_result.result_set or []) if row[0]
200 ]
201 code_nodes = [
202
203 name(str(row[0]
204 if row[0] and row[1]
205 ]
206
207 if not tickets or not code_nodes:
208 return 0
209
210 linked = 0
211 for t_name, t_desc in tickets:
212 combined = f"{t_name} {t_desc}"
213 tokens = {
214 w.lower()
215 for w in re.
216 if len(w) >= 4ANNOTATES edg EdgeType.ASSIGNED_T+ c_label EdgeType.ASSIGNED_T+ " {name: $cn}) "
217 "MERGE (t)-[r:ANNOTATES]->(c)"
218 )
219 try:
220 self.store.query(cypher, {"tn": t_name, "cn": c_name})
221 linked += 1
222 except Exception:
223 EdgeType.ASSIGNED_T logger.debug("Tic EdgeType.ASSIGNED_T)
224 return linked
225
226 @staticmethod
227 def _github_severity(labels: list[str]) -> str:
228 """Map GitHub label names to navegador severity levels."""
229 label_lower = {lbl.lower() for lbl in labels}
230 if label_lower & {"critical", "blocker", "urgent", "p0"}:
231 return "critical"
232 if label_lower & {"bug", "high", "p1", "important"}:
233 return "warning"
234 return "info"
--- a/navegador/submodules.py
+++ b/navegador/submodules.py
@@ -0,0 +1,216 @@
1
+"""
2
+VCS submodule traversal — ingest a parent repo and all its git submodules as
3
+linked Repository nodes.
4
+
5
+Issue: #61
6
+
7
+Usage::
8
+
9
+ from navegador.submodules import SubmoduleIngester
10
+
11
+ ing = SubmoduleIngester(store)
12
+ submodules = ing.detect_submodules("/path/to/repo")
13
+ stats = ing.ingest_with_submodules("/path/to/repo", clear=False)
14
+"""
15
+
16
+from __future__ import annotations
17
+
18
+import logging
19
+import re
20
+from pathlib import Path
21
+from typing import Any
22
+
23
+from navegador.graph.schema import EdgeType, NodeLabel
24
+from navegador.graph.store import GraphStore
25
+
26
+logger = logging.getLogger(__name__)
27
+
28
+
29
+class SubmoduleIngester:
30
+ """
31
+ Detects and ingests git submodules as linked Repository nodes.
32
+
33
+ After ingesting the parent repository (using RepoIngester) each submodule
34
+ is also ingested and a DEPENDS_ON edge is created from the parent
35
+ Repository node to each submodule Repository node.
36
+ """
37
+
38
+ def __init__(self, store: GraphStore) -> None:
39
+ self.store = store
40
+
41
+ # ── Submodule detection ───────────────────────────────────────────────────
42
+
43
+ def detect_submodules(self, repo_path: str | Path) -> list[dict[str, Any]]:
44
+ """
45
+ Parse ``.gitmodules`` and return a list of submodule descriptors.
46
+
47
+ Parameters
48
+ ----------
49
+ repo_path:
50
+ Root of the parent repository.
51
+
52
+ Returns
53
+ -------
54
+ list of dicts with keys:
55
+ ``name`` — submodule logical name
56
+ ``path`` — relative path within the parent repo
57
+ ``url`` — remote URL
58
+ ``abs_path`` — absolute filesystem path (``repo_path / path``)
59
+ """
60
+ repo_root = Path(repo_path).resolve()
61
+ gitmodules = repo_root / ".gitmodules"
62
+
63
+ if not gitmodules.exists():
64
+ return []
65
+
66
+ text = gitmodules.read_text(encoding="utf-8")
67
+ return _parse_gitmodules(text, repo_root)
68
+
69
+ # ── Ingestion ─────────────────────────────────────────────────────────────
70
+
71
+ def ingest_with_submodules(
72
+ self,
73
+ repo_path: str | Path,
74
+ clear: bool = False,
75
+ ) -> dict[str, Any]:
76
+ """
77
+ Ingest the parent repository and all discovered submodules.
78
+
79
+ For each submodule whose ``abs_path`` exists on disk the full code
80
+ ingestion pipeline (``RepoIngester``) is run. Repository nodes are
81
+ linked with DEPENDS_ON edges.
82
+
83
+ Parameters
84
+ ----------
85
+ repo_path:
86
+ Root of the parent (super-project) repository.
87
+ clear:
88
+ If ``True``, wipe the graph before ingesting the parent.
89
+
90
+ Returns
91
+ -------
92
+ dict with keys:
93
+ ``parent`` — ingestion stats for the parent repo
94
+ ``submodules`` — dict keyed by submodule name → stats or error
95
+ ``total_files`` — aggregate file count
96
+ """
97
+ from navegador.ingestion.parser import RepoIngester
98
+
99
+ repo_root = Path(repo_path).resolve()
100
+ parent_name = repo_root.name
101
+
102
+ ingester = RepoIngester(self.store)
103
+
104
+ # Ingest parent
105
+ logger.info("SubmoduleIngester: ingesting parent %s", repo_root)
106
+ parent_stats = ingester.ingest(str(repo_root), clear=clear)
107
+
108
+ # Ensure parent Repository node exists
109
+ self.store.create_node(
110
+ NodeLabel.Repository,
111
+ {
112
+ "name": parent_name,
113
+ "path": str(repo_root),
114
+ "language": "",
115
+ "description": "parent repository",
116
+ },
117
+ )
118
+
119
+ submodules = self.detect_submodules(repo_root)
120
+ sub_results: dict[str, Any] = {}
121
+ total_files = parent_stats.get("files", 0)
122
+
123
+ for sub in submodules:
124
+ sub_name = sub["name"]
125
+ sub_path = sub["abs_path"]
126
+
127
+ if not Path(sub_path).exists():
128
+ logger.warning(
129
+ "SubmoduleIngester: submodule %s not found at %s (not initialised?)",
130
+ sub_name,
131
+ sub_path,
132
+ )
133
+ sub_results[sub_name] = {"error": f"path not found: {sub_path}"}
134
+ continue
135
+
136
+ logger.info("SubmoduleIngester: ingesting submodule %s → %s", sub_name, sub_path)
137
+ try:
138
+ sub_stats = ingester.ingest(str(sub_path), clear=False)
139
+ sub_results[sub_name] = sub_stats
140
+ total_files += sub_stats.get("files", 0)
141
+ except Exception as exc: # noqa: BLE001
142
+ logger.error("SubmoduleIngester: failed to ingest %s: %s", sub_name, exc)
143
+ sub_results[sub_name] = {"error": str(exc)}
144
+ continue
145
+
146
+ # Create submodule Repository node
147
+ self.store.create_node(
148
+ NodeLabel.Repository,
149
+ {
150
+ "name": sub_name,
151
+ "path": str(sub_path),
152
+ "language": "",
153
+ "description": sub.get("url", ""),
154
+ },
155
+ )
156
+
157
+ # parent -DEPENDS_ON-> submodule
158
+ self.store.create_edge(
159
+ NodeLabel.Repository,
160
+ {"name": parent_name},
161
+ EdgeType.DEPENDS_ON,
162
+ NodeLabel.Repository,
163
+ {"name": sub_name},
164
+ )
165
+
166
+ return {
167
+ "parent": parent_stats,
168
+ "submodules": sub_results,
169
+ "total_files": total_files,
170
+ }
171
+
172
+
173
+# ── Parser ────────────────────────────────────────────────────────────────────
174
+
175
+
176
+def _parse_gitmodules(text: str, repo_root: Path) -> list[dict[str, Any]]:
177
+ """
178
+ Parse a ``.gitmodules`` file into a list of submodule dicts.
179
+
180
+ ``.gitmodules`` format::
181
+
182
+ [submodule "name"]
183
+ path = relative/path
184
+ url = https://...
185
+ """
186
+ submodules: list[dict[str, Any]] = []
187
+ current: dict[str, str] = {}
188
+
189
+ for line in text.splitlines():
190
+ line = line.strip()
191
+
192
+ header = re.match(r'^\[submodule\s+"([^"]+)"\]$', line)
193
+ if header:
194
+ if current.get("name"):
195
+ submodules.append(_finalise(current, repo_root))
196
+ current = {"name": header.group(1)}
197
+ continue
198
+
199
+ kv = re.match(r"^(\w+)\s*=\s*(.+)$", line)
200
+ if kv:
201
+ current[kv.group(1).strip()] = kv.group(2).strip()
202
+
203
+ if current.get("name"):
204
+ submodules.append(_finalise(current, repo_root))
205
+
206
+ return submodules
207
+
208
+
209
+def _finalise(raw: dict[str, str], repo_root: Path) -> dict[str, Any]:
210
+ rel_path = raw.get("path", raw.get("name", ""))
211
+ return {
212
+ "name": raw["name"],
213
+ "path": rel_path,
214
+ "url": raw.get("url", ""),
215
+ "abs_path": str(repo_root / rel_path),
216
+ }
--- a/navegador/submodules.py
+++ b/navegador/submodules.py
@@ -0,0 +1,216 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/navegador/submodules.py
+++ b/navegador/submodules.py
@@ -0,0 +1,216 @@
1 """
2 VCS submodule traversal — ingest a parent repo and all its git submodules as
3 linked Repository nodes.
4
5 Issue: #61
6
7 Usage::
8
9 from navegador.submodules import SubmoduleIngester
10
11 ing = SubmoduleIngester(store)
12 submodules = ing.detect_submodules("/path/to/repo")
13 stats = ing.ingest_with_submodules("/path/to/repo", clear=False)
14 """
15
16 from __future__ import annotations
17
18 import logging
19 import re
20 from pathlib import Path
21 from typing import Any
22
23 from navegador.graph.schema import EdgeType, NodeLabel
24 from navegador.graph.store import GraphStore
25
26 logger = logging.getLogger(__name__)
27
28
class SubmoduleIngester:
    """
    Walks a super-project's git submodules and mirrors them in the graph.

    The parent repository is ingested first (via ``RepoIngester``); every
    discovered submodule is then ingested in turn, and each submodule's
    Repository node is attached to the parent through a DEPENDS_ON edge.
    """

    def __init__(self, store: GraphStore) -> None:
        self.store = store

    # ── Submodule detection ───────────────────────────────────────────────────

    def detect_submodules(self, repo_path: str | Path) -> list[dict[str, Any]]:
        """
        Read ``.gitmodules`` under *repo_path* and describe each submodule.

        Returns an empty list when no ``.gitmodules`` file is present.
        Each descriptor dict carries ``name`` (logical name), ``path``
        (relative to the parent repo), ``url`` (remote URL) and
        ``abs_path`` (``repo_path / path``).
        """
        root = Path(repo_path).resolve()
        modules_file = root / ".gitmodules"
        if not modules_file.exists():
            return []
        return _parse_gitmodules(modules_file.read_text(encoding="utf-8"), root)

    # ── Ingestion ─────────────────────────────────────────────────────────────

    def ingest_with_submodules(
        self,
        repo_path: str | Path,
        clear: bool = False,
    ) -> dict[str, Any]:
        """
        Ingest the parent repo, then every submodule that exists on disk.

        Parameters
        ----------
        repo_path:
            Root of the parent (super-project) repository.
        clear:
            When ``True`` the graph is wiped before the parent is ingested.

        Returns
        -------
        dict with ``parent`` (parent ingest stats), ``submodules``
        (per-submodule stats or ``{"error": ...}``) and ``total_files``
        (aggregate file count).
        """
        from navegador.ingestion.parser import RepoIngester

        root = Path(repo_path).resolve()
        parent_name = root.name
        repo_ingester = RepoIngester(self.store)

        logger.info("SubmoduleIngester: ingesting parent %s", root)
        parent_stats = repo_ingester.ingest(str(root), clear=clear)

        # Make sure a Repository node for the parent exists before linking.
        self.store.create_node(
            NodeLabel.Repository,
            {
                "name": parent_name,
                "path": str(root),
                "language": "",
                "description": "parent repository",
            },
        )

        results: dict[str, Any] = {}
        file_total = parent_stats.get("files", 0)

        for descriptor in self.detect_submodules(root):
            name = descriptor["name"]
            location = descriptor["abs_path"]

            # An uninitialised submodule has a .gitmodules entry but no files.
            if not Path(location).exists():
                logger.warning(
                    "SubmoduleIngester: submodule %s not found at %s (not initialised?)",
                    name,
                    location,
                )
                results[name] = {"error": f"path not found: {location}"}
                continue

            logger.info("SubmoduleIngester: ingesting submodule %s → %s", name, location)
            try:
                stats = repo_ingester.ingest(str(location), clear=False)
            except Exception as exc:  # noqa: BLE001
                logger.error("SubmoduleIngester: failed to ingest %s: %s", name, exc)
                results[name] = {"error": str(exc)}
                continue

            results[name] = stats
            file_total += stats.get("files", 0)

            # Register the submodule and hook it to the parent.
            self.store.create_node(
                NodeLabel.Repository,
                {
                    "name": name,
                    "path": str(location),
                    "language": "",
                    "description": descriptor.get("url", ""),
                },
            )
            self.store.create_edge(
                NodeLabel.Repository,
                {"name": parent_name},
                EdgeType.DEPENDS_ON,
                NodeLabel.Repository,
                {"name": name},
            )

        return {
            "parent": parent_stats,
            "submodules": results,
            "total_files": file_total,
        }
171
172
173 # ── Parser ────────────────────────────────────────────────────────────────────
174
175
176 def _parse_gitmodules(text: str, repo_root: Path) -> list[dict[str, Any]]:
177 """
178 Parse a ``.gitmodules`` file into a list of submodule dicts.
179
180 ``.gitmodules`` format::
181
182 [submodule "name"]
183 path = relative/path
184 url = https://...
185 """
186 submodules: list[dict[str, Any]] = []
187 current: dict[str, str] = {}
188
189 for line in text.splitlines():
190 line = line.strip()
191
192 header = re.match(r'^\[submodule\s+"([^"]+)"\]$', line)
193 if header:
194 if current.get("name"):
195 submodules.append(_finalise(current, repo_root))
196 current = {"name": header.group(1)}
197 continue
198
199 kv = re.match(r"^(\w+)\s*=\s*(.+)$", line)
200 if kv:
201 current[kv.group(1).strip()] = kv.group(2).strip()
202
203 if current.get("name"):
204 submodules.append(_finalise(current, repo_root))
205
206 return submodules
207
208
209 def _finalise(raw: dict[str, str], repo_root: Path) -> dict[str, Any]:
210 rel_path = raw.get("path", raw.get("name", ""))
211 return {
212 "name": raw["name"],
213 "path": rel_path,
214 "url": raw.get("url", ""),
215 "abs_path": str(repo_root / rel_path),
216 }
+133 -18
--- navegador/vcs.py
+++ navegador/vcs.py
@@ -205,47 +205,162 @@
205205
# ── Fossil ─────────────────────────────────────────────────────────────────────
206206
207207
208208
class FossilAdapter(VCSAdapter):
209209
"""
210
- VCS adapter stub for Fossil repositories.
210
+ VCS adapter for Fossil repositories.
211211
212
- ``is_repo()`` is fully implemented; all other methods raise
213
- ``NotImplementedError`` until a full implementation is added.
212
+ ``is_repo()`` checks for ``.fslckout`` / ``_FOSSIL_`` sentinel files.
213
+ All other methods run ``fossil`` sub-commands via subprocess.
214214
"""
215215
216
- _NOT_IMPLEMENTED_MSG = (
217
- "FossilAdapter.{method} is not yet implemented. "
218
- "Contributions welcome — see CONTRIBUTING.md for the VCS adapter guide."
219
- )
216
+ def _run(self, args: list[str], check: bool = True) -> subprocess.CompletedProcess:
217
+ """Run a fossil sub-command inside *repo_path* and return the result."""
218
+ return subprocess.run(
219
+ ["fossil", *args],
220
+ cwd=self.repo_path,
221
+ capture_output=True,
222
+ text=True,
223
+ check=check,
224
+ )
220225
221226
def is_repo(self) -> bool:
222227
"""Return True when *repo_path* looks like a Fossil checkout."""
223228
return (
224229
(self.repo_path / ".fslckout").exists()
225230
or (self.repo_path / "_FOSSIL_").exists()
226231
)
227232
228233
def current_branch(self) -> str:
229
- raise NotImplementedError(
230
- self._NOT_IMPLEMENTED_MSG.format(method="current_branch")
231
- )
234
+ """
235
+ Return the name of the current Fossil branch.
236
+
237
+ Runs ``fossil branch current`` and returns its output stripped of
238
+ whitespace.
239
+ """
240
+ result = self._run(["branch", "current"])
241
+ return result.stdout.strip()
232242
233243
def changed_files(self, since: str = "") -> list[str]:
234
- raise NotImplementedError(
235
- self._NOT_IMPLEMENTED_MSG.format(method="changed_files")
236
- )
244
+ """
245
+ Return a list of file paths that have changed.
246
+
247
+ Runs ``fossil changes --differ`` which reports files that differ
248
+ from the current check-in. The *since* parameter is not used by
249
+ Fossil's change model and is accepted for interface compatibility.
250
+ """
251
+ result = self._run(["changes", "--differ"])
252
+ files: list[str] = []
253
+ for line in result.stdout.splitlines():
254
+ # fossil changes output: "<STATUS> <path>"
255
+ parts = line.split(None, 1)
256
+ if len(parts) == 2:
257
+ files.append(parts[1].strip())
258
+ elif parts:
259
+ files.append(parts[0].strip())
260
+ return [f for f in files if f]
237261
238262
def file_history(self, file_path: str, limit: int = 10) -> list[dict]:
239
- raise NotImplementedError(
240
- self._NOT_IMPLEMENTED_MSG.format(method="file_history")
241
- )
263
+ """
264
+ Return up to *limit* timeline entries for *file_path*.
265
+
266
+ Runs ``fossil timeline --limit <n> --type ci --path <file>`` and
267
+ parses the output into a list of dicts with keys:
268
+ ``hash``, ``author``, ``date``, ``message``.
269
+ """
270
+ result = self._run([
271
+ "timeline",
272
+ "--limit", str(limit),
273
+ "--type", "ci",
274
+ "--path", file_path,
275
+ ])
276
+ return _parse_fossil_timeline(result.stdout)
242277
243278
def blame(self, file_path: str) -> list[dict]:
244
- raise NotImplementedError(
245
- self._NOT_IMPLEMENTED_MSG.format(method="blame")
279
+ """
280
+ Return per-line blame data for *file_path*.
281
+
282
+ Runs ``fossil annotate --log <file>`` and returns a list of dicts with
283
+ keys: ``line``, ``hash``, ``author``, ``content``.
284
+ """
285
+ result = self._run(["annotate", "--log", file_path])
286
+ return _parse_fossil_annotate(result.stdout)
287
+
288
+
289
+def _parse_fossil_timeline(output: str) -> list[dict]:
290
+ """
291
+ Parse ``fossil timeline`` output into a list of entry dicts.
292
+
293
+ Fossil timeline lines look like::
294
+
295
+ === 2024-01-15 ===
296
+ 14:23:07 [abc123def456] Commit message here. (user: alice, tags: trunk)
297
+
298
+ We emit one dict per timeline entry.
299
+ """
300
+ entries: list[dict] = []
301
+ current_date = ""
302
+
303
+ for line in output.splitlines():
304
+ line = line.strip()
305
+ if not line:
306
+ continue
307
+
308
+ # Date header: "=== 2024-01-15 ==="
309
+ if line.startswith("===") and line.endswith("==="):
310
+ current_date = line.strip("= ").strip()
311
+ continue
312
+
313
+ # Entry line: "HH:MM:SS [hashprefix] message (user: ..., tags: ...)"
314
+ import re
315
+
316
+ m = re.match(
317
+ r"(\d{2}:\d{2}:\d{2})\s+\[([0-9a-f]+)\]\s+(.*?)(?:\s+\(user:\s*(\w+).*\))?$",
318
+ line,
246319
)
320
+ if m:
321
+ time_part, hash_part, message, author = m.groups()
322
+ entries.append({
323
+ "hash": hash_part,
324
+ "author": author or "",
325
+ "date": f"{current_date} {time_part}".strip(),
326
+ "message": message.rstrip(),
327
+ })
328
+
329
+ return entries
330
+
331
+
332
+def _parse_fossil_annotate(output: str) -> list[dict]:
333
+ """
334
+ Parse ``fossil annotate --log`` output into a list of blame dicts.
335
+
336
+ Each line looks like::
337
+
338
+ 1.1 alice 2024-01-15: actual line content
339
+
340
+ We return one dict per source line with keys:
341
+ ``line``, ``hash``, ``author``, ``content``.
342
+ """
343
+ import re
344
+
345
+ entries: list[dict] = []
346
+ line_number = 0
347
+
348
+ for raw in output.splitlines():
349
+ # Pattern: "<version> <author> <date>: <content>"
350
+ m = re.match(r"(\S+)\s+(\S+)\s+\S+:\s+(.*)", raw)
351
+ if m:
352
+ version, author, content = m.groups()
353
+ line_number += 1
354
+ entries.append({
355
+ "line": line_number,
356
+ "hash": version,
357
+ "author": author,
358
+ "content": content,
359
+ })
360
+
361
+ return entries
247362
248363
249364
# ── Factory ────────────────────────────────────────────────────────────────────
250365
251366
252367
253368
ADDED tests/test_v04_batch3.py
--- navegador/vcs.py
+++ navegador/vcs.py
@@ -205,47 +205,162 @@
205 # ── Fossil ─────────────────────────────────────────────────────────────────────
206
207
208 class FossilAdapter(VCSAdapter):
209 """
210 VCS adapter stub for Fossil repositories.
211
212 ``is_repo()`` is fully implemented; all other methods raise
213 ``NotImplementedError`` until a full implementation is added.
214 """
215
216 _NOT_IMPLEMENTED_MSG = (
217 "FossilAdapter.{method} is not yet implemented. "
218 "Contributions welcome — see CONTRIBUTING.md for the VCS adapter guide."
219 )
 
 
 
 
 
220
221 def is_repo(self) -> bool:
222 """Return True when *repo_path* looks like a Fossil checkout."""
223 return (
224 (self.repo_path / ".fslckout").exists()
225 or (self.repo_path / "_FOSSIL_").exists()
226 )
227
228 def current_branch(self) -> str:
229 raise NotImplementedError(
230 self._NOT_IMPLEMENTED_MSG.format(method="current_branch")
231 )
 
 
 
 
 
232
233 def changed_files(self, since: str = "") -> list[str]:
234 raise NotImplementedError(
235 self._NOT_IMPLEMENTED_MSG.format(method="changed_files")
236 )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
237
238 def file_history(self, file_path: str, limit: int = 10) -> list[dict]:
239 raise NotImplementedError(
240 self._NOT_IMPLEMENTED_MSG.format(method="file_history")
241 )
 
 
 
 
 
 
 
 
 
 
 
242
243 def blame(self, file_path: str) -> list[dict]:
244 raise NotImplementedError(
245 self._NOT_IMPLEMENTED_MSG.format(method="blame")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
246 )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
247
248
249 # ── Factory ────────────────────────────────────────────────────────────────────
250
251
252
253 ADDED tests/test_v04_batch3.py
--- navegador/vcs.py
+++ navegador/vcs.py
@@ -205,47 +205,162 @@
205 # ── Fossil ─────────────────────────────────────────────────────────────────────
206
207
class FossilAdapter(VCSAdapter):
    """
    VCS adapter that shells out to the ``fossil`` command-line tool.

    Repository detection relies on the ``.fslckout`` / ``_FOSSIL_``
    sentinel files; every other operation runs a ``fossil`` sub-command
    via subprocess.
    """

    def _run(self, args: list[str], check: bool = True) -> subprocess.CompletedProcess:
        """Execute ``fossil <args>`` inside *repo_path*, capturing text output."""
        command = ["fossil"]
        command.extend(args)
        return subprocess.run(
            command,
            cwd=self.repo_path,
            capture_output=True,
            text=True,
            check=check,
        )

    def is_repo(self) -> bool:
        """Return True when *repo_path* looks like a Fossil checkout."""
        sentinels = (".fslckout", "_FOSSIL_")
        return any((self.repo_path / marker).exists() for marker in sentinels)

    def current_branch(self) -> str:
        """Name of the checked-out branch, via ``fossil branch current``."""
        return self._run(["branch", "current"]).stdout.strip()

    def changed_files(self, since: str = "") -> list[str]:
        """
        File paths that differ from the current check-in.

        Backed by ``fossil changes --differ``. *since* exists only for
        interface compatibility; Fossil's change model does not use it.
        """
        output = self._run(["changes", "--differ"]).stdout
        changed: list[str] = []
        for row in output.splitlines():
            # Rows look like "<STATUS> <path>"; a bare token is kept as-is.
            pieces = row.split(None, 1)
            if pieces:
                changed.append(pieces[-1].strip())
        return [path for path in changed if path]

    def file_history(self, file_path: str, limit: int = 10) -> list[dict]:
        """
        Up to *limit* check-in timeline entries touching *file_path*.

        Each entry dict carries ``hash``, ``author``, ``date`` and
        ``message`` (see ``_parse_fossil_timeline``).
        """
        proc = self._run(
            ["timeline", "--limit", str(limit), "--type", "ci", "--path", file_path]
        )
        return _parse_fossil_timeline(proc.stdout)

    def blame(self, file_path: str) -> list[dict]:
        """
        Per-line annotation data for *file_path*.

        Backed by ``fossil annotate --log``; dicts carry ``line``,
        ``hash``, ``author`` and ``content``.
        """
        return _parse_fossil_annotate(self._run(["annotate", "--log", file_path]).stdout)
287
288
289 def _parse_fossil_timeline(output: str) -> list[dict]:
290 """
291 Parse ``fossil timeline`` output into a list of entry dicts.
292
293 Fossil timeline lines look like::
294
295 === 2024-01-15 ===
296 14:23:07 [abc123def456] Commit message here. (user: alice, tags: trunk)
297
298 We emit one dict per timeline entry.
299 """
300 entries: list[dict] = []
301 current_date = ""
302
303 for line in output.splitlines():
304 line = line.strip()
305 if not line:
306 continue
307
308 # Date header: "=== 2024-01-15 ==="
309 if line.startswith("===") and line.endswith("==="):
310 current_date = line.strip("= ").strip()
311 continue
312
313 # Entry line: "HH:MM:SS [hashprefix] message (user: ..., tags: ...)"
314 import re
315
316 m = re.match(
317 r"(\d{2}:\d{2}:\d{2})\s+\[([0-9a-f]+)\]\s+(.*?)(?:\s+\(user:\s*(\w+).*\))?$",
318 line,
319 )
320 if m:
321 time_part, hash_part, message, author = m.groups()
322 entries.append({
323 "hash": hash_part,
324 "author": author or "",
325 "date": f"{current_date} {time_part}".strip(),
326 "message": message.rstrip(),
327 })
328
329 return entries
330
331
332 def _parse_fossil_annotate(output: str) -> list[dict]:
333 """
334 Parse ``fossil annotate --log`` output into a list of blame dicts.
335
336 Each line looks like::
337
338 1.1 alice 2024-01-15: actual line content
339
340 We return one dict per source line with keys:
341 ``line``, ``hash``, ``author``, ``content``.
342 """
343 import re
344
345 entries: list[dict] = []
346 line_number = 0
347
348 for raw in output.splitlines():
349 # Pattern: "<version> <author> <date>: <content>"
350 m = re.match(r"(\S+)\s+(\S+)\s+\S+:\s+(.*)", raw)
351 if m:
352 version, author, content = m.groups()
353 line_number += 1
354 entries.append({
355 "line": line_number,
356 "hash": version,
357 "author": author,
358 "content": content,
359 })
360
361 return entries
362
363
364 # ── Factory ────────────────────────────────────────────────────────────────────
365
366
367
368 ADDED tests/test_v04_batch3.py
--- a/tests/test_v04_batch3.py
+++ b/tests/test_v04_batch3.py
@@ -0,0 +1,1234 @@
1
+"""
2
+Tests for navegador v0.4 batch 3 — issues #7, #18, #53, #55, #58, #61, #62.
3
+
4
+Covers:
5
+ #7 / #18 — PlanopticonPipeline (pipeline, action items, decision timeline, auto-link)
6
+ #53 — TicketIngester (GitHub, Linear stub, Jira stub)
7
+ #55 — FossilAdapter (current_branch, changed_files, file_history, blame)
8
+ #58 — DependencyIngester (npm, pip/requirements.txt, pip/pyproject.toml, cargo)
9
+ #61 — SubmoduleIngester (detect_submodules, ingest_with_submodules)
10
+ #62 — WorkspaceMode enum, WorkspaceManager (unified + federated)
11
+"""
12
+
13
+from __future__ import annotations
14
+
15
+import json
16
+import subprocess
17
+import tempfile
18
+from pathlib import Path
19
+from unittest.mock import MagicMock, patch
20
+
21
+import pytest
22
+
23
+
24
+# ── Shared mock store factory ─────────────────────────────────────────────────
25
+
26
+
27
+def _make_store():
28
+ store = MagicMock()
29
+ store.query.return_value = MagicMock(result_set=[])
30
+ return store
31
+
32
+
33
+# =============================================================================
34
+# #7 / #18 — PlanopticonPipeline
35
+# =============================================================================
36
+
37
+
38
class TestPlanopticonPipelineDetectInput:
    """_detect_input correctly identifies file types from path.

    PlanopticonPipeline is imported lazily inside each test so that
    collection does not fail when the module is unavailable.  (The class
    previously held an unused eager import alias ``_Pipeline``, which
    defeated that lazy-import pattern; it has been removed.)
    """

    def test_manifest_file(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        f = tmp_path / "manifest.json"
        f.write_text("{}")
        itype, resolved = PlanopticonPipeline._detect_input(f)
        assert itype == "manifest"
        assert resolved == f

    def test_interchange_file(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        f = tmp_path / "interchange.json"
        f.write_text("{}")
        itype, _ = PlanopticonPipeline._detect_input(f)
        assert itype == "interchange"

    def test_batch_file(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        f = tmp_path / "batch_manifest.json"
        f.write_text("{}")
        itype, _ = PlanopticonPipeline._detect_input(f)
        assert itype == "batch"

    def test_kg_file_default(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        f = tmp_path / "knowledge_graph.json"
        f.write_text("{}")
        itype, _ = PlanopticonPipeline._detect_input(f)
        assert itype == "kg"

    def test_directory_with_manifest(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        (tmp_path / "manifest.json").write_text("{}")
        # Fix: the resolved path was previously bound to an unused local.
        itype, _ = PlanopticonPipeline._detect_input(tmp_path)
        assert itype == "manifest"

    def test_directory_without_known_files_raises(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        with pytest.raises(FileNotFoundError):
            PlanopticonPipeline._detect_input(tmp_path)
88
+
89
+
90
class TestPlanopticonPipelineRun:
    """PlanopticonPipeline.run delegates to PlanopticonIngester and auto-links."""

    def test_run_returns_stats_with_linked_key(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph_file = tmp_path / "knowledge_graph.json"
        graph_file.write_text(json.dumps({"nodes": [], "relationships": [], "sources": []}))

        pipeline = PlanopticonPipeline(_make_store(), source_tag="test")
        stats = pipeline.run(str(graph_file))

        assert "nodes" in stats
        assert "linked" in stats

    def test_run_calls_ingester(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        payload = {
            "nodes": [{"id": "n1", "type": "concept", "name": "Auth"}],
            "relationships": [],
            "sources": [],
        }
        graph_file = tmp_path / "knowledge_graph.json"
        graph_file.write_text(json.dumps(payload))

        backing_store = _make_store()
        stats = PlanopticonPipeline(backing_store).run(str(graph_file), source_tag="Meeting")

        assert isinstance(stats, dict)
        # create_node should have been called at least once for the concept node
        backing_store.create_node.assert_called()
125
+
126
+
127
class TestExtractActionItems:
    """extract_action_items pulls action items from various KG data formats."""

    def test_action_items_list(self):
        from navegador.planopticon_pipeline import ActionItem, PlanopticonPipeline

        data = {
            "action_items": [
                {"action": "Write tests", "assignee": "Alice", "priority": "high"},
                {"action": "Deploy service", "assignee": "", "priority": "info"},
            ]
        }
        extracted = PlanopticonPipeline.extract_action_items(data)
        assert len(extracted) == 2
        assert all(isinstance(entry, ActionItem) for entry in extracted)
        assert extracted[0].action == "Write tests"
        assert extracted[0].assignee == "Alice"
        assert extracted[1].action == "Deploy service"

    def test_blank_actions_skipped(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        data = {"action_items": [{"action": " ", "assignee": "Bob"}]}
        assert PlanopticonPipeline.extract_action_items(data) == []

    def test_entities_with_task_type(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        data = {
            "entities": [
                {"planning_type": "task", "name": "Refactor auth module"},
                {"planning_type": "decision", "name": "Use PostgreSQL"},
            ]
        }
        extracted = PlanopticonPipeline.extract_action_items(data)
        assert len(extracted) == 1
        assert extracted[0].action == "Refactor auth module"

    def test_nodes_with_action_item_type(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        data = {"nodes": [{"type": "action_item", "name": "Update documentation"}]}
        extracted = PlanopticonPipeline.extract_action_items(data)
        assert len(extracted) == 1
        assert extracted[0].action == "Update documentation"

    def test_empty_data_returns_empty_list(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        assert PlanopticonPipeline.extract_action_items({}) == []

    def test_action_item_to_dict(self):
        from navegador.planopticon_pipeline import ActionItem

        serialized = ActionItem(action="Do thing", assignee="Carol", priority="critical").to_dict()
        assert serialized["action"] == "Do thing"
        assert serialized["assignee"] == "Carol"
        assert serialized["priority"] == "critical"
191
+
192
+
193
class TestBuildDecisionTimeline:
    """build_decision_timeline queries the store and returns chronological list."""

    def test_returns_list_from_store(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        rows = [
            ["Use microservices", "Split monolith", "arch", "accepted", "Scalability", "2024-01-10"],
            ["Use PostgreSQL", "Relational DB", "data", "accepted", "ACID", "2024-02-01"],
        ]
        store = _make_store()
        store.query.return_value = MagicMock(result_set=rows)

        timeline = PlanopticonPipeline.build_decision_timeline(store)

        assert len(timeline) == 2
        assert timeline[0]["name"] == "Use microservices"
        assert timeline[0]["date"] == "2024-01-10"

    def test_returns_empty_on_query_failure(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        store = _make_store()
        store.query.side_effect = Exception("DB error")

        # A failing query degrades to an empty timeline rather than raising.
        assert PlanopticonPipeline.build_decision_timeline(store) == []

    def test_entry_has_required_keys(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        store = _make_store()
        store.query.return_value = MagicMock(
            result_set=[["D1", "Desc", "domain", "accepted", "rationale", "2024-01-01"]]
        )

        entry = PlanopticonPipeline.build_decision_timeline(store)[0]
        assert {"name", "description", "domain", "status", "rationale", "date"} <= set(entry.keys())
229
+
230
+
231
class TestAutoLinkToCode:
    """auto_link_to_code matches knowledge nodes to code by name similarity."""

    def test_returns_zero_when_no_nodes(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        store = _make_store()
        store.query.return_value = MagicMock(result_set=[])
        assert PlanopticonPipeline.auto_link_to_code(store) == 0

    def test_links_matching_nodes(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        store = _make_store()
        # First query returns the knowledge nodes, second the code nodes;
        # every later (MERGE) query just returns an empty result set.
        replies = iter([
            MagicMock(result_set=[["Concept", "authenticate handler"]]),
            MagicMock(result_set=[["Function", "authenticate_user"]]),
        ])

        def fake_query(cypher, params=None):
            return next(replies, MagicMock(result_set=[]))

        store.query.side_effect = fake_query
        # "authenticate" (12 chars, ≥4) is contained in "authenticate_user"
        assert PlanopticonPipeline.auto_link_to_code(store) >= 1

    def test_short_tokens_skipped(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        store = _make_store()
        replies = iter([
            MagicMock(result_set=[["Concept", "API"]]),  # every token shorter than 4 chars
            MagicMock(result_set=[["Function", "api_handler"]]),
        ])
        store.query.side_effect = lambda cypher, params=None: next(
            replies, MagicMock(result_set=[])
        )
        # "api" is only 3 chars — should not match
        assert PlanopticonPipeline.auto_link_to_code(store) == 0

    def test_returns_zero_on_query_failure(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        store = _make_store()
        store.query.side_effect = Exception("boom")
        assert PlanopticonPipeline.auto_link_to_code(store) == 0
293
+
294
+
295
+# =============================================================================
296
+# #53 — TicketIngester
297
+# =============================================================================
298
+
299
+
300
class TestTicketIngesterGitHub:
    """TicketIngester.ingest_github_issues fetches and ingests GitHub issues."""

    def _make_issue(self, number=1, title="Fix bug", body="Details", labels=None, assignees=None):
        """Build a minimal GitHub issue payload shaped like the REST API response."""
        return {
            "number": number,
            "title": title,
            "body": body,
            "html_url": f"https://github.com/owner/repo/issues/{number}",
            "labels": [{"name": label} for label in (labels or [])],
            "assignees": [{"login": login} for login in (assignees or [])],
        }

    @staticmethod
    def _mock_response(payload):
        """Context-manager mock mimicking urllib's HTTP response.

        Factored out because three tests previously duplicated this mock
        plumbing verbatim; behavior is identical to the inline versions.
        """
        resp = MagicMock()
        resp.__enter__ = MagicMock(return_value=resp)
        resp.__exit__ = MagicMock(return_value=False)
        resp.read.return_value = json.dumps(payload).encode()
        return resp

    def test_ingest_creates_ticket_nodes(self):
        from navegador.pm import TicketIngester

        store = _make_store()
        store.query.return_value = MagicMock(result_set=[])
        ing = TicketIngester(store)

        issues = [self._make_issue(1, "Bug report"), self._make_issue(2, "Feature request")]
        with patch("urllib.request.urlopen", return_value=self._mock_response(issues)):
            stats = ing.ingest_github_issues("owner/repo", token="test_token")

        assert stats["tickets"] == 2
        assert "linked" in stats

    def test_pull_requests_filtered_out(self):
        from navegador.pm import TicketIngester

        store = _make_store()
        store.query.return_value = MagicMock(result_set=[])
        ing = TicketIngester(store)

        # Mix of a real issue and a PR (marked by the "pull_request" key).
        issue = self._make_issue(1, "Real issue")
        pr = {**self._make_issue(2, "A PR"), "pull_request": {"url": "..."}}

        with patch("urllib.request.urlopen", return_value=self._mock_response([issue, pr])):
            stats = ing.ingest_github_issues("owner/repo")

        assert stats["tickets"] == 1  # PR filtered out

    def test_assignees_become_person_nodes(self):
        from navegador.pm import TicketIngester

        store = _make_store()
        store.query.return_value = MagicMock(result_set=[])
        ing = TicketIngester(store)

        issue = self._make_issue(1, "Assign me", assignees=["alice"])
        with patch("urllib.request.urlopen", return_value=self._mock_response([issue])):
            ing.ingest_github_issues("owner/repo")

        # A Person node should have been created for "alice".
        person_calls = [
            c for c in store.create_node.call_args_list
            if c.args and hasattr(c.args[0], "value") and c.args[0].value == "Person"
        ]
        assert len(person_calls) >= 1

    def test_network_error_raises_runtime_error(self):
        from navegador.pm import TicketIngester

        ing = TicketIngester(_make_store())

        with patch("urllib.request.urlopen", side_effect=Exception("network error")):
            with pytest.raises(RuntimeError, match="Failed to fetch GitHub issues"):
                ing.ingest_github_issues("owner/repo")
389
+
390
+
391
class TestTicketIngesterSeverity:
    """_github_severity maps label names to severity levels."""

    def test_critical_label(self):
        from navegador.pm import TicketIngester

        for label in ("critical", "blocker"):
            assert TicketIngester._github_severity([label]) == "critical"

    def test_warning_label(self):
        from navegador.pm import TicketIngester

        for label in ("bug", "high"):
            assert TicketIngester._github_severity([label]) == "warning"

    def test_default_info(self):
        from navegador.pm import TicketIngester

        # No labels, or unrecognized labels, fall back to "info".
        assert TicketIngester._github_severity([]) == "info"
        assert TicketIngester._github_severity(["enhancement"]) == "info"
411
+
412
+
413
class TestTicketIngesterStubs:
    """Linear and Jira raise NotImplementedError with helpful messages."""

    def test_linear_raises_not_implemented(self):
        from navegador.pm import TicketIngester

        ingester = TicketIngester(_make_store())
        with pytest.raises(NotImplementedError, match="Linear"):
            ingester.ingest_linear("lin_apikey")

    def test_jira_raises_not_implemented(self):
        from navegador.pm import TicketIngester

        ingester = TicketIngester(_make_store())
        with pytest.raises(NotImplementedError, match="Jira"):
            ingester.ingest_jira("https://company.atlassian.net", token="tok")

    def test_linear_message_contains_guidance(self):
        from navegador.pm import TicketIngester

        ingester = TicketIngester(_make_store())
        with pytest.raises(NotImplementedError) as exc_info:
            ingester.ingest_linear("lin_key", project="MyProject")
        message = str(exc_info.value)
        assert "53" in message or "Linear" in message

    def test_jira_message_contains_guidance(self):
        from navegador.pm import TicketIngester

        ingester = TicketIngester(_make_store())
        with pytest.raises(NotImplementedError) as exc_info:
            ingester.ingest_jira("https://x.atlassian.net")
        message = str(exc_info.value)
        assert "Jira" in message or "jira" in message.lower()
445
+
446
+
447
+# =============================================================================
448
+# #55 — FossilAdapter
449
+# =============================================================================
450
+
451
+
452
@pytest.fixture()
def fossil_dir(tmp_path):
    """Create a directory resembling a Fossil checkout (marked by .fslckout)."""
    checkout = tmp_path / "fossil_repo"
    checkout.mkdir()
    (checkout / ".fslckout").touch()
    return checkout
458
+
459
+
460
class TestFossilAdapterCurrentBranch:
    """current_branch calls 'fossil branch current' and returns stripped output."""

    def test_returns_branch_name(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="trunk\n")):
            assert adapter.current_branch() == "trunk"

    def test_strips_whitespace(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout=" feature-branch \n")):
            assert adapter.current_branch() == "feature-branch"

    def test_calls_fossil_branch_current(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="main\n")) as mock_run:
            adapter.current_branch()

        # The exact argv matters: positional arg 0 of the subprocess call.
        assert mock_run.call_args[0][0] == ["fossil", "branch", "current"]
499
+
500
+
501
class TestFossilAdapterChangedFiles:
    """changed_files calls 'fossil changes --differ' and parses output."""

    def test_returns_changed_file_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        fake = MagicMock(stdout="EDITED src/main.py\nADDED tests/test_new.py\n")
        with patch("subprocess.run", return_value=fake):
            files = adapter.changed_files()

        assert "src/main.py" in files
        assert "tests/test_new.py" in files

    def test_empty_output_returns_empty_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="")):
            assert adapter.changed_files() == []

    def test_calls_fossil_changes_differ(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="")) as mock_run:
            adapter.changed_files()

        assert mock_run.call_args[0][0] == ["fossil", "changes", "--differ"]

    def test_returns_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="EDITED foo.py\n")):
            assert isinstance(adapter.changed_files(), list)
553
+
554
+
555
class TestFossilAdapterFileHistory:
    """file_history calls 'fossil timeline' and parses output into entry dicts."""

    SAMPLE_TIMELINE = """\
=== 2024-01-15 ===
14:23:07 [abc123def456] Add feature. (user: alice, tags: trunk)
09:00:00 [deadbeef1234] Fix typo. (user: bob, tags: trunk)
=== 2024-01-14 ===
22:10:00 [cafe0000abcd] Initial commit. (user: alice, tags: initial)
"""

    def test_returns_list_of_dicts(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout=self.SAMPLE_TIMELINE)):
            history = adapter.file_history("src/main.py")

        assert isinstance(history, list)
        assert len(history) >= 1

    def test_entry_has_required_keys(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout=self.SAMPLE_TIMELINE)):
            history = adapter.file_history("src/main.py")

        for entry in history:
            for key in ("hash", "author", "date", "message"):
                assert key in entry

    def test_limit_passed_to_fossil(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="")) as mock_run:
            adapter.file_history("src/main.py", limit=5)

        # The limit value should appear somewhere in the fossil argv.
        assert "5" in mock_run.call_args[0][0]

    def test_empty_output_returns_empty_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="")):
            assert adapter.file_history("nonexistent.py") == []
619
+
620
+
621
class TestFossilAdapterBlame:
    """blame calls 'fossil annotate --log' and parses per-line output."""

    SAMPLE_ANNOTATE = """\
1.1 alice 2024-01-15: def main():
1.1 alice 2024-01-15:     pass
1.2 bob 2024-01-20: # added comment
"""

    def test_returns_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout=self.SAMPLE_ANNOTATE)):
            entries = adapter.blame("src/main.py")

        assert isinstance(entries, list)
        assert len(entries) >= 1

    def test_entry_has_required_keys(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout=self.SAMPLE_ANNOTATE)):
            entries = adapter.blame("src/main.py")

        for entry in entries:
            for key in ("line", "hash", "author", "content"):
                assert key in entry

    def test_line_numbers_sequential(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout=self.SAMPLE_ANNOTATE)):
            entries = adapter.blame("src/main.py")

        if len(entries) >= 2:
            assert entries[1]["line"] > entries[0]["line"]

    def test_calls_fossil_annotate(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        adapter = FossilAdapter(fossil_dir)
        with patch("subprocess.run", return_value=MagicMock(stdout="")) as mock_run:
            adapter.blame("src/main.py")

        argv = mock_run.call_args[0][0]
        assert "fossil" in argv
        assert "annotate" in argv
685
+
686
+
687
+# =============================================================================
688
+# #58 — DependencyIngester
689
+# =============================================================================
690
+
691
+
692
class TestDependencyIngesterNPM:
    """ingest_npm parses package.json and creates dependency nodes."""

    @staticmethod
    def _write_pkg(tmp_path, manifest):
        # Serialize a manifest dict to package.json and return its path.
        pkg_file = tmp_path / "package.json"
        pkg_file.write_text(json.dumps(manifest))
        return str(pkg_file)

    def test_ingests_dependencies(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        path = self._write_pkg(tmp_path, {
            "name": "myapp",
            "dependencies": {"react": "^18.0.0", "lodash": "4.17.21"},
            "devDependencies": {"jest": "^29.0.0"},
        })
        store = _make_store()
        stats = DependencyIngester(store).ingest_npm(path)

        assert stats["packages"] == 3  # 2 deps + 1 devDep
        assert store.create_node.call_count >= 3

    def test_empty_dependencies(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        path = self._write_pkg(tmp_path, {"name": "empty", "dependencies": {}})
        stats = DependencyIngester(_make_store()).ingest_npm(path)
        assert stats["packages"] == 0

    def test_peer_dependencies_included(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        path = self._write_pkg(tmp_path, {"peerDependencies": {"react": ">=17"}})
        stats = DependencyIngester(_make_store()).ingest_npm(path)
        assert stats["packages"] == 1

    def test_creates_depends_on_edge(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        path = self._write_pkg(tmp_path, {"dependencies": {"axios": "^1.0.0"}})
        store = _make_store()
        DependencyIngester(store).ingest_npm(path)
        store.create_edge.assert_called()
747
+
748
+
749
class TestDependencyIngesterPip:
    """ingest_pip parses requirements.txt and creates dependency nodes."""

    def test_requirements_txt(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        req_file = tmp_path / "requirements.txt"
        req_file.write_text(
            "requests>=2.28.0\n"
            "flask[async]==2.3.0\n"
            "# a comment\n"
            "\n"
            "pytest>=7.0 # dev\n"
        )

        stats = DependencyIngester(_make_store()).ingest_pip(str(req_file))
        # Comment line and blank line are not packages.
        assert stats["packages"] == 3

    def test_skips_comments_and_blanks(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        req_file = tmp_path / "requirements.txt"
        req_file.write_text("# comment\n\n-r other.txt\n")

        stats = DependencyIngester(_make_store()).ingest_pip(str(req_file))
        assert stats["packages"] == 0

    def test_pyproject_toml(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        pyproject = tmp_path / "pyproject.toml"
        pyproject.write_text(
            "[project]\n"
            'name = "myproject"\n'
            "dependencies = [\n"
            '    "click>=8.0",\n'
            '    "rich>=12.0",\n'
            '    "pydantic>=2.0",\n'
            "]\n"
        )

        stats = DependencyIngester(_make_store()).ingest_pip(str(pyproject))
        assert stats["packages"] >= 3
796
+
797
+
798
class TestDependencyIngesterCargo:
    """ingest_cargo parses Cargo.toml and creates dependency nodes."""

    def test_basic_cargo_toml(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        cargo_file = tmp_path / "Cargo.toml"
        cargo_file.write_text(
            "[package]\n"
            'name = "myapp"\n'
            "\n"
            "[dependencies]\n"
            'serde = "1.0"\n'
            'tokio = { version = "1.0", features = ["full"] }\n'
            "\n"
            "[dev-dependencies]\n"
            'criterion = "0.4"\n'
        )

        stats = DependencyIngester(_make_store()).ingest_cargo(str(cargo_file))
        assert stats["packages"] == 3  # serde, tokio, criterion

    def test_empty_cargo_toml(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        cargo_file = tmp_path / "Cargo.toml"
        cargo_file.write_text('[package]\nname = "empty"\n')

        stats = DependencyIngester(_make_store()).ingest_cargo(str(cargo_file))
        assert stats["packages"] == 0

    def test_build_dependencies_included(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        cargo_file = tmp_path / "Cargo.toml"
        cargo_file.write_text('[build-dependencies]\nbuild-helper = "0.3"\n')

        stats = DependencyIngester(_make_store()).ingest_cargo(str(cargo_file))
        assert stats["packages"] == 1
842
+
843
+
844
+# =============================================================================
845
+# #61 — SubmoduleIngester
846
+# =============================================================================
847
+
848
+
849
class TestDetectSubmodules:
    """detect_submodules parses .gitmodules into structured dicts."""

    def test_no_gitmodules_returns_empty(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        assert SubmoduleIngester(_make_store()).detect_submodules(tmp_path) == []

    def test_single_submodule(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        (tmp_path / ".gitmodules").write_text(
            '[submodule "vendor/lib"]\n'
            " path = vendor/lib\n"
            " url = https://github.com/org/lib.git\n"
        )

        found = SubmoduleIngester(_make_store()).detect_submodules(tmp_path)
        assert len(found) == 1
        entry = found[0]
        assert entry["name"] == "vendor/lib"
        assert entry["path"] == "vendor/lib"
        assert entry["url"] == "https://github.com/org/lib.git"
        assert entry["abs_path"] == str(tmp_path / "vendor/lib")

    def test_multiple_submodules(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        (tmp_path / ".gitmodules").write_text(
            '[submodule "a"]\n path = sub/a\n url = https://example.com/a.git\n'
            '[submodule "b"]\n path = sub/b\n url = https://example.com/b.git\n'
        )

        found = SubmoduleIngester(_make_store()).detect_submodules(tmp_path)
        assert len(found) == 2
        assert {entry["name"] for entry in found} == {"a", "b"}

    def test_missing_url_returns_empty_string(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        (tmp_path / ".gitmodules").write_text('[submodule "x"]\n path = sub/x\n')

        found = SubmoduleIngester(_make_store()).detect_submodules(tmp_path)
        assert found[0]["url"] == ""
897
+
898
+
899
class TestIngestWithSubmodules:
    """ingest_with_submodules ingests parent + submodules, creates DEPENDS_ON edges."""

    def test_no_gitmodules_ingests_parent_only(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        ingester = SubmoduleIngester(_make_store())
        with patch("navegador.ingestion.parser.RepoIngester") as MockIngester:
            MockIngester.return_value.ingest.return_value = {"files": 5, "nodes": 10}
            stats = ingester.ingest_with_submodules(str(tmp_path))

        assert stats["parent"]["files"] == 5
        assert stats["submodules"] == {}
        assert stats["total_files"] == 5

    def test_missing_submodule_path_recorded_as_error(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        (tmp_path / ".gitmodules").write_text(
            '[submodule "missing"]\n path = does/not/exist\n url = https://x.com/r.git\n'
        )
        ingester = SubmoduleIngester(_make_store())
        with patch("navegador.ingestion.parser.RepoIngester") as MockIngester:
            MockIngester.return_value.ingest.return_value = {"files": 3, "nodes": 6}
            stats = ingester.ingest_with_submodules(str(tmp_path))

        assert "missing" in stats["submodules"]
        assert "error" in stats["submodules"]["missing"]

    def test_existing_submodule_ingested(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        (tmp_path / "libs" / "core").mkdir(parents=True)
        (tmp_path / ".gitmodules").write_text(
            '[submodule "core"]\n path = libs/core\n url = https://x.com/core.git\n'
        )
        store = _make_store()
        ingester = SubmoduleIngester(store)
        with patch("navegador.ingestion.parser.RepoIngester") as MockIngester:
            MockIngester.return_value.ingest.return_value = {"files": 4, "nodes": 8}
            stats = ingester.ingest_with_submodules(str(tmp_path))

        assert "core" in stats["submodules"]
        assert stats["submodules"]["core"]["files"] == 4
        assert stats["total_files"] == 8  # parent 4 + submodule 4

        # DEPENDS_ON edge from parent to the submodule
        store.create_edge.assert_called()
967
+
968
+
969
+# =============================================================================
970
+# #62 — WorkspaceMode + WorkspaceManager
971
+# =============================================================================
972
+
973
+
974
class TestWorkspaceMode:
    """WorkspaceMode enum has UNIFIED and FEDERATED values."""

    def test_has_unified(self):
        from navegador.multirepo import WorkspaceMode

        assert WorkspaceMode.UNIFIED == "unified"

    def test_has_federated(self):
        from navegador.multirepo import WorkspaceMode

        assert WorkspaceMode.FEDERATED == "federated"

    def test_is_str_enum(self):
        from navegador.multirepo import WorkspaceMode

        # str-enum members compare and behave as plain strings.
        for member in (WorkspaceMode.UNIFIED, WorkspaceMode.FEDERATED):
            assert isinstance(member, str)

    def test_from_string(self):
        from navegador.multirepo import WorkspaceMode

        assert WorkspaceMode("unified") == WorkspaceMode.UNIFIED
        assert WorkspaceMode("federated") == WorkspaceMode.FEDERATED
998
+
999
+
1000
class TestWorkspaceManagerUnified:
    """WorkspaceManager in UNIFIED mode uses a single shared graph."""

    def test_add_repo_creates_repository_node(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        store = _make_store()
        manager = WorkspaceManager(store, mode=WorkspaceMode.UNIFIED)
        manager.add_repo("backend", str(tmp_path))

        store.create_node.assert_called()

    def test_list_repos(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        manager.add_repo("backend", str(tmp_path))
        manager.add_repo("frontend", str(tmp_path))

        assert {r["name"] for r in manager.list_repos()} == {"backend", "frontend"}

    def test_ingest_all_calls_repo_ingester(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        manager.add_repo("repo1", str(tmp_path))

        with patch("navegador.ingestion.parser.RepoIngester") as MockIngester:
            MockIngester.return_value.ingest.return_value = {"files": 2, "nodes": 5}
            summary = manager.ingest_all()

        assert "repo1" in summary
        assert summary["repo1"]["files"] == 2

    def test_ingest_all_no_repos_returns_empty(self):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        assert manager.ingest_all() == {}

    def test_search_unified_queries_single_store(self):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        store = _make_store()
        store.query.return_value = MagicMock(
            result_set=[["Function", "authenticate", "/src/auth.py"]]
        )
        manager = WorkspaceManager(store, mode=WorkspaceMode.UNIFIED)
        manager.add_repo("repo", "/tmp/repo")

        hits = manager.search("authenticate")
        assert len(hits) >= 1
        assert hits[0]["name"] == "authenticate"

    def test_ingest_error_recorded_in_summary(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        manager.add_repo("broken", str(tmp_path))

        with patch("navegador.ingestion.parser.RepoIngester") as MockIngester:
            MockIngester.return_value.ingest.side_effect = RuntimeError("parse error")
            summary = manager.ingest_all()

        # A failing repo is reported, not raised.
        assert "broken" in summary
        assert "error" in summary["broken"]
1074
+
1075
+
1076
class TestWorkspaceManagerFederated:
    """WorkspaceManager in FEDERATED mode creates per-repo graphs."""

    def test_add_repo_sets_federated_graph_name(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.FEDERATED)
        manager.add_repo("api", str(tmp_path))

        assert manager.list_repos()[0]["graph_name"] == "navegador_api"

    def test_unified_graph_name_is_navegador(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        manager.add_repo("api", str(tmp_path))

        assert manager.list_repos()[0]["graph_name"] == "navegador"

    def test_federated_ingest_uses_per_repo_store(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        store = _make_store()
        store._client.select_graph.return_value = MagicMock()

        manager = WorkspaceManager(store, mode=WorkspaceMode.FEDERATED)
        manager.add_repo("svc", str(tmp_path))

        with patch("navegador.ingestion.parser.RepoIngester") as MockIngester:
            MockIngester.return_value.ingest.return_value = {"files": 1, "nodes": 3}
            summary = manager.ingest_all()

        assert "svc" in summary
        # select_graph should have been asked for "navegador_svc"
        selected = [c.args[0] for c in store._client.select_graph.call_args_list]
        assert any("navegador_svc" in name for name in selected)

    def test_federated_search_merges_results(self):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        store = _make_store()

        # Every per-repo graph returns the same single hit.
        per_repo_graph = MagicMock()
        per_repo_graph.query.return_value = MagicMock(
            result_set=[["Function", "auth_check", "/src/auth.py"]]
        )
        store._client.select_graph.return_value = per_repo_graph

        manager = WorkspaceManager(store, mode=WorkspaceMode.FEDERATED)
        manager._repos = {
            "backend": {"path": "/tmp/backend", "graph_name": "navegador_backend"},
            "frontend": {"path": "/tmp/frontend", "graph_name": "navegador_frontend"},
        }

        hits = manager.search("auth")
        # Two repos each return one hit; identical names may be deduplicated,
        # so at least one result must remain.
        assert len(hits) >= 1
1144
+
1145
+
1146
+# =============================================================================
1147
+# CLI smoke tests
1148
+# =============================================================================
1149
+
1150
+
1151
class TestCLIPMGroup:
    """The ``pm`` command group is wired into the main CLI."""

    def test_pm_group_exists(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        outcome = CliRunner().invoke(main, ["pm", "--help"])
        assert outcome.exit_code == 0
        assert "ingest" in outcome.output

    def test_pm_ingest_requires_github(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        # Without a GitHub target the command must fail.
        outcome = CliRunner().invoke(main, ["pm", "ingest"])
        assert outcome.exit_code != 0
1172
+
1173
+
1174
class TestCLIDepsGroup:
    """The ``deps`` command group is wired into the main CLI."""

    def test_deps_group_exists(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        outcome = CliRunner().invoke(main, ["deps", "--help"])
        assert outcome.exit_code == 0
        assert "ingest" in outcome.output
1186
+
1187
+
1188
class TestCLISubmodulesGroup:
    """The ``submodules`` command group is wired into the main CLI."""

    def test_submodules_group_exists(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        outcome = CliRunner().invoke(main, ["submodules", "--help"])
        assert outcome.exit_code == 0

    def test_submodules_list_empty(self, tmp_path):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        # A directory without .gitmodules reports no submodules but succeeds.
        outcome = CliRunner().invoke(main, ["submodules", "list", str(tmp_path)])
        assert outcome.exit_code == 0
        assert "No submodules" in outcome.output
1209
+
1210
+
1211
class TestCLIWorkspaceGroup:
    """The ``workspace`` command group is wired into the main CLI."""

    def test_workspace_group_exists(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        outcome = CliRunner().invoke(main, ["workspace", "--help"])
        assert outcome.exit_code == 0
        assert "ingest" in outcome.output

    def test_workspace_ingest_requires_repos(self, tmp_path):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        # Ingesting with no repos configured must fail.
        outcome = CliRunner().invoke(
            main,
            ["workspace", "ingest", "--db", str(tmp_path / "g.db")],
        )
        assert outcome.exit_code != 0
--- a/tests/test_v04_batch3.py
+++ b/tests/test_v04_batch3.py
@@ -0,0 +1,1234 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/tests/test_v04_batch3.py
+++ b/tests/test_v04_batch3.py
@@ -0,0 +1,1234 @@
1 """
2 Tests for navegador v0.4 batch 3 — issues #7, #18, #53, #55, #58, #61, #62.
3
4 Covers:
5 #7 / #18 — PlanopticonPipeline (pipeline, action items, decision timeline, auto-link)
6 #53 — TicketIngester (GitHub, Linear stub, Jira stub)
7 #55 — FossilAdapter (current_branch, changed_files, file_history, blame)
8 #58 — DependencyIngester (npm, pip/requirements.txt, pip/pyproject.toml, cargo)
9 #61 — SubmoduleIngester (detect_submodules, ingest_with_submodules)
10 #62 — WorkspaceMode enum, WorkspaceManager (unified + federated)
11 """
12
13 from __future__ import annotations
14
15 import json
16 import subprocess
17 import tempfile
18 from pathlib import Path
19 from unittest.mock import MagicMock, patch
20
21 import pytest
22
23
24 # ── Shared mock store factory ─────────────────────────────────────────────────
25
26
27 def _make_store():
28 store = MagicMock()
29 store.query.return_value = MagicMock(result_set=[])
30 return store
31
32
33 # =============================================================================
34 # #7 / #18 — PlanopticonPipeline
35 # =============================================================================
36
37
class TestPlanopticonPipelineDetectInput:
    """_detect_input correctly identifies file types from path.

    Imports are done inside each test (matching the rest of this file) so that
    a missing module fails the individual test rather than collection.
    """

    def test_manifest_file(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        f = tmp_path / "manifest.json"
        f.write_text("{}")
        itype, resolved = PlanopticonPipeline._detect_input(f)
        assert itype == "manifest"
        assert resolved == f

    def test_interchange_file(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        f = tmp_path / "interchange.json"
        f.write_text("{}")
        itype, _ = PlanopticonPipeline._detect_input(f)
        assert itype == "interchange"

    def test_batch_file(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        f = tmp_path / "batch_manifest.json"
        f.write_text("{}")
        itype, _ = PlanopticonPipeline._detect_input(f)
        assert itype == "batch"

    def test_kg_file_default(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        f = tmp_path / "knowledge_graph.json"
        f.write_text("{}")
        itype, _ = PlanopticonPipeline._detect_input(f)
        assert itype == "kg"

    def test_directory_with_manifest(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        # A directory containing manifest.json resolves to the manifest.
        (tmp_path / "manifest.json").write_text("{}")
        itype, resolved = PlanopticonPipeline._detect_input(tmp_path)
        assert itype == "manifest"

    def test_directory_without_known_files_raises(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        with pytest.raises(FileNotFoundError):
            PlanopticonPipeline._detect_input(tmp_path)
88
89
class TestPlanopticonPipelineRun:
    """PlanopticonPipeline.run delegates to PlanopticonIngester and auto-links."""

    def test_run_returns_stats_with_linked_key(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        kg_file = tmp_path / "knowledge_graph.json"
        kg_file.write_text(
            json.dumps({"nodes": [], "relationships": [], "sources": []})
        )

        pipeline = PlanopticonPipeline(_make_store(), source_tag="test")
        stats = pipeline.run(str(kg_file))

        assert "nodes" in stats
        assert "linked" in stats

    def test_run_calls_ingester(self, tmp_path):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        payload = {
            "nodes": [{"id": "n1", "type": "concept", "name": "Auth"}],
            "relationships": [],
            "sources": [],
        }
        kg_file = tmp_path / "knowledge_graph.json"
        kg_file.write_text(json.dumps(payload))

        graph_store = _make_store()
        stats = PlanopticonPipeline(graph_store).run(str(kg_file), source_tag="Meeting")

        assert isinstance(stats, dict)
        # At least one node creation is expected for the concept node.
        graph_store.create_node.assert_called()
125
126
class TestExtractActionItems:
    """extract_action_items pulls action items out of several KG data shapes."""

    def test_action_items_list(self):
        from navegador.planopticon_pipeline import ActionItem, PlanopticonPipeline

        payload = {
            "action_items": [
                {"action": "Write tests", "assignee": "Alice", "priority": "high"},
                {"action": "Deploy service", "assignee": "", "priority": "info"},
            ]
        }
        extracted = PlanopticonPipeline.extract_action_items(payload)

        assert len(extracted) == 2
        assert all(isinstance(item, ActionItem) for item in extracted)
        assert extracted[0].action == "Write tests"
        assert extracted[0].assignee == "Alice"
        assert extracted[1].action == "Deploy service"

    def test_blank_actions_skipped(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        payload = {"action_items": [{"action": " ", "assignee": "Bob"}]}
        assert PlanopticonPipeline.extract_action_items(payload) == []

    def test_entities_with_task_type(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        payload = {
            "entities": [
                {"planning_type": "task", "name": "Refactor auth module"},
                {"planning_type": "decision", "name": "Use PostgreSQL"},
            ]
        }
        extracted = PlanopticonPipeline.extract_action_items(payload)

        # Only the "task" entity qualifies as an action item.
        assert len(extracted) == 1
        assert extracted[0].action == "Refactor auth module"

    def test_nodes_with_action_item_type(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        payload = {"nodes": [{"type": "action_item", "name": "Update documentation"}]}
        extracted = PlanopticonPipeline.extract_action_items(payload)

        assert len(extracted) == 1
        assert extracted[0].action == "Update documentation"

    def test_empty_data_returns_empty_list(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        assert PlanopticonPipeline.extract_action_items({}) == []

    def test_action_item_to_dict(self):
        from navegador.planopticon_pipeline import ActionItem

        item = ActionItem(action="Do thing", assignee="Carol", priority="critical")
        as_dict = item.to_dict()
        assert as_dict["action"] == "Do thing"
        assert as_dict["assignee"] == "Carol"
        assert as_dict["priority"] == "critical"
192
class TestBuildDecisionTimeline:
    """build_decision_timeline turns store rows into a chronological list."""

    def test_returns_list_from_store(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph_store = _make_store()
        graph_store.query.return_value = MagicMock(
            result_set=[
                ["Use microservices", "Split monolith", "arch", "accepted", "Scalability", "2024-01-10"],
                ["Use PostgreSQL", "Relational DB", "data", "accepted", "ACID", "2024-02-01"],
            ]
        )

        timeline = PlanopticonPipeline.build_decision_timeline(graph_store)

        assert len(timeline) == 2
        assert timeline[0]["name"] == "Use microservices"
        assert timeline[0]["date"] == "2024-01-10"

    def test_returns_empty_on_query_failure(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph_store = _make_store()
        graph_store.query.side_effect = Exception("DB error")

        # Query failures degrade to an empty timeline rather than raising.
        assert PlanopticonPipeline.build_decision_timeline(graph_store) == []

    def test_entry_has_required_keys(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph_store = _make_store()
        graph_store.query.return_value = MagicMock(
            result_set=[["D1", "Desc", "domain", "accepted", "rationale", "2024-01-01"]]
        )

        timeline = PlanopticonPipeline.build_decision_timeline(graph_store)
        expected = {"name", "description", "domain", "status", "rationale", "date"}
        assert expected.issubset(timeline[0].keys())
229
230
class TestAutoLinkToCode:
    """auto_link_to_code matches knowledge nodes to code by name similarity."""

    def test_returns_zero_when_no_nodes(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph_store = _make_store()
        graph_store.query.return_value = MagicMock(result_set=[])
        assert PlanopticonPipeline.auto_link_to_code(graph_store) == 0

    def test_links_matching_nodes(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph_store = _make_store()
        calls = []

        # 1st query: knowledge nodes; 2nd: code nodes; later: MERGE queries.
        def _fake_query(cypher, params=None):
            calls.append(cypher)
            if len(calls) == 1:
                # "authenticate" (12 chars) IS contained in "authenticate_user".
                return MagicMock(result_set=[["Concept", "authenticate handler"]])
            if len(calls) == 2:
                return MagicMock(result_set=[["Function", "authenticate_user"]])
            return MagicMock(result_set=[])

        graph_store.query.side_effect = _fake_query

        # "authenticate" (12 chars, ≥4) is contained in "authenticate_user".
        assert PlanopticonPipeline.auto_link_to_code(graph_store) >= 1

    def test_short_tokens_skipped(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph_store = _make_store()
        calls = []

        def _fake_query(cypher, params=None):
            calls.append(cypher)
            if len(calls) == 1:
                return MagicMock(result_set=[["Concept", "API"]])  # all tokens < 4 chars
            if len(calls) == 2:
                return MagicMock(result_set=[["Function", "api_handler"]])
            return MagicMock(result_set=[])

        graph_store.query.side_effect = _fake_query

        # "api" is only 3 chars — should not match.
        assert PlanopticonPipeline.auto_link_to_code(graph_store) == 0

    def test_returns_zero_on_query_failure(self):
        from navegador.planopticon_pipeline import PlanopticonPipeline

        graph_store = _make_store()
        graph_store.query.side_effect = Exception("boom")
        assert PlanopticonPipeline.auto_link_to_code(graph_store) == 0
293
294
295 # =============================================================================
296 # #53 — TicketIngester
297 # =============================================================================
298
299
300 class TestTicketIngesterGitHub:
301 """TicketIngester.ingest_github_issues fetches and ingests GitHub issues."""
302
303 def _make_issue(self, number=1, title="Fix bug", body="Details", labels=None, assignees=None):
304 return {
305 "number": number,
306 "title": title,
307 "body": body,
308 "html_url": f"https://github.com/owner/repo/issues/{number}",
309 "labels": [{"name": l} for l in (labels or [])],
310 "assignees": [{"login": a} for a in (assignees or [])],
311 }
312
313 def test_ingest_creates_ticket_nodes(self):
314 from navegador.pm import TicketIngester
315
316 store = _make_store()
317 store.query.return_value = MagicMock(result_set=[])
318 ing = TicketIngester(store)
319
320 issues = [self._make_issue(1, "Bug report"), self._make_issue(2, "Feature request")]
321 with patch("urllib.request.urlopen") as mock_open:
322 cm = MagicMock()
323 cm.__enter__ = MagicMock(return_value=cm)
324 cm.__exit__ = MagicMock(return_value=False)
325 cm.read.return_value = json.dumps(issues).encode()
326 mock_open.return_value = cm
327
328 stats = ing.ingest_github_issues("owner/repo", token="test_token")
329
330 assert stats["tickets"] == 2
331 assert "linked" in stats
332
333 def test_pull_requests_filtered_out(self):
334 from navegador.pm import TicketIngester
335
336 store = _make_store()
337 store.query.return_value = MagicMock(result_set=[])
338 ing = TicketIngester(store)
339
340 # Mix of issue and PR
341 issue = self._make_issue(1, "Real issue")
342 pr = {**self._make_issue(2, "A PR"), "pull_request": {"url": "..."}}
343
344 with patch("urllib.request.urlopen") as mock_open:
345 cm = MagicMock()
346 cm.__enter__ = MagicMock(return_value=cm)
347 cm.__exit__ = MagicMock(return_value=False)
348 cm.read.return_value = json.dumps([issue, pr]).encode()
349 mock_open.return_value = cm
350
351 stats = ing.ingest_github_issues("owner/repo")
352
353 assert stats["tickets"] == 1 # PR filtered out
354
355 def test_assignees_become_person_nodes(self):
356 from navegador.pm import TicketIngester
357
358 store = _make_store()
359 store.query.return_value = MagicMock(result_set=[])
360 ing = TicketIngester(store)
361
362 issue = self._make_issue(1, "Assign me", assignees=["alice"])
363
364 with patch("urllib.request.urlopen") as mock_open:
365 cm = MagicMock()
366 cm.__enter__ = MagicMock(return_value=cm)
367 cm.__exit__ = MagicMock(return_value=False)
368 cm.read.return_value = json.dumps([issue]).encode()
369 mock_open.return_value = cm
370
371 ing.ingest_github_issues("owner/repo")
372
373 # Person node created for alice
374 person_calls = [
375 c for c in store.create_node.call_args_list
376 if c.args and hasattr(c.args[0], "value") and c.args[0].value == "Person"
377 ]
378 assert len(person_calls) >= 1
379
380 def test_network_error_raises_runtime_error(self):
381 from navegador.pm import TicketIngester
382
383 store = _make_store()
384 ing = TicketIngester(store)
385
386 with patch("urllib.request.urlopen", side_effect=Exception("network error")):
387 with pytest.raises(RuntimeError, match="Failed to fetch GitHub issues"):
388 ing.ingest_github_issues("owner/repo")
389
390
class TestTicketIngesterSeverity:
    """_github_severity maps label names onto severity buckets."""

    def test_critical_label(self):
        from navegador.pm import TicketIngester

        for label in ("critical", "blocker"):
            assert TicketIngester._github_severity([label]) == "critical"

    def test_warning_label(self):
        from navegador.pm import TicketIngester

        for label in ("bug", "high"):
            assert TicketIngester._github_severity([label]) == "warning"

    def test_default_info(self):
        from navegador.pm import TicketIngester

        # No labels, or unrecognized labels, fall back to "info".
        assert TicketIngester._github_severity([]) == "info"
        assert TicketIngester._github_severity(["enhancement"]) == "info"
411
412
class TestTicketIngesterStubs:
    """Linear and Jira ingestion are stubs raising NotImplementedError."""

    def test_linear_raises_not_implemented(self):
        from navegador.pm import TicketIngester

        ingester = TicketIngester(_make_store())
        with pytest.raises(NotImplementedError, match="Linear"):
            ingester.ingest_linear("lin_apikey")

    def test_jira_raises_not_implemented(self):
        from navegador.pm import TicketIngester

        ingester = TicketIngester(_make_store())
        with pytest.raises(NotImplementedError, match="Jira"):
            ingester.ingest_jira("https://company.atlassian.net", token="tok")

    def test_linear_message_contains_guidance(self):
        from navegador.pm import TicketIngester

        ingester = TicketIngester(_make_store())
        with pytest.raises(NotImplementedError) as excinfo:
            ingester.ingest_linear("lin_key", project="MyProject")
        message = str(excinfo.value)
        assert "53" in message or "Linear" in message

    def test_jira_message_contains_guidance(self):
        from navegador.pm import TicketIngester

        ingester = TicketIngester(_make_store())
        with pytest.raises(NotImplementedError) as excinfo:
            ingester.ingest_jira("https://x.atlassian.net")
        message = str(excinfo.value)
        assert "Jira" in message or "jira" in message.lower()
445
446
447 # =============================================================================
448 # #55 — FossilAdapter
449 # =============================================================================
450
451
@pytest.fixture()
def fossil_dir(tmp_path):
    """Fake Fossil checkout: a directory containing an empty ``.fslckout``."""
    checkout = tmp_path / "fossil_repo"
    checkout.mkdir()
    (checkout / ".fslckout").touch()
    return checkout
458
459
class TestFossilAdapterCurrentBranch:
    """current_branch shells out to 'fossil branch current' and strips output."""

    def test_returns_branch_name(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout="trunk\n")
        with patch("subprocess.run", return_value=proc):
            assert FossilAdapter(fossil_dir).current_branch() == "trunk"

    def test_strips_whitespace(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout="  feature-branch  \n")
        with patch("subprocess.run", return_value=proc):
            assert FossilAdapter(fossil_dir).current_branch() == "feature-branch"

    def test_calls_fossil_branch_current(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout="main\n")
        with patch("subprocess.run", return_value=proc) as run_mock:
            FossilAdapter(fossil_dir).current_branch()

        # First positional argument is the argv list handed to subprocess.run.
        assert run_mock.call_args[0][0] == ["fossil", "branch", "current"]
499
500
class TestFossilAdapterChangedFiles:
    """changed_files runs 'fossil changes --differ' and parses the listing."""

    def test_returns_changed_file_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout="EDITED     src/main.py\nADDED      tests/test_new.py\n")
        with patch("subprocess.run", return_value=proc):
            changed = FossilAdapter(fossil_dir).changed_files()

        assert "src/main.py" in changed
        assert "tests/test_new.py" in changed

    def test_empty_output_returns_empty_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout="")
        with patch("subprocess.run", return_value=proc):
            assert FossilAdapter(fossil_dir).changed_files() == []

    def test_calls_fossil_changes_differ(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout="")
        with patch("subprocess.run", return_value=proc) as run_mock:
            FossilAdapter(fossil_dir).changed_files()

        assert run_mock.call_args[0][0] == ["fossil", "changes", "--differ"]

    def test_returns_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout="EDITED     foo.py\n")
        with patch("subprocess.run", return_value=proc):
            assert isinstance(FossilAdapter(fossil_dir).changed_files(), list)
553
554
class TestFossilAdapterFileHistory:
    """file_history runs 'fossil timeline' and parses entries into dicts."""

    SAMPLE_TIMELINE = """\
=== 2024-01-15 ===
14:23:07 [abc123def456] Add feature. (user: alice, tags: trunk)
09:00:00 [deadbeef1234] Fix typo. (user: bob, tags: trunk)
=== 2024-01-14 ===
22:10:00 [cafe0000abcd] Initial commit. (user: alice, tags: initial)
"""

    def test_returns_list_of_dicts(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout=self.SAMPLE_TIMELINE)
        with patch("subprocess.run", return_value=proc):
            history = FossilAdapter(fossil_dir).file_history("src/main.py")

        assert isinstance(history, list)
        assert len(history) >= 1

    def test_entry_has_required_keys(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout=self.SAMPLE_TIMELINE)
        with patch("subprocess.run", return_value=proc):
            history = FossilAdapter(fossil_dir).file_history("src/main.py")

        for entry in history:
            for key in ("hash", "author", "date", "message"):
                assert key in entry

    def test_limit_passed_to_fossil(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout="")
        with patch("subprocess.run", return_value=proc) as run_mock:
            FossilAdapter(fossil_dir).file_history("src/main.py", limit=5)

        # The limit must appear in the argv handed to fossil.
        assert "5" in run_mock.call_args[0][0]

    def test_empty_output_returns_empty_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout="")
        with patch("subprocess.run", return_value=proc):
            assert FossilAdapter(fossil_dir).file_history("nonexistent.py") == []
619
620
class TestFossilAdapterBlame:
    """blame runs 'fossil annotate --log' and parses per-line attribution."""

    SAMPLE_ANNOTATE = """\
1.1 alice 2024-01-15: def main():
1.1 alice 2024-01-15: pass
1.2 bob 2024-01-20: # added comment
"""

    def test_returns_list(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout=self.SAMPLE_ANNOTATE)
        with patch("subprocess.run", return_value=proc):
            annotated = FossilAdapter(fossil_dir).blame("src/main.py")

        assert isinstance(annotated, list)
        assert len(annotated) >= 1

    def test_entry_has_required_keys(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout=self.SAMPLE_ANNOTATE)
        with patch("subprocess.run", return_value=proc):
            annotated = FossilAdapter(fossil_dir).blame("src/main.py")

        for entry in annotated:
            for key in ("line", "hash", "author", "content"):
                assert key in entry

    def test_line_numbers_sequential(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout=self.SAMPLE_ANNOTATE)
        with patch("subprocess.run", return_value=proc):
            annotated = FossilAdapter(fossil_dir).blame("src/main.py")

        if len(annotated) >= 2:
            assert annotated[1]["line"] > annotated[0]["line"]

    def test_calls_fossil_annotate(self, fossil_dir):
        from navegador.vcs import FossilAdapter

        proc = MagicMock(stdout="")
        with patch("subprocess.run", return_value=proc) as run_mock:
            FossilAdapter(fossil_dir).blame("src/main.py")

        argv = run_mock.call_args[0][0]
        assert "fossil" in argv
        assert "annotate" in argv
685
686
687 # =============================================================================
688 # #58 — DependencyIngester
689 # =============================================================================
690
691
class TestDependencyIngesterNPM:
    """ingest_npm reads package.json and records each dependency."""

    def test_ingests_dependencies(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        manifest = tmp_path / "package.json"
        manifest.write_text(json.dumps({
            "name": "myapp",
            "dependencies": {"react": "^18.0.0", "lodash": "4.17.21"},
            "devDependencies": {"jest": "^29.0.0"},
        }))

        graph_store = _make_store()
        stats = DependencyIngester(graph_store).ingest_npm(str(manifest))

        # 2 runtime deps + 1 dev dep.
        assert stats["packages"] == 3
        assert graph_store.create_node.call_count >= 3

    def test_empty_dependencies(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        manifest = tmp_path / "package.json"
        manifest.write_text(json.dumps({"name": "empty", "dependencies": {}}))

        stats = DependencyIngester(_make_store()).ingest_npm(str(manifest))
        assert stats["packages"] == 0

    def test_peer_dependencies_included(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        manifest = tmp_path / "package.json"
        manifest.write_text(json.dumps({"peerDependencies": {"react": ">=17"}}))

        stats = DependencyIngester(_make_store()).ingest_npm(str(manifest))
        assert stats["packages"] == 1

    def test_creates_depends_on_edge(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        manifest = tmp_path / "package.json"
        manifest.write_text(json.dumps({"dependencies": {"axios": "^1.0.0"}}))

        graph_store = _make_store()
        DependencyIngester(graph_store).ingest_npm(str(manifest))
        graph_store.create_edge.assert_called()
747
748
class TestDependencyIngesterPip:
    """ingest_pip handles requirements.txt and pyproject.toml inputs."""

    def test_requirements_txt(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        req_file = tmp_path / "requirements.txt"
        req_file.write_text(
            "requests>=2.28.0\nflask[async]==2.3.0\n# a comment\n\npytest>=7.0 # dev\n"
        )

        stats = DependencyIngester(_make_store()).ingest_pip(str(req_file))
        # Comments and blank lines are ignored; three real requirements remain.
        assert stats["packages"] == 3

    def test_skips_comments_and_blanks(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        req_file = tmp_path / "requirements.txt"
        req_file.write_text("# comment\n\n-r other.txt\n")

        stats = DependencyIngester(_make_store()).ingest_pip(str(req_file))
        assert stats["packages"] == 0

    def test_pyproject_toml(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        pyproject = tmp_path / "pyproject.toml"
        pyproject.write_text(
            "[project]\n"
            'name = "myproject"\n'
            "dependencies = [\n"
            '    "click>=8.0",\n'
            '    "rich>=12.0",\n'
            '    "pydantic>=2.0",\n'
            "]\n"
        )

        stats = DependencyIngester(_make_store()).ingest_pip(str(pyproject))
        assert stats["packages"] >= 3
796
797
class TestDependencyIngesterCargo:
    """ingest_cargo parses Cargo.toml and creates dependency nodes."""

    def test_basic_cargo_toml(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        manifest = """\
[package]
name = "myapp"

[dependencies]
serde = "1.0"
tokio = { version = "1.0", features = ["full"] }

[dev-dependencies]
criterion = "0.4"
"""
        manifest_path = tmp_path / "Cargo.toml"
        manifest_path.write_text(manifest)

        graph = _make_store()
        result = DependencyIngester(graph).ingest_cargo(str(manifest_path))
        # serde + tokio (deps) + criterion (dev-dep)
        assert result["packages"] == 3

    def test_empty_cargo_toml(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        manifest_path = tmp_path / "Cargo.toml"
        manifest_path.write_text('[package]\nname = "empty"\n')

        graph = _make_store()
        result = DependencyIngester(graph).ingest_cargo(str(manifest_path))
        assert result["packages"] == 0

    def test_build_dependencies_included(self, tmp_path):
        from navegador.dependencies import DependencyIngester

        manifest_path = tmp_path / "Cargo.toml"
        manifest_path.write_text('[build-dependencies]\nbuild-helper = "0.3"\n')

        graph = _make_store()
        result = DependencyIngester(graph).ingest_cargo(str(manifest_path))
        # build-dependencies count toward the package total too.
        assert result["packages"] == 1
842
843
844 # =============================================================================
845 # #61 — SubmoduleIngester
846 # =============================================================================
847
848
class TestDetectSubmodules:
    """detect_submodules parses .gitmodules into structured dicts."""

    def test_no_gitmodules_returns_empty(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        # Without a .gitmodules file there is nothing to detect.
        assert SubmoduleIngester(_make_store()).detect_submodules(tmp_path) == []

    def test_single_submodule(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        (tmp_path / ".gitmodules").write_text(
            '[submodule "vendor/lib"]\n'
            " path = vendor/lib\n"
            " url = https://github.com/org/lib.git\n"
        )

        detected = SubmoduleIngester(_make_store()).detect_submodules(tmp_path)
        assert len(detected) == 1
        entry = detected[0]
        assert entry["name"] == "vendor/lib"
        assert entry["path"] == "vendor/lib"
        assert entry["url"] == "https://github.com/org/lib.git"
        # abs_path is the path resolved against the repo root.
        assert entry["abs_path"] == str(tmp_path / "vendor/lib")

    def test_multiple_submodules(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        (tmp_path / ".gitmodules").write_text(
            '[submodule "a"]\n path = sub/a\n url = https://example.com/a.git\n'
            '[submodule "b"]\n path = sub/b\n url = https://example.com/b.git\n'
        )

        detected = SubmoduleIngester(_make_store()).detect_submodules(tmp_path)
        assert len(detected) == 2
        assert {entry["name"] for entry in detected} == {"a", "b"}

    def test_missing_url_returns_empty_string(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        # A section without a url line should still parse, with url == "".
        (tmp_path / ".gitmodules").write_text('[submodule "x"]\n path = sub/x\n')

        detected = SubmoduleIngester(_make_store()).detect_submodules(tmp_path)
        assert detected[0]["url"] == ""
897
898
class TestIngestWithSubmodules:
    """ingest_with_submodules ingests parent + submodules, creates DEPENDS_ON edges."""

    def test_no_gitmodules_ingests_parent_only(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        graph = _make_store()
        ingester = SubmoduleIngester(graph)

        with patch("navegador.ingestion.parser.RepoIngester") as repo_ingester_cls:
            repo_ingester_cls.return_value.ingest.return_value = {"files": 5, "nodes": 10}
            stats = ingester.ingest_with_submodules(str(tmp_path))

        # Only the parent repo is ingested; no submodule entries, totals match.
        assert stats["parent"]["files"] == 5
        assert stats["submodules"] == {}
        assert stats["total_files"] == 5

    def test_missing_submodule_path_recorded_as_error(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        (tmp_path / ".gitmodules").write_text(
            '[submodule "missing"]\n path = does/not/exist\n url = https://x.com/r.git\n'
        )

        graph = _make_store()
        ingester = SubmoduleIngester(graph)

        with patch("navegador.ingestion.parser.RepoIngester") as repo_ingester_cls:
            repo_ingester_cls.return_value.ingest.return_value = {"files": 3, "nodes": 6}
            stats = ingester.ingest_with_submodules(str(tmp_path))

        # The declared-but-absent submodule shows up with an "error" entry.
        assert "missing" in stats["submodules"]
        assert "error" in stats["submodules"]["missing"]

    def test_existing_submodule_ingested(self, tmp_path):
        from navegador.submodules import SubmoduleIngester

        (tmp_path / "libs" / "core").mkdir(parents=True)
        (tmp_path / ".gitmodules").write_text(
            '[submodule "core"]\n path = libs/core\n url = https://x.com/core.git\n'
        )

        graph = _make_store()
        ingester = SubmoduleIngester(graph)

        with patch("navegador.ingestion.parser.RepoIngester") as repo_ingester_cls:
            repo_ingester_cls.return_value.ingest.return_value = {"files": 4, "nodes": 8}
            stats = ingester.ingest_with_submodules(str(tmp_path))

        assert "core" in stats["submodules"]
        assert stats["submodules"]["core"]["files"] == 4
        # Mocked ingest returns 4 files for parent and submodule alike: 4 + 4.
        assert stats["total_files"] == 8

        # A DEPENDS_ON edge links the parent repo to the submodule.
        graph.create_edge.assert_called()
967
968
969 # =============================================================================
970 # #62 — WorkspaceMode + WorkspaceManager
971 # =============================================================================
972
973
class TestWorkspaceMode:
    """WorkspaceMode enum has UNIFIED and FEDERATED values."""

    def test_has_unified(self):
        from navegador.multirepo import WorkspaceMode

        assert "unified" == WorkspaceMode.UNIFIED

    def test_has_federated(self):
        from navegador.multirepo import WorkspaceMode

        assert "federated" == WorkspaceMode.FEDERATED

    def test_is_str_enum(self):
        from navegador.multirepo import WorkspaceMode

        # Members must compare and behave as plain strings.
        for member in (WorkspaceMode.UNIFIED, WorkspaceMode.FEDERATED):
            assert isinstance(member, str)

    def test_from_string(self):
        from navegador.multirepo import WorkspaceMode

        # The enum is constructible from its raw string value.
        assert WorkspaceMode("unified") == WorkspaceMode.UNIFIED
        assert WorkspaceMode("federated") == WorkspaceMode.FEDERATED
998
999
class TestWorkspaceManagerUnified:
    """WorkspaceManager in UNIFIED mode uses a single shared graph."""

    def test_add_repo_creates_repository_node(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        graph = _make_store()
        manager = WorkspaceManager(graph, mode=WorkspaceMode.UNIFIED)
        manager.add_repo("backend", str(tmp_path))

        # Registering a repo writes a Repository node into the shared graph.
        graph.create_node.assert_called()

    def test_list_repos(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        for repo_name in ("backend", "frontend"):
            manager.add_repo(repo_name, str(tmp_path))

        assert {entry["name"] for entry in manager.list_repos()} == {"backend", "frontend"}

    def test_ingest_all_calls_repo_ingester(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        manager.add_repo("repo1", str(tmp_path))

        with patch("navegador.ingestion.parser.RepoIngester") as repo_ingester_cls:
            repo_ingester_cls.return_value.ingest.return_value = {"files": 2, "nodes": 5}
            summary = manager.ingest_all()

        assert "repo1" in summary
        assert summary["repo1"]["files"] == 2

    def test_ingest_all_no_repos_returns_empty(self):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        assert manager.ingest_all() == {}

    def test_search_unified_queries_single_store(self):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        graph = _make_store()
        # One matching symbol in the shared graph's query result.
        graph.query.return_value = MagicMock(
            result_set=[["Function", "authenticate", "/src/auth.py"]]
        )
        manager = WorkspaceManager(graph, mode=WorkspaceMode.UNIFIED)
        manager.add_repo("repo", "/tmp/repo")

        hits = manager.search("authenticate")
        assert len(hits) >= 1
        assert hits[0]["name"] == "authenticate"

    def test_ingest_error_recorded_in_summary(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        manager.add_repo("broken", str(tmp_path))

        with patch("navegador.ingestion.parser.RepoIngester") as repo_ingester_cls:
            repo_ingester_cls.return_value.ingest.side_effect = RuntimeError("parse error")
            summary = manager.ingest_all()

        # Failures are captured per-repo rather than aborting the batch.
        assert "broken" in summary
        assert "error" in summary["broken"]
1074
1075
class TestWorkspaceManagerFederated:
    """WorkspaceManager in FEDERATED mode creates per-repo graphs."""

    def test_add_repo_sets_federated_graph_name(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.FEDERATED)
        manager.add_repo("api", str(tmp_path))

        # Federated repos get their own "navegador_<name>" graph.
        assert manager.list_repos()[0]["graph_name"] == "navegador_api"

    def test_unified_graph_name_is_navegador(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        manager = WorkspaceManager(_make_store(), mode=WorkspaceMode.UNIFIED)
        manager.add_repo("api", str(tmp_path))

        # Unified mode shares the single default graph.
        assert manager.list_repos()[0]["graph_name"] == "navegador"

    def test_federated_ingest_uses_per_repo_store(self, tmp_path):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        graph = _make_store()
        # select_graph hands back a per-repo graph handle.
        graph._client.select_graph.return_value = MagicMock()

        manager = WorkspaceManager(graph, mode=WorkspaceMode.FEDERATED)
        manager.add_repo("svc", str(tmp_path))

        with patch("navegador.ingestion.parser.RepoIngester") as repo_ingester_cls:
            repo_ingester_cls.return_value.ingest.return_value = {"files": 1, "nodes": 3}
            summary = manager.ingest_all()

        assert "svc" in summary
        # The repo-specific graph "navegador_svc" must have been selected.
        selected = [call.args[0] for call in graph._client.select_graph.call_args_list]
        assert any("navegador_svc" in name for name in selected)

    def test_federated_search_merges_results(self):
        from navegador.multirepo import WorkspaceManager, WorkspaceMode

        graph = _make_store()

        # Every per-repo graph handle yields the same single hit.
        repo_graph = MagicMock()
        repo_graph.query.return_value = MagicMock(
            result_set=[["Function", "auth_check", "/src/auth.py"]]
        )
        graph._client.select_graph.return_value = repo_graph

        manager = WorkspaceManager(graph, mode=WorkspaceMode.FEDERATED)
        manager._repos = {
            "backend": {"path": "/tmp/backend", "graph_name": "navegador_backend"},
            "frontend": {"path": "/tmp/frontend", "graph_name": "navegador_frontend"},
        }

        # Each repo's graph yields one hit; the merged result set must be
        # non-empty (duplicates with the same name may be collapsed).
        assert len(manager.search("auth")) >= 1
1144
1145
1146 # =============================================================================
1147 # CLI smoke tests
1148 # =============================================================================
1149
1150
class TestCLIPMGroup:
    """pm group is registered on the main CLI."""

    def test_pm_group_exists(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        outcome = CliRunner().invoke(main, ["pm", "--help"])
        assert outcome.exit_code == 0
        # The group's help must advertise its ingest subcommand.
        assert "ingest" in outcome.output

    def test_pm_ingest_requires_github(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        # Invoking ingest without the required source option must fail.
        outcome = CliRunner().invoke(main, ["pm", "ingest"])
        assert outcome.exit_code != 0
1172
1173
class TestCLIDepsGroup:
    """deps group is registered on the main CLI."""

    def test_deps_group_exists(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        outcome = CliRunner().invoke(main, ["deps", "--help"])
        assert outcome.exit_code == 0
        # The group's help must advertise its ingest subcommand.
        assert "ingest" in outcome.output
1186
1187
class TestCLISubmodulesGroup:
    """submodules group is registered on the main CLI."""

    def test_submodules_group_exists(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        outcome = CliRunner().invoke(main, ["submodules", "--help"])
        assert outcome.exit_code == 0

    def test_submodules_list_empty(self, tmp_path):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        # Listing a directory with no .gitmodules succeeds with a friendly message.
        outcome = CliRunner().invoke(main, ["submodules", "list", str(tmp_path)])
        assert outcome.exit_code == 0
        assert "No submodules" in outcome.output
1209
1210
class TestCLIWorkspaceGroup:
    """workspace group is registered on the main CLI."""

    def test_workspace_group_exists(self):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        outcome = CliRunner().invoke(main, ["workspace", "--help"])
        assert outcome.exit_code == 0
        # The group's help must advertise its ingest subcommand.
        assert "ingest" in outcome.output

    def test_workspace_ingest_requires_repos(self, tmp_path):
        from click.testing import CliRunner

        from navegador.cli.commands import main

        # Ingest without any configured repos must exit non-zero.
        args = ["workspace", "ingest", "--db", str(tmp_path / "g.db")]
        outcome = CliRunner().invoke(main, args)
        assert outcome.exit_code != 0
+46 -17
--- tests/test_vcs.py
+++ tests/test_vcs.py
@@ -249,33 +249,62 @@
249249
250250
def test_false_for_git_repo(self, git_repo: Path):
251251
assert FossilAdapter(git_repo).is_repo() is False
252252
253253
254
-# ── FossilAdapter stubs raise NotImplementedError ─────────────────────────────
254
+# ── FossilAdapter implemented methods (#55) ────────────────────────────────────
255
+#
256
+# These methods are now fully implemented; they call `fossil` via subprocess.
257
+# Since fossil may not be installed in CI, we mock subprocess.run.
255258
256259
257
-class TestFossilAdapterStubs:
260
+class TestFossilAdapterImplemented:
261
+ """FossilAdapter methods are implemented — they call fossil via subprocess."""
262
+
258263
@pytest.fixture()
259264
def adapter(self, fossil_dir: Path) -> FossilAdapter:
260265
return FossilAdapter(fossil_dir)
261266
262
- def test_current_branch_raises(self, adapter: FossilAdapter):
263
- with pytest.raises(NotImplementedError, match="current_branch"):
264
- adapter.current_branch()
265
-
266
- def test_changed_files_raises(self, adapter: FossilAdapter):
267
- with pytest.raises(NotImplementedError, match="changed_files"):
268
- adapter.changed_files()
269
-
270
- def test_file_history_raises(self, adapter: FossilAdapter):
271
- with pytest.raises(NotImplementedError, match="file_history"):
272
- adapter.file_history("README.md")
273
-
274
- def test_blame_raises(self, adapter: FossilAdapter):
275
- with pytest.raises(NotImplementedError, match="blame"):
276
- adapter.blame("README.md")
267
+ def test_current_branch_returns_string(self, adapter: FossilAdapter):
268
+ from unittest.mock import MagicMock, patch
269
+
270
+ mock_result = MagicMock()
271
+ mock_result.stdout = "trunk\n"
272
+ with patch("subprocess.run", return_value=mock_result):
273
+ branch = adapter.current_branch()
274
+ assert branch == "trunk"
275
+
276
+ def test_changed_files_returns_list(self, adapter: FossilAdapter):
277
+ from unittest.mock import MagicMock, patch
278
+
279
+ mock_result = MagicMock()
280
+ mock_result.stdout = "EDITED src/main.py\n"
281
+ with patch("subprocess.run", return_value=mock_result):
282
+ files = adapter.changed_files()
283
+ assert isinstance(files, list)
284
+ assert "src/main.py" in files
285
+
286
+ def test_file_history_returns_list(self, adapter: FossilAdapter):
287
+ from unittest.mock import MagicMock, patch
288
+
289
+ mock_result = MagicMock()
290
+ mock_result.stdout = (
291
+ "=== 2024-01-15 ===\n"
292
+ "14:23:07 [abc123] Fix bug. (user: alice, tags: trunk)\n"
293
+ )
294
+ with patch("subprocess.run", return_value=mock_result):
295
+ history = adapter.file_history("README.md")
296
+ assert isinstance(history, list)
297
+
298
+ def test_blame_returns_list(self, adapter: FossilAdapter):
299
+ from unittest.mock import MagicMock, patch
300
+
301
+ mock_result = MagicMock()
302
+ mock_result.stdout = "1.1 alice 2024-01-15: # line content\n"
303
+ with patch("subprocess.run", return_value=mock_result):
304
+ result = adapter.blame("README.md")
305
+ assert isinstance(result, list)
277306
278307
279308
# ── detect_vcs factory ─────────────────────────────────────────────────────────
280309
281310
282311
--- tests/test_vcs.py
+++ tests/test_vcs.py
@@ -249,33 +249,62 @@
249
250 def test_false_for_git_repo(self, git_repo: Path):
251 assert FossilAdapter(git_repo).is_repo() is False
252
253
254 # ── FossilAdapter stubs raise NotImplementedError ─────────────────────────────
 
 
 
255
256
257 class TestFossilAdapterStubs:
 
 
258 @pytest.fixture()
259 def adapter(self, fossil_dir: Path) -> FossilAdapter:
260 return FossilAdapter(fossil_dir)
261
262 def test_current_branch_raises(self, adapter: FossilAdapter):
263 with pytest.raises(NotImplementedError, match="current_branch"):
264 adapter.current_branch()
265
266 def test_changed_files_raises(self, adapter: FossilAdapter):
267 with pytest.raises(NotImplementedError, match="changed_files"):
268 adapter.changed_files()
269
270 def test_file_history_raises(self, adapter: FossilAdapter):
271 with pytest.raises(NotImplementedError, match="file_history"):
272 adapter.file_history("README.md")
273
274 def test_blame_raises(self, adapter: FossilAdapter):
275 with pytest.raises(NotImplementedError, match="blame"):
276 adapter.blame("README.md")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
277
278
279 # ── detect_vcs factory ─────────────────────────────────────────────────────────
280
281
282
--- tests/test_vcs.py
+++ tests/test_vcs.py
@@ -249,33 +249,62 @@
249
250 def test_false_for_git_repo(self, git_repo: Path):
251 assert FossilAdapter(git_repo).is_repo() is False
252
253
254 # ── FossilAdapter implemented methods (#55) ────────────────────────────────────
255 #
256 # These methods are now fully implemented; they call `fossil` via subprocess.
257 # Since fossil may not be installed in CI, we mock subprocess.run.
258
259
260 class TestFossilAdapterImplemented:
261 """FossilAdapter methods are implemented — they call fossil via subprocess."""
262
263 @pytest.fixture()
264 def adapter(self, fossil_dir: Path) -> FossilAdapter:
265 return FossilAdapter(fossil_dir)
266
267 def test_current_branch_returns_string(self, adapter: FossilAdapter):
268 from unittest.mock import MagicMock, patch
269
270 mock_result = MagicMock()
271 mock_result.stdout = "trunk\n"
272 with patch("subprocess.run", return_value=mock_result):
273 branch = adapter.current_branch()
274 assert branch == "trunk"
275
276 def test_changed_files_returns_list(self, adapter: FossilAdapter):
277 from unittest.mock import MagicMock, patch
278
279 mock_result = MagicMock()
280 mock_result.stdout = "EDITED src/main.py\n"
281 with patch("subprocess.run", return_value=mock_result):
282 files = adapter.changed_files()
283 assert isinstance(files, list)
284 assert "src/main.py" in files
285
286 def test_file_history_returns_list(self, adapter: FossilAdapter):
287 from unittest.mock import MagicMock, patch
288
289 mock_result = MagicMock()
290 mock_result.stdout = (
291 "=== 2024-01-15 ===\n"
292 "14:23:07 [abc123] Fix bug. (user: alice, tags: trunk)\n"
293 )
294 with patch("subprocess.run", return_value=mock_result):
295 history = adapter.file_history("README.md")
296 assert isinstance(history, list)
297
298 def test_blame_returns_list(self, adapter: FossilAdapter):
299 from unittest.mock import MagicMock, patch
300
301 mock_result = MagicMock()
302 mock_result.stdout = "1.1 alice 2024-01-15: # line content\n"
303 with patch("subprocess.run", return_value=mock_result):
304 result = adapter.blame("README.md")
305 assert isinstance(result, list)
306
307
308 # ── detect_vcs factory ─────────────────────────────────────────────────────────
309
310
311

Keyboard Shortcuts

Open search /
Next entry (timeline) j
Previous entry (timeline) k
Open focused entry Enter
Show this help ?
Toggle theme Top nav button