Navegador

feat: intelligence layer — semantic search, community detection, NLP queries, doc generation SemanticSearch: embedding-based similarity search. CommunityDetector: label propagation over heterogeneous graph. NLPEngine: natural language → Cypher, community naming, doc generation. DocGenerator: template and LLM-powered documentation. Closes #2, closes #5, closes #17, closes #19

lmata 2026-03-23 05:39 trunk
Commit 5d70f82f28a5c80abdeae338986d1ec1545a24e8291da71acc0db3348b9405fb
--- navegador/cli/commands.py
+++ navegador/cli/commands.py
@@ -1997,5 +1997,281 @@
19971997
console.print(
19981998
f"[green]{repo_name}[/green]: "
19991999
f"{repo_stats.get('files', 0)} files, "
20002000
f"{repo_stats.get('nodes', 0)} nodes"
20012001
)
2002
+
2003
+
2004
+# ── Intelligence: semantic search ─────────────────────────────────────────────
2005
+
2006
+
2007
+@main.command("semantic-search")
2008
+@click.argument("query")
2009
+@DB_OPTION
2010
+@click.option("--limit", default=10, show_default=True, help="Maximum results to return.")
2011
+@click.option(
2012
+ "--index",
2013
+ "do_index",
2014
+ is_flag=True,
2015
+ help="(Re-)build the embedding index before searching.",
2016
+)
2017
+@click.option(
2018
+ "--provider",
2019
+ "llm_provider",
2020
+ default="",
2021
+ help="LLM provider to use (anthropic, openai, ollama). Auto-detected if omitted.",
2022
+)
2023
+@click.option("--model", "llm_model", default="", help="LLM model name.")
2024
+@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
2025
+def semantic_search(
2026
+ query: str,
2027
+ db: str,
2028
+ limit: int,
2029
+ do_index: bool,
2030
+ llm_provider: str,
2031
+ llm_model: str,
2032
+ as_json: bool,
2033
+):
2034
+ """Semantic similarity search using embeddings.
2035
+
2036
+ Embeds QUERY and returns the most similar symbols from the graph.
2037
+ Use --index to (re-)build the embedding index before searching.
2038
+
2039
+ \b
2040
+ Examples:
2041
+ navegador semantic-search "validates JWT tokens"
2042
+ navegador semantic-search "database connection" --index --provider openai
2043
+ """
2044
+ from navegador.intelligence.search import SemanticSearch
2045
+ from navegador.llm import auto_provider, get_provider
2046
+
2047
+ store = _get_store(db)
2048
+ provider = (
2049
+ get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model)
2050
+ )
2051
+ ss = SemanticSearch(store, provider)
2052
+
2053
+ if do_index:
2054
+ n = ss.index()
2055
+ if not as_json:
2056
+ console.print(f"[green]Indexed[/green] {n} nodes.")
2057
+
2058
+ results = ss.search(query, limit=limit)
2059
+
2060
+ if as_json:
2061
+ click.echo(json.dumps(results, indent=2))
2062
+ return
2063
+
2064
+ if not results:
2065
+ console.print("[yellow]No results found. Try --index to build the index first.[/yellow]")
2066
+ return
2067
+
2068
+ table = Table(title=f"Semantic search: {query!r}")
2069
+ table.add_column("Score", style="cyan", justify="right")
2070
+ table.add_column("Type", style="yellow")
2071
+ table.add_column("Name", style="bold")
2072
+ table.add_column("File", style="dim")
2073
+ for r in results:
2074
+ table.add_row(
2075
+ f"{r['score']:.3f}",
2076
+ r.get("type", ""),
2077
+ r.get("name", ""),
2078
+ r.get("file_path", ""),
2079
+ )
2080
+ console.print(table)
2081
+
2082
+
2083
+# ── Intelligence: community detection ─────────────────────────────────────────
2084
+
2085
+
2086
+@main.command("communities")
2087
+@DB_OPTION
2088
+@click.option("--min-size", default=2, show_default=True, help="Minimum community size.")
2089
+@click.option("--store-labels", is_flag=True, help="Write community labels back onto nodes.")
2090
+@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
2091
+def communities(db: str, min_size: int, store_labels: bool, as_json: bool):
2092
+ """Detect communities in the graph via label propagation.
2093
+
2094
+ \b
2095
+ Examples:
2096
+ navegador communities
2097
+ navegador communities --min-size 3 --store-labels
2098
+ """
2099
+ from navegador.intelligence.community import CommunityDetector
2100
+
2101
+ store = _get_store(db)
2102
+ detector = CommunityDetector(store)
2103
+ detected = detector.detect(min_size=min_size)
2104
+
2105
+ if store_labels:
2106
+ n = detector.store_communities()
2107
+ if not as_json:
2108
+ console.print(f"[green]Community labels written to[/green] {n} nodes.")
2109
+
2110
+ if as_json:
2111
+ click.echo(
2112
+ json.dumps(
2113
+ [
2114
+ {
2115
+ "name": c.name,
2116
+ "members": c.members,
2117
+ "size": c.size,
2118
+ "density": c.density,
2119
+ }
2120
+ for c in detected
2121
+ ],
2122
+ indent=2,
2123
+ )
2124
+ )
2125
+ return
2126
+
2127
+ if not detected:
2128
+ console.print("[yellow]No communities found (graph may be empty).[/yellow]")
2129
+ return
2130
+
2131
+ table = Table(title=f"Communities (min_size={min_size})")
2132
+ table.add_column("Community", style="cyan")
2133
+ table.add_column("Size", justify="right", style="green")
2134
+ table.add_column("Density", justify="right", style="yellow")
2135
+ table.add_column("Members (preview)", style="dim")
2136
+ for c in detected:
2137
+ preview = ", ".join(c.members[:5])
2138
+ if c.size > 5:
2139
+ preview += f" …+{c.size - 5}"
2140
+ table.add_row(c.name, str(c.size), f"{c.density:.3f}", preview)
2141
+ console.print(table)
2142
+
2143
+
2144
+# ── Intelligence: natural language query ──────────────────────────────────────
2145
+
2146
+
2147
+@main.command("ask")
2148
+@click.argument("question")
2149
+@DB_OPTION
2150
+@click.option(
2151
+ "--provider",
2152
+ "llm_provider",
2153
+ default="",
2154
+ help="LLM provider (anthropic, openai, ollama). Auto-detected if omitted.",
2155
+)
2156
+@click.option("--model", "llm_model", default="", help="LLM model name.")
2157
+def ask(question: str, db: str, llm_provider: str, llm_model: str):
2158
+ """Ask a natural-language question about the codebase.
2159
+
2160
+ Converts the question to Cypher, executes it, and returns a
2161
+ human-readable answer.
2162
+
2163
+ \b
2164
+ Examples:
2165
+ navegador ask "Which functions call authenticate_user?"
2166
+ navegador ask "What concepts are in the auth domain?"
2167
+ """
2168
+ from navegador.intelligence.nlp import NLPEngine
2169
+ from navegador.llm import auto_provider, get_provider
2170
+
2171
+ store = _get_store(db)
2172
+ provider = (
2173
+ get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model)
2174
+ )
2175
+ engine = NLPEngine(store, provider)
2176
+
2177
+ with console.status("[bold]Thinking...[/bold]"):
2178
+ answer = engine.natural_query(question)
2179
+
2180
+ console.print(answer)
2181
+
2182
+
2183
+# ── Intelligence: generate docs ───────────────────────────────────────────────
2184
+
2185
+
2186
+@main.command("generate-docs")
2187
+@click.argument("name")
2188
+@DB_OPTION
2189
+@click.option(
2190
+ "--provider",
2191
+ "llm_provider",
2192
+ default="",
2193
+ help="LLM provider (anthropic, openai, ollama). Auto-detected if omitted.",
2194
+)
2195
+@click.option("--model", "llm_model", default="", help="LLM model name.")
2196
+@click.option("--file", "file_path", default="", help="Narrow to a specific file.")
2197
+def generate_docs_cmd(
2198
+ name: str, db: str, llm_provider: str, llm_model: str, file_path: str
2199
+):
2200
+ """Generate LLM-powered documentation for a named symbol.
2201
+
2202
+ \b
2203
+ Examples:
2204
+ navegador generate-docs authenticate_user
2205
+ navegador generate-docs GraphStore --file navegador/graph/store.py
2206
+ """
2207
+ from navegador.intelligence.nlp import NLPEngine
2208
+ from navegador.llm import auto_provider, get_provider
2209
+
2210
+ store = _get_store(db)
2211
+ provider = (
2212
+ get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model)
2213
+ )
2214
+ engine = NLPEngine(store, provider)
2215
+
2216
+ with console.status("[bold]Generating docs...[/bold]"):
2217
+ docs = engine.generate_docs(name, file_path=file_path)
2218
+
2219
+ console.print(docs)
2220
+
2221
+
2222
+# ── Intelligence: docs (template + LLM) ──────────────────────────────────────
2223
+
2224
+
2225
+@main.command("docs")
2226
+@click.argument("target")
2227
+@DB_OPTION
2228
+@click.option("--project", is_flag=True, help="Generate full project documentation.")
2229
+@click.option(
2230
+ "--provider",
2231
+ "llm_provider",
2232
+ default="",
2233
+ help="LLM provider (anthropic, openai, ollama). Template mode if omitted.",
2234
+)
2235
+@click.option("--model", "llm_model", default="", help="LLM model name.")
2236
+@click.option("--json", "as_json", is_flag=True, help="Output as JSON (wraps markdown in a dict).")
2237
+def docs(
2238
+ target: str, db: str, project: bool, llm_provider: str, llm_model: str, as_json: bool
2239
+):
2240
+ """Generate markdown documentation from the graph.
2241
+
2242
+ TARGET can be a file path or a module name (dotted or partial).
2243
+ Use --project to generate full project docs instead.
2244
+
2245
+ \b
2246
+ Examples:
2247
+ navegador docs navegador/graph/store.py
2248
+ navegador docs navegador.graph
2249
+ navegador docs . --project
2250
+ navegador docs . --project --provider openai
2251
+ """
2252
+ from navegador.intelligence.docgen import DocGenerator
2253
+
2254
+ store = _get_store(db)
2255
+
2256
+ provider = None
2257
+ if llm_provider:
2258
+ from navegador.llm import get_provider
2259
+
2260
+ provider = get_provider(llm_provider, model=llm_model)
2261
+
2262
+ gen = DocGenerator(store, provider=provider)
2263
+
2264
+ if project:
2265
+ with console.status("[bold]Generating project docs...[/bold]"):
2266
+ output = gen.generate_project_docs()
2267
+ elif "/" in target or target.endswith(".py"):
2268
+ with console.status(f"[bold]Generating docs for file[/bold] {target}..."):
2269
+ output = gen.generate_file_docs(target)
2270
+ else:
2271
+ with console.status(f"[bold]Generating docs for module[/bold] {target}..."):
2272
+ output = gen.generate_module_docs(target)
2273
+
2274
+ if as_json:
2275
+ click.echo(json.dumps({"docs": output}, indent=2))
2276
+ else:
2277
+ console.print(output)
20022278
20032279
ADDED navegador/intelligence/__init__.py
20042280
ADDED navegador/intelligence/community.py
20052281
ADDED navegador/intelligence/docgen.py
20062282
ADDED navegador/intelligence/nlp.py
20072283
ADDED navegador/intelligence/search.py
20082284
ADDED tests/test_intelligence.py
--- navegador/cli/commands.py
+++ navegador/cli/commands.py
@@ -1997,5 +1997,281 @@
1997 console.print(
1998 f"[green]{repo_name}[/green]: "
1999 f"{repo_stats.get('files', 0)} files, "
2000 f"{repo_stats.get('nodes', 0)} nodes"
2001 )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2002
2003 DDED navegador/intelligence/__init__.py
2004 DDED navegador/intelligence/community.py
2005 DDED navegador/intelligence/docgen.py
2006 DDED navegador/intelligence/nlp.py
2007 DDED navegador/intelligence/search.py
2008 DDED tests/test_intelligence.py
--- navegador/cli/commands.py
+++ navegador/cli/commands.py
@@ -1997,5 +1997,281 @@
1997 console.print(
1998 f"[green]{repo_name}[/green]: "
1999 f"{repo_stats.get('files', 0)} files, "
2000 f"{repo_stats.get('nodes', 0)} nodes"
2001 )
2002
2003
2004 # ── Intelligence: semantic search ─────────────────────────────────────────────
2005
2006
2007 @main.command("semantic-search")
2008 @click.argument("query")
2009 @DB_OPTION
2010 @click.option("--limit", default=10, show_default=True, help="Maximum results to return.")
2011 @click.option(
2012 "--index",
2013 "do_index",
2014 is_flag=True,
2015 help="(Re-)build the embedding index before searching.",
2016 )
2017 @click.option(
2018 "--provider",
2019 "llm_provider",
2020 default="",
2021 help="LLM provider to use (anthropic, openai, ollama). Auto-detected if omitted.",
2022 )
2023 @click.option("--model", "llm_model", default="", help="LLM model name.")
2024 @click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
2025 def semantic_search(
2026 query: str,
2027 db: str,
2028 limit: int,
2029 do_index: bool,
2030 llm_provider: str,
2031 llm_model: str,
2032 as_json: bool,
2033 ):
2034 """Semantic similarity search using embeddings.
2035
2036 Embeds QUERY and returns the most similar symbols from the graph.
2037 Use --index to (re-)build the embedding index before searching.
2038
2039 \b
2040 Examples:
2041 navegador semantic-search "validates JWT tokens"
2042 navegador semantic-search "database connection" --index --provider openai
2043 """
2044 from navegador.intelligence.search import SemanticSearch
2045 from navegador.llm import auto_provider, get_provider
2046
2047 store = _get_store(db)
2048 provider = (
2049 get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model)
2050 )
2051 ss = SemanticSearch(store, provider)
2052
2053 if do_index:
2054 n = ss.index()
2055 if not as_json:
2056 console.print(f"[green]Indexed[/green] {n} nodes.")
2057
2058 results = ss.search(query, limit=limit)
2059
2060 if as_json:
2061 click.echo(json.dumps(results, indent=2))
2062 return
2063
2064 if not results:
2065 console.print("[yellow]No results found. Try --index to build the index first.[/yellow]")
2066 return
2067
2068 table = Table(title=f"Semantic search: {query!r}")
2069 table.add_column("Score", style="cyan", justify="right")
2070 table.add_column("Type", style="yellow")
2071 table.add_column("Name", style="bold")
2072 table.add_column("File", style="dim")
2073 for r in results:
2074 table.add_row(
2075 f"{r['score']:.3f}",
2076 r.get("type", ""),
2077 r.get("name", ""),
2078 r.get("file_path", ""),
2079 )
2080 console.print(table)
2081
2082
2083 # ── Intelligence: community detection ─────────────────────────────────────────
2084
2085
2086 @main.command("communities")
2087 @DB_OPTION
2088 @click.option("--min-size", default=2, show_default=True, help="Minimum community size.")
2089 @click.option("--store-labels", is_flag=True, help="Write community labels back onto nodes.")
2090 @click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
2091 def communities(db: str, min_size: int, store_labels: bool, as_json: bool):
2092 """Detect communities in the graph via label propagation.
2093
2094 \b
2095 Examples:
2096 navegador communities
2097 navegador communities --min-size 3 --store-labels
2098 """
2099 from navegador.intelligence.community import CommunityDetector
2100
2101 store = _get_store(db)
2102 detector = CommunityDetector(store)
2103 detected = detector.detect(min_size=min_size)
2104
2105 if store_labels:
2106 n = detector.store_communities()
2107 if not as_json:
2108 console.print(f"[green]Community labels written to[/green] {n} nodes.")
2109
2110 if as_json:
2111 click.echo(
2112 json.dumps(
2113 [
2114 {
2115 "name": c.name,
2116 "members": c.members,
2117 "size": c.size,
2118 "density": c.density,
2119 }
2120 for c in detected
2121 ],
2122 indent=2,
2123 )
2124 )
2125 return
2126
2127 if not detected:
2128 console.print("[yellow]No communities found (graph may be empty).[/yellow]")
2129 return
2130
2131 table = Table(title=f"Communities (min_size={min_size})")
2132 table.add_column("Community", style="cyan")
2133 table.add_column("Size", justify="right", style="green")
2134 table.add_column("Density", justify="right", style="yellow")
2135 table.add_column("Members (preview)", style="dim")
2136 for c in detected:
2137 preview = ", ".join(c.members[:5])
2138 if c.size > 5:
2139 preview += f" …+{c.size - 5}"
2140 table.add_row(c.name, str(c.size), f"{c.density:.3f}", preview)
2141 console.print(table)
2142
2143
2144 # ── Intelligence: natural language query ──────────────────────────────────────
2145
2146
2147 @main.command("ask")
2148 @click.argument("question")
2149 @DB_OPTION
2150 @click.option(
2151 "--provider",
2152 "llm_provider",
2153 default="",
2154 help="LLM provider (anthropic, openai, ollama). Auto-detected if omitted.",
2155 )
2156 @click.option("--model", "llm_model", default="", help="LLM model name.")
2157 def ask(question: str, db: str, llm_provider: str, llm_model: str):
2158 """Ask a natural-language question about the codebase.
2159
2160 Converts the question to Cypher, executes it, and returns a
2161 human-readable answer.
2162
2163 \b
2164 Examples:
2165 navegador ask "Which functions call authenticate_user?"
2166 navegador ask "What concepts are in the auth domain?"
2167 """
2168 from navegador.intelligence.nlp import NLPEngine
2169 from navegador.llm import auto_provider, get_provider
2170
2171 store = _get_store(db)
2172 provider = (
2173 get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model)
2174 )
2175 engine = NLPEngine(store, provider)
2176
2177 with console.status("[bold]Thinking...[/bold]"):
2178 answer = engine.natural_query(question)
2179
2180 console.print(answer)
2181
2182
2183 # ── Intelligence: generate docs ───────────────────────────────────────────────
2184
2185
2186 @main.command("generate-docs")
2187 @click.argument("name")
2188 @DB_OPTION
2189 @click.option(
2190 "--provider",
2191 "llm_provider",
2192 default="",
2193 help="LLM provider (anthropic, openai, ollama). Auto-detected if omitted.",
2194 )
2195 @click.option("--model", "llm_model", default="", help="LLM model name.")
2196 @click.option("--file", "file_path", default="", help="Narrow to a specific file.")
2197 def generate_docs_cmd(
2198 name: str, db: str, llm_provider: str, llm_model: str, file_path: str
2199 ):
2200 """Generate LLM-powered documentation for a named symbol.
2201
2202 \b
2203 Examples:
2204 navegador generate-docs authenticate_user
2205 navegador generate-docs GraphStore --file navegador/graph/store.py
2206 """
2207 from navegador.intelligence.nlp import NLPEngine
2208 from navegador.llm import auto_provider, get_provider
2209
2210 store = _get_store(db)
2211 provider = (
2212 get_provider(llm_provider, model=llm_model) if llm_provider else auto_provider(model=llm_model)
2213 )
2214 engine = NLPEngine(store, provider)
2215
2216 with console.status("[bold]Generating docs...[/bold]"):
2217 docs = engine.generate_docs(name, file_path=file_path)
2218
2219 console.print(docs)
2220
2221
2222 # ── Intelligence: docs (template + LLM) ──────────────────────────────────────
2223
2224
2225 @main.command("docs")
2226 @click.argument("target")
2227 @DB_OPTION
2228 @click.option("--project", is_flag=True, help="Generate full project documentation.")
2229 @click.option(
2230 "--provider",
2231 "llm_provider",
2232 default="",
2233 help="LLM provider (anthropic, openai, ollama). Template mode if omitted.",
2234 )
2235 @click.option("--model", "llm_model", default="", help="LLM model name.")
2236 @click.option("--json", "as_json", is_flag=True, help="Output as JSON (wraps markdown in a dict).")
2237 def docs(
2238 target: str, db: str, project: bool, llm_provider: str, llm_model: str, as_json: bool
2239 ):
2240 """Generate markdown documentation from the graph.
2241
2242 TARGET can be a file path or a module name (dotted or partial).
2243 Use --project to generate full project docs instead.
2244
2245 \b
2246 Examples:
2247 navegador docs navegador/graph/store.py
2248 navegador docs navegador.graph
2249 navegador docs . --project
2250 navegador docs . --project --provider openai
2251 """
2252 from navegador.intelligence.docgen import DocGenerator
2253
2254 store = _get_store(db)
2255
2256 provider = None
2257 if llm_provider:
2258 from navegador.llm import get_provider
2259
2260 provider = get_provider(llm_provider, model=llm_model)
2261
2262 gen = DocGenerator(store, provider=provider)
2263
2264 if project:
2265 with console.status("[bold]Generating project docs...[/bold]"):
2266 output = gen.generate_project_docs()
2267 elif "/" in target or target.endswith(".py"):
2268 with console.status(f"[bold]Generating docs for file[/bold] {target}..."):
2269 output = gen.generate_file_docs(target)
2270 else:
2271 with console.status(f"[bold]Generating docs for module[/bold] {target}..."):
2272 output = gen.generate_module_docs(target)
2273
2274 if as_json:
2275 click.echo(json.dumps({"docs": output}, indent=2))
2276 else:
2277 console.print(output)
2278
2279 DDED navegador/intelligence/__init__.py
2280 DDED navegador/intelligence/community.py
2281 DDED navegador/intelligence/docgen.py
2282 DDED navegador/intelligence/nlp.py
2283 DDED navegador/intelligence/search.py
2284 DDED tests/test_intelligence.py
--- a/navegador/intelligence/__init__.py
+++ b/navegador/intelligence/__init__.py
@@ -0,0 +1,21 @@
1
+"""
2
+Intelligence layer — semantic search, community detection, NLP queries, and doc generation.
3
+
4
+ SemanticSearch — embedding-based similarity search over graph nodes
5
+ CommunityDetector — label-propagation community detection over the graph
6
+ NLPEngine — natural language queries, community naming, doc generation
7
+ DocGenerator — markdown documentation from graph context
8
+"""
9
+
10
+from navegador.intelligence.community import Community, CommunityDetector
11
+from navegador.intelligence.docgen import DocGenerator
12
+from navegador.intelligence.nlp import NLPEngine
13
+from navegador.intelligence.search import SemanticSearch
14
+
15
+__all__ = [
16
+ "SemanticSearch",
17
+ "CommunityDetector",
18
+ "Community",
19
+ "NLPEngine",
20
+ "DocGenerator",
21
+]
--- a/navegador/intelligence/__init__.py
+++ b/navegador/intelligence/__init__.py
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/navegador/intelligence/__init__.py
+++ b/navegador/intelligence/__init__.py
@@ -0,0 +1,21 @@
1 """
2 Intelligence layer — semantic search, community detection, NLP queries, and doc generation.
3
4 SemanticSearch — embedding-based similarity search over graph nodes
5 CommunityDetector — label-propagation community detection over the graph
6 NLPEngine — natural language queries, community naming, doc generation
7 DocGenerator — markdown documentation from graph context
8 """
9
10 from navegador.intelligence.community import Community, CommunityDetector
11 from navegador.intelligence.docgen import DocGenerator
12 from navegador.intelligence.nlp import NLPEngine
13 from navegador.intelligence.search import SemanticSearch
14
15 __all__ = [
16 "SemanticSearch",
17 "CommunityDetector",
18 "Community",
19 "NLPEngine",
20 "DocGenerator",
21 ]
--- a/navegador/intelligence/community.py
+++ b/navegador/intelligence/community.py
@@ -0,0 +1,191 @@
1
+"""
2
+CommunityDetector — label propagation community detection over the navegador graph.
3
+
4
+Implements a simple synchronous label-propagation algorithm:
5
+1. Each node starts as its own community (label = node id).
6
+2. On each iteration every node adopts the most common label among its
7
+ neighbours (ties broken by lowest label value).
8
+3. Repeat until stable or ``max_iter`` reached.
9
+4. Collect nodes sharing the same label into ``Community`` objects.
10
+
11
+Usage::
12
+
13
+ from navegador.graph import GraphStore
14
+ from navegador.intelligence.community import CommunityDetector
15
+
16
+ store = GraphStore.sqlite(".navegador/graph.db")
17
+ detector = CommunityDetector(store)
18
+ communities = detector.detect(min_size=2)
19
+ detector.store_communities()
20
+"""
21
+
22
+from __future__ import annotations
23
+
24
+from collections import Counter
25
+from dataclasses import dataclass, field
26
+from typing import TYPE_CHECKING, Any
27
+
28
+if TYPE_CHECKING:
29
+ from navegador.graph.store import GraphStore
30
+
31
+
32
+# ── Cypher helpers ────────────────────────────────────────────────────────────
33
+
34
+# Fetch all node ids and names (we use the internal FalkorDB node id via id(n))
35
+_ALL_NODES = """
36
+MATCH (n)
37
+WHERE n.name IS NOT NULL
38
+RETURN id(n) AS id, n.name AS name,
39
+ coalesce(n.file_path, '') AS file_path,
40
+ labels(n)[0] AS type
41
+"""
42
+
43
+# Fetch all edges as (from_id, to_id) — undirected for community purposes
44
+_ALL_EDGES = """
45
+MATCH (a)-[r]->(b)
46
+WHERE a.name IS NOT NULL AND b.name IS NOT NULL
47
+RETURN id(a) AS src, id(b) AS dst
48
+"""
49
+
50
+# Write the community label back onto a node
51
+_SET_COMMUNITY = """
52
+MATCH (n)
53
+WHERE id(n) = $node_id
54
+SET n.community = $community
55
+"""
56
+
57
+
58
+# ── Data model ────────────────────────────────────────────────────────────────
59
+
60
+
61
+@dataclass
62
+class Community:
63
+ """A detected community in the graph.
64
+
65
+ Attributes:
66
+ name: Auto-generated name (e.g. ``"community_3"``). Can be replaced
67
+ by :class:`~navegador.intelligence.nlp.NLPEngine.name_communities`.
68
+ members: Node names belonging to this community.
69
+ size: ``len(members)``.
70
+ density: Fraction of possible internal edges that actually exist
71
+ (0.0–1.0). Computed lazily; ``-1.0`` means not yet calculated.
72
+ """
73
+
74
+ name: str
75
+ members: list[str] = field(default_factory=list)
76
+ size: int = 0
77
+ density: float = -1.0
78
+
79
+ def __post_init__(self) -> None:
80
+ if self.size == 0:
81
+ self.size = len(self.members)
82
+
83
+
84
+# ── Detector ─────────────────────────────────────────────────────────────────
85
+
86
+
87
+class CommunityDetector:
88
+ """
89
+ Detect communities in the navegador graph via label propagation.
90
+
91
+ Args:
92
+ store: A :class:`~navegador.graph.GraphStore` instance.
93
+ """
94
+
95
+ def __init__(self, store: "GraphStore") -> None:
96
+ self._store = store
97
+ # Populated after detect()
98
+ self._communities: list[Community] = []
99
+ # id -> label mapping after propagation
100
+ self._labels: dict[int, int] = {}
101
+ # id -> name/meta mapping
102
+ self._nodes: dict[int, dict[str, Any]] = {}
103
+
104
+ # ── Public API ────────────────────────────────────────────────────────────
105
+
106
+ def detect(self, min_size: int = 2, max_iter: int = 50) -> list[Community]:
107
+ """
108
+ Run label-propagation and return communities with at least *min_size*
109
+ members.
110
+
111
+ Args:
112
+ min_size: Minimum community size to include in results.
113
+ max_iter: Maximum number of propagation iterations.
114
+
115
+ Returns:
116
+ List of :class:`Community` objects, sorted largest-first.
117
+ """
118
+ nodes, edges = self._load_graph()
119
+ if not nodes:
120
+ self._communities = []
121
+ return []
122
+
123
+ # Initialise: each node is its own community
124
+ labels: dict[int, int] = {nid: nid for nid in nodes}
125
+
126
+ # Build adjacency list (undirected)
127
+ adj: dict[int, list[int]] = {nid: [] for nid in nodes}
128
+ for src, dst in edges:
129
+ if src in adj and dst in adj:
130
+ adj[src].append(dst)
131
+ adj[dst].append(src)
132
+
133
+ # Propagation loop
134
+ for _ in range(max_iter):
135
+ changed = False
136
+ for nid in sorted(nodes): # deterministic order
137
+ neighbours = adj[nid]
138
+ if not neighbours:
139
+ continue
140
+ counts: Counter = Counter(labels[nb] for nb in neighbours if nb in labels)
141
+ if not counts:
142
+ continue
143
+ best_label = min(counts, key=lambda lbl: (-counts[lbl], lbl))
144
+ if best_label != labels[nid]:
145
+ labels[nid] = best_label
146
+ changed = True
147
+ if not changed:
148
+ break
149
+
150
+ self._labels = labels
151
+ self._nodes = nodes
152
+ self._communities = self._build_communities(labels, nodes, adj, min_size)
153
+ return self._communities
154
+
155
+ def store_communities(self) -> int:
156
+ """
157
+ Write community labels as a ``community`` property on all nodes.
158
+
159
+ Must call :meth:`detect` first.
160
+
161
+ Returns:
162
+ Number of nodes updated.
163
+ """
164
+ updated = 0
165
+ for node_id, label in self._labels.items():
166
+ # Find community name for this label
167
+ comm_name = f"community_{label}"
168
+ for c in self._communities:
169
+ if c.name == comm_name:
170
+ break
171
+ # Use numeric label as community identifier
172
+ self._store.query(_SET_COMMUNITY, {"node_id": node_id, "community": label})
173
+ updated += 1
174
+ return updated
175
+
176
+ # ── Internals �
177
+ �───────
178
+ for nb in adj.get(nid, [])
179
+ if n�──────────────────────────────────────────
180
+
181
+ def _load_graph(
182
+ self,
183
+ ) -> tuple[dict[int, dict[str, Any]], list[tuple[int, int]]]:
184
+ """Load all nodes and edges from the store."""
185
+ node_result = self._store.query(_ALL_NODES, {})
186
+ nodes: dict[int, dict[str, Any]] = {}
187
+ for row in node_result.result_set or []:
188
+ nid, name, file_path, node_type = row[0], row[1], row[2], row[3]
189
+ nodes[nid] = {"name": name, "file_path": file_path, "type": node_type}
190
+
191
+ edge_r
--- a/navegador/intelligence/community.py
+++ b/navegador/intelligence/community.py
@@ -0,0 +1,191 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/navegador/intelligence/community.py
+++ b/navegador/intelligence/community.py
@@ -0,0 +1,191 @@
1 """
2 CommunityDetector — label propagation community detection over the navegador graph.
3
4 Implements a simple synchronous label-propagation algorithm:
5 1. Each node starts as its own community (label = node id).
6 2. On each iteration every node adopts the most common label among its
7 neighbours (ties broken by lowest label value).
8 3. Repeat until stable or ``max_iter`` reached.
9 4. Collect nodes sharing the same label into ``Community`` objects.
10
11 Usage::
12
13 from navegador.graph import GraphStore
14 from navegador.intelligence.community import CommunityDetector
15
16 store = GraphStore.sqlite(".navegador/graph.db")
17 detector = CommunityDetector(store)
18 communities = detector.detect(min_size=2)
19 detector.store_communities()
20 """
21
22 from __future__ import annotations
23
24 from collections import Counter
25 from dataclasses import dataclass, field
26 from typing import TYPE_CHECKING, Any
27
28 if TYPE_CHECKING:
29 from navegador.graph.store import GraphStore
30
31
32 # ── Cypher helpers ────────────────────────────────────────────────────────────
33
34 # Fetch all node ids and names (we use the internal FalkorDB node id via id(n))
35 _ALL_NODES = """
36 MATCH (n)
37 WHERE n.name IS NOT NULL
38 RETURN id(n) AS id, n.name AS name,
39 coalesce(n.file_path, '') AS file_path,
40 labels(n)[0] AS type
41 """
42
43 # Fetch all edges as (from_id, to_id) — undirected for community purposes
44 _ALL_EDGES = """
45 MATCH (a)-[r]->(b)
46 WHERE a.name IS NOT NULL AND b.name IS NOT NULL
47 RETURN id(a) AS src, id(b) AS dst
48 """
49
50 # Write the community label back onto a node
51 _SET_COMMUNITY = """
52 MATCH (n)
53 WHERE id(n) = $node_id
54 SET n.community = $community
55 """
56
57
58 # ── Data model ────────────────────────────────────────────────────────────────
59
60
@dataclass
class Community:
    """One cluster of related nodes found by label propagation.

    Attributes:
        name: Auto-generated identifier such as ``"community_3"``; may be
            replaced later with a human-friendly label.
        members: Names of the nodes that belong to this cluster.
        size: Number of members; filled in automatically when left at 0.
        density: Fraction of possible internal edges that exist (0.0–1.0);
            ``-1.0`` until it has been computed.
    """

    name: str
    members: list[str] = field(default_factory=list)
    size: int = 0
    density: float = -1.0

    def __post_init__(self) -> None:
        # Derive the size from the member list unless the caller supplied one.
        if not self.size:
            self.size = len(self.members)
82
83
84 # ── Detector ─────────────────────────────────────────────────────────────────
85
86
87 class CommunityDetector:
88 """
89 Detect communities in the navegador graph via label propagation.
90
91 Args:
92 store: A :class:`~navegador.graph.GraphStore` instance.
93 """
94
95 def __init__(self, store: "GraphStore") -> None:
96 self._store = store
97 # Populated after detect()
98 self._communities: list[Community] = []
99 # id -> label mapping after propagation
100 self._labels: dict[int, int] = {}
101 # id -> name/meta mapping
102 self._nodes: dict[int, dict[str, Any]] = {}
103
104 # ── Public API ────────────────────────────────────────────────────────────
105
106 def detect(self, min_size: int = 2, max_iter: int = 50) -> list[Community]:
107 """
108 Run label-propagation and return communities with at least *min_size*
109 members.
110
111 Args:
112 min_size: Minimum community size to include in results.
113 max_iter: Maximum number of propagation iterations.
114
115 Returns:
116 List of :class:`Community` objects, sorted largest-first.
117 """
118 nodes, edges = self._load_graph()
119 if not nodes:
120 self._communities = []
121 return []
122
123 # Initialise: each node is its own community
124 labels: dict[int, int] = {nid: nid for nid in nodes}
125
126 # Build adjacency list (undirected)
127 adj: dict[int, list[int]] = {nid: [] for nid in nodes}
128 for src, dst in edges:
129 if src in adj and dst in adj:
130 adj[src].append(dst)
131 adj[dst].append(src)
132
133 # Propagation loop
134 for _ in range(max_iter):
135 changed = False
136 for nid in sorted(nodes): # deterministic order
137 neighbours = adj[nid]
138 if not neighbours:
139 continue
140 counts: Counter = Counter(labels[nb] for nb in neighbours if nb in labels)
141 if not counts:
142 continue
143 best_label = min(counts, key=lambda lbl: (-counts[lbl], lbl))
144 if best_label != labels[nid]:
145 labels[nid] = best_label
146 changed = True
147 if not changed:
148 break
149
150 self._labels = labels
151 self._nodes = nodes
152 self._communities = self._build_communities(labels, nodes, adj, min_size)
153 return self._communities
154
155 def store_communities(self) -> int:
156 """
157 Write community labels as a ``community`` property on all nodes.
158
159 Must call :meth:`detect` first.
160
161 Returns:
162 Number of nodes updated.
163 """
164 updated = 0
165 for node_id, label in self._labels.items():
166 # Find community name for this label
167 comm_name = f"community_{label}"
168 for c in self._communities:
169 if c.name == comm_name:
170 break
171 # Use numeric label as community identifier
172 self._store.query(_SET_COMMUNITY, {"node_id": node_id, "community": label})
173 updated += 1
174 return updated
175
176 # ── Internals �
177 �───────
178 for nb in adj.get(nid, [])
179 if n�──────────────────────────────────────────
180
181 def _load_graph(
182 self,
183 ) -> tuple[dict[int, dict[str, Any]], list[tuple[int, int]]]:
184 """Load all nodes and edges from the store."""
185 node_result = self._store.query(_ALL_NODES, {})
186 nodes: dict[int, dict[str, Any]] = {}
187 for row in node_result.result_set or []:
188 nid, name, file_path, node_type = row[0], row[1], row[2], row[3]
189 nodes[nid] = {"name": name, "file_path": file_path, "type": node_type}
190
191 edge_r
--- a/navegador/intelligence/docgen.py
+++ b/navegador/intelligence/docgen.py
@@ -0,0 +1,77 @@
1
+"""
2
+DocGenerator — markdown documentation generated from the navegador graph.
3
+
4
+Supports two modes:
5
+ * **Template mode** (``provider=None``): uses graph data to fill a
6
+ structured markdown template — zero external dependencies.
7
+ * **LLM mode** (``provider=`` an :class:`~navegador.llm.LLMProvider`):
8
+ delegates to :class:`~navegador.intelligence.nlp.NLPEngine` for richer,
9
+ narrative documentation.
10
+
11
+Usage::
12
+
13
+ from navegador.graph import GraphStore
14
+ from navegador.intelligence.docgen import DocGenerator
15
+
16
+ store = GraphStore.sqlite(".navegador/graph.db")
17
+
18
+ # Template mode (no LLM required)
19
+ gen = DocGenerator(store)
20
+ print(gen.generate_file_docs("navegador/graph/store.py"))
21
+ print(gen.generate_module_docs("navegador.graph"))
22
+ print(gen.generate_project_docs())
23
+
24
+ # LLM mode
25
+ from navegador.llm import get_provider
26
+ provider = get_provider("openai")
27
+ gen = DocGenerator(store, provider=provider)
28
+ print(gen.generate_file_docs("navegador/graph/store.py"))
29
+"""
30
+
31
+from __future__ import annotations
32
+
33
+from typing import TYPE_CHECKING
34
+
35
+if TYPE_CHECKING:
36
+ from navegador.graph.store import GraphStore
37
+ from navegador.llm import LLMProvider
38
+
39
+
40
+# ── Cypher helpers ───────�
41
+ ��────────────────────�
42
+ �─────�if not rows:
43
+ file_rows in sorted(files.ite, row[4]
44
+ AS type, co[1], row[2], row[3], row[4])
45
+ lines.append(f"## {sym_type}: `{name}`")
46
+ if line is not None:
47
+ lines.append(f"_Line {line}_")
48
+ if signature:
49
+ lines += ["", f"```python\n{signature}\n```"]
50
+ if docstring:
51
+ lines += ["", docstring]
52
+ lines.append("")
53
+
54
+ return "\n".join(lines)
55
+
56
+ def _template_module_docs(self, module_name: str, module_path: str) -> str:
57
+ result = self._store.query(_MODULE_SYMBOLS, {"module_path": module_path})
58
+ rows = result.result_set or []
59
+
60
+ lines = [f"# Module: `{module_name}`", ""]
61
+ if not rows:
62
+ lines.append("_No symbols found in the graph for this module._")
63
+ return "\n".join(lines)
64
+
65
+ # Group by file
66
+ files: dict[str, list[tuple]] = {}
67
+ for row in rows:
68
+ fp = row[2] or ""
69
+ files.setdefault(fp, []).append(row)
70
+
71
+ for fp, file_rows in sorted(files.items()):
72
+ lines.append(f"## `{fp}`")
73
+ lines.append("")
74
+ for row in file_rows:
75
+ if not rows:
76
+ , file_rows in sorted(files.ite, row[4 rows = result.re)if not rows:
77
+ lines.a��────�
--- a/navegador/intelligence/docgen.py
+++ b/navegador/intelligence/docgen.py
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/navegador/intelligence/docgen.py
+++ b/navegador/intelligence/docgen.py
@@ -0,0 +1,77 @@
1 """
2 DocGenerator — markdown documentation generated from the navegador graph.
3
4 Supports two modes:
5 * **Template mode** (``provider=None``): uses graph data to fill a
6 structured markdown template — zero external dependencies.
7 * **LLM mode** (``provider=`` an :class:`~navegador.llm.LLMProvider`):
8 delegates to :class:`~navegador.intelligence.nlp.NLPEngine` for richer,
9 narrative documentation.
10
11 Usage::
12
13 from navegador.graph import GraphStore
14 from navegador.intelligence.docgen import DocGenerator
15
16 store = GraphStore.sqlite(".navegador/graph.db")
17
18 # Template mode (no LLM required)
19 gen = DocGenerator(store)
20 print(gen.generate_file_docs("navegador/graph/store.py"))
21 print(gen.generate_module_docs("navegador.graph"))
22 print(gen.generate_project_docs())
23
24 # LLM mode
25 from navegador.llm import get_provider
26 provider = get_provider("openai")
27 gen = DocGenerator(store, provider=provider)
28 print(gen.generate_file_docs("navegador/graph/store.py"))
29 """
30
31 from __future__ import annotations
32
33 from typing import TYPE_CHECKING
34
35 if TYPE_CHECKING:
36 from navegador.graph.store import GraphStore
37 from navegador.llm import LLMProvider
38
39
40 # ── Cypher helpers ───────�
41 ��────────────────────�
42 �─────�if not rows:
43 file_rows in sorted(files.ite, row[4]
44 AS type, co[1], row[2], row[3], row[4])
45 lines.append(f"## {sym_type}: `{name}`")
46 if line is not None:
47 lines.append(f"_Line {line}_")
48 if signature:
49 lines += ["", f"```python\n{signature}\n```"]
50 if docstring:
51 lines += ["", docstring]
52 lines.append("")
53
54 return "\n".join(lines)
55
56 def _template_module_docs(self, module_name: str, module_path: str) -> str:
57 result = self._store.query(_MODULE_SYMBOLS, {"module_path": module_path})
58 rows = result.result_set or []
59
60 lines = [f"# Module: `{module_name}`", ""]
61 if not rows:
62 lines.append("_No symbols found in the graph for this module._")
63 return "\n".join(lines)
64
65 # Group by file
66 files: dict[str, list[tuple]] = {}
67 for row in rows:
68 fp = row[2] or ""
69 files.setdefault(fp, []).append(row)
70
71 for fp, file_rows in sorted(files.items()):
72 lines.append(f"## `{fp}`")
73 lines.append("")
74 for row in file_rows:
75 if not rows:
76 , file_rows in sorted(files.ite, row[4 rows = result.re)if not rows:
77 lines.a��────�
--- a/navegador/intelligence/nlp.py
+++ b/navegador/intelligence/nlp.py
@@ -0,0 +1,164 @@
1
+"""
2
+NLPEngine — natural language queries, community naming, and documentation generation.
3
+
4
+Converts plain-English questions into Cypher queries, names communities with
5
+descriptive labels, and generates documentation for individual symbols.
6
+
7
+Usage::
8
+
9
+ from navegador.graph import GraphStore
10
+ from navegador.llm import get_provider
11
+ from navegador.intelligence.nlp import NLPEngine
12
+
13
+ store = GraphStore.sqlite(".navegador/graph.db")
14
+ provider = get_provider("anthropic")
15
+ engine = NLPEngine(store, provider)
16
+
17
+ answer = engine.natural_query("Which functions call authenticate_user?")
18
+ print(answer)
19
+
20
+ docs = engine.generate_docs("authenticate_user", file_path="auth.py")
21
+ print(docs)
22
+"""
23
+
24
+from __future__ import annotations
25
+
26
+import json
27
+import re
28
+from typing import TYPE_CHECKING, Any
29
+
30
+if TYPE_CHECKING:
31
+ from navegador.graph.store import GraphStore
32
+ from navegador.intelligence.community import Community
33
+ from navegador.llm import LLMProvider
34
+
35
+
36
+# ── Prompt templates ──────────────────────────────────────────────────────────
37
+
38
+_SCHEMA_SUMMARY = """
39
+The navegador knowledge graph contains these node types:
40
+ Function, Class, Method, File, Decorator — code layer
41
+ Concept, Rule, Decision, WikiPage, Domain, Person — knowledge layer
42
+
43
+Common relationships:
44
+ CALLS, INHERITS, DECORATES, CONTAINS, REFERENCES, IMPLEMENTS,
45
+ BELONGS_TO, GOVERNS, DOCUMENTS, RELATED_TO, ASSIGNED_TO, DECIDED_BY
46
+
47
+Node properties (where present):
48
+ name, file_path, line_start, docstring, description, signature,
49
+ status, domain, rationale, alternatives, date, community
50
+"""
51
+
52
+_NL_TO_CYPHER_PROMPT = """\
53
+You are a FalkorDB Cypher expert. Given the schema below and a user question,
54
+write a single Cypher query that answers the question.
55
+
56
+Return ONLY the Cypher query — no markdown fences, no explanation.
57
+
58
+{schema}
59
+
60
+User question: {question}
61
+"""
62
+
63
+_FORMAT_RESULT_PROMPT = """\
64
+The user asked: "{question}"
65
+
66
+The Cypher query executed was:
67
+{cypher}
68
+
69
+The raw result rows are:
70
+{rows}
71
+
72
+Please summarise the result in a clear, concise paragraph (2–5 sentences).
73
+"""
74
+
75
+_NAME_COMMUNITY_PROMPT = """\
76
+You ar
77
+ e naming softw
78
+ )
79
+ ities detected via graph analysis.
80
+
81
+Community members (function/class/concept names): {members}
82
+
83
+Based on these names, suggest a short, descriptive community name (3–6 words).
84
+Return ONLY the name — no punctuation, no explanation.
85
+"""
86
+
87
+_GENERATE_DOCS_PROMPT = """\
88
+Generate concise markdown documentation for the symbol described below.
89
+
90
+Symbol name: {name}
91
+File: {file_path}
92
+Type: {type}
93
+Docstring: {docstring}"
94
+ f"generated Cypher query.\n\nQuery: {cypher}\n\nError: {exc}"
95
+ )
96
+
97
+ # Step 3: format result
98
+ rows_text = json.dumps(rows[:50], indent=2, default=str)
99
+ fmt_prompt = _FY the Cypher query �SULT_PROMPT.f
100
+ )
101
+ retufmt_prompt)
102
+
103
+ # ── Community naming ──────────────────────────────────────────────────
104
+
105
+ def name_communities(self, communities: list["Community"]) -> list[dict[str, Any]]:
106
+ """
107
+ Use the LLM to generate a meaningful name for each community.
108
+
109
+ Args:
110
+ communities: List of :class:`~navegador.intelligence.community.Community`
111
+ objects (as returned by :meth:`~CommunityDetector.detect`).
112
+
113
+ Returns:
114
+ List of dicts with keys ``original_name``, ``suggested_name``,
115
+ ``members``, ``size``.
116
+ """
117
+ named: list[dict[str, Any]] = []
118
+ for comm in communities:
119
+ members_str = ", ".join(comm.members[:20]) # cap to avoid huge prompts
120
+ prompt = _NAME_COMMUNITY_PROMPT.format(members=members_str)
121
+
122
+
123
+ )
124
+
125
+ # _provider.complete(prompt).strip()
126
+ except Exception: # noqa: BLE001
127
+ "
128
+ "nd(
129
+ {
130
+ "original_name": comm.name,
131
+ "suggested_name": suggested,
132
+ "members": comm.members,
133
+ "size": comm.size,
134
+ }
135
+ )
136
+ return named
137
+
138
+ # ──"
139
+ " return named
140
+
141
+ # ── Documentation generation ──────────────────────────────────────────
142
+
143
+ def generate_docs(self, name: str, file_path: str = "") -> str:
144
+ """
145
+ Generate markdown documentation for a named symbol.
146
+
147
+ Retrieves graph context (type, docstring, signature, callers, callees)
148
+ and asks the LLM to produce structured markdown.
149
+
150
+ Args:
151
+ name: Symbol name (function, class, etc.).
152
+ file_path: Optional file path to disambiguate.
153
+
154
+ Returns:
155
+ Markdown documentation string.
156
+ """
157
+ # Look up the node
158
+ cypher = """
159
+MATCH (n)
160
+WHERE n.name = $name AND ($file_path = '' OR n.file_path = $file_path)
161
+RETURN labels(n)[0] AS type, n.name AS name,
162
+ coalesce(n.file_path, '') AS file_path,
163
+ coalesce(n.docstring, n.description, '') AS docstring,
164
+ coalesce(n.signature, '') AS signature
--- a/navegador/intelligence/nlp.py
+++ b/navegador/intelligence/nlp.py
@@ -0,0 +1,164 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/navegador/intelligence/nlp.py
+++ b/navegador/intelligence/nlp.py
@@ -0,0 +1,164 @@
1 """
2 NLPEngine — natural language queries, community naming, and documentation generation.
3
4 Converts plain-English questions into Cypher queries, names communities with
5 descriptive labels, and generates documentation for individual symbols.
6
7 Usage::
8
9 from navegador.graph import GraphStore
10 from navegador.llm import get_provider
11 from navegador.intelligence.nlp import NLPEngine
12
13 store = GraphStore.sqlite(".navegador/graph.db")
14 provider = get_provider("anthropic")
15 engine = NLPEngine(store, provider)
16
17 answer = engine.natural_query("Which functions call authenticate_user?")
18 print(answer)
19
20 docs = engine.generate_docs("authenticate_user", file_path="auth.py")
21 print(docs)
22 """
23
24 from __future__ import annotations
25
26 import json
27 import re
28 from typing import TYPE_CHECKING, Any
29
30 if TYPE_CHECKING:
31 from navegador.graph.store import GraphStore
32 from navegador.intelligence.community import Community
33 from navegador.llm import LLMProvider
34
35
36 # ── Prompt templates ──────────────────────────────────────────────────────────
37
38 _SCHEMA_SUMMARY = """
39 The navegador knowledge graph contains these node types:
40 Function, Class, Method, File, Decorator — code layer
41 Concept, Rule, Decision, WikiPage, Domain, Person — knowledge layer
42
43 Common relationships:
44 CALLS, INHERITS, DECORATES, CONTAINS, REFERENCES, IMPLEMENTS,
45 BELONGS_TO, GOVERNS, DOCUMENTS, RELATED_TO, ASSIGNED_TO, DECIDED_BY
46
47 Node properties (where present):
48 name, file_path, line_start, docstring, description, signature,
49 status, domain, rationale, alternatives, date, community
50 """
51
52 _NL_TO_CYPHER_PROMPT = """\
53 You are a FalkorDB Cypher expert. Given the schema below and a user question,
54 write a single Cypher query that answers the question.
55
56 Return ONLY the Cypher query — no markdown fences, no explanation.
57
58 {schema}
59
60 User question: {question}
61 """
62
63 _FORMAT_RESULT_PROMPT = """\
64 The user asked: "{question}"
65
66 The Cypher query executed was:
67 {cypher}
68
69 The raw result rows are:
70 {rows}
71
72 Please summarise the result in a clear, concise paragraph (2–5 sentences).
73 """
74
75 _NAME_COMMUNITY_PROMPT = """\
76 You ar
77 e naming softw
78 )
79 ities detected via graph analysis.
80
81 Community members (function/class/concept names): {members}
82
83 Based on these names, suggest a short, descriptive community name (3–6 words).
84 Return ONLY the name — no punctuation, no explanation.
85 """
86
87 _GENERATE_DOCS_PROMPT = """\
88 Generate concise markdown documentation for the symbol described below.
89
90 Symbol name: {name}
91 File: {file_path}
92 Type: {type}
93 Docstring: {docstring}"
94 f"generated Cypher query.\n\nQuery: {cypher}\n\nError: {exc}"
95 )
96
97 # Step 3: format result
98 rows_text = json.dumps(rows[:50], indent=2, default=str)
99 fmt_prompt = _FY the Cypher query �SULT_PROMPT.f
100 )
101 retufmt_prompt)
102
103 # ── Community naming ──────────────────────────────────────────────────
104
105 def name_communities(self, communities: list["Community"]) -> list[dict[str, Any]]:
106 """
107 Use the LLM to generate a meaningful name for each community.
108
109 Args:
110 communities: List of :class:`~navegador.intelligence.community.Community`
111 objects (as returned by :meth:`~CommunityDetector.detect`).
112
113 Returns:
114 List of dicts with keys ``original_name``, ``suggested_name``,
115 ``members``, ``size``.
116 """
117 named: list[dict[str, Any]] = []
118 for comm in communities:
119 members_str = ", ".join(comm.members[:20]) # cap to avoid huge prompts
120 prompt = _NAME_COMMUNITY_PROMPT.format(members=members_str)
121
122
123 )
124
125 # _provider.complete(prompt).strip()
126 except Exception: # noqa: BLE001
127 "
128 "nd(
129 {
130 "original_name": comm.name,
131 "suggested_name": suggested,
132 "members": comm.members,
133 "size": comm.size,
134 }
135 )
136 return named
137
138 # ──"
139 " return named
140
141 # ── Documentation generation ──────────────────────────────────────────
142
143 def generate_docs(self, name: str, file_path: str = "") -> str:
144 """
145 Generate markdown documentation for a named symbol.
146
147 Retrieves graph context (type, docstring, signature, callers, callees)
148 and asks the LLM to produce structured markdown.
149
150 Args:
151 name: Symbol name (function, class, etc.).
152 file_path: Optional file path to disambiguate.
153
154 Returns:
155 Markdown documentation string.
156 """
157 # Look up the node
158 cypher = """
159 MATCH (n)
160 WHERE n.name = $name AND ($file_path = '' OR n.file_path = $file_path)
161 RETURN labels(n)[0] AS type, n.name AS name,
162 coalesce(n.file_path, '') AS file_path,
163 coalesce(n.docstring, n.description, '') AS docstring,
164 coalesce(n.signature, '') AS signature
--- a/navegador/intelligence/search.py
+++ b/navegador/intelligence/search.py
@@ -0,0 +1 @@
1
+"
--- a/navegador/intelligence/search.py
+++ b/navegador/intelligence/search.py
@@ -0,0 +1 @@
 
--- a/navegador/intelligence/search.py
+++ b/navegador/intelligence/search.py
@@ -0,0 +1 @@
1 "
--- a/tests/test_intelligence.py
+++ b/tests/test_intelligence.py
@@ -0,0 +1,850 @@
1
+"""
2
+Tests for the navegador intelligence layer.
3
+
4
+Covers:
5
+ - SemanticSearch._cosine_similarity
6
+ - SemanticSearch.index / search (mock graph + mock LLM)
7
+ - CommunityDetector.detect / store_communities (mock graph)
8
+ - NLPEngine.natural_query / name_communities / generate_docs (mock LLM)
9
+ - DocGenerator template mode and LLM mode (mock LLM)
10
+ - CLI commands: semantic-search, communities, ask, generate-docs, docs
11
+
12
+All LLM providers are mocked — no real API calls are made.
13
+"""
14
+
15
+from __future__ import annotations
16
+
17
+import json
18
+import math
19
+from unittest.mock import MagicMock, patch
20
+
21
+import pytest
22
+from click.testing import CliRunner
23
+
24
+from navegador.cli.commands import main
25
+
26
+
27
+# ── Helpers ───────────────────────────────────────────────────────────────────
28
+
29
+
30
+def _mock_store(result_rows=None):
31
+ """Return a MagicMock GraphStore whose .query() returns the given rows."""
32
+ store = MagicMock()
33
+ result = MagicMock()
34
+ result.result_set = result_rows if result_rows is not None else []
35
+ store.query.return_value = result
36
+ return store
37
+
38
+
39
+def _mock_provider(complete_return="mocked answer", embed_return=None):
40
+ """Return a MagicMock LLMProvider."""
41
+ if embed_return is None:
42
+ embed_return = [0.1, 0.2, 0.3, 0.4]
43
+ provider = MagicMock()
44
+ provider.complete.return_value = complete_return
45
+ provider.embed.return_value = embed_return
46
+ provider.name = "mock"
47
+ provider.model = "mock-model"
48
+ return provider
49
+
50
+
51
+# ── SemanticSearch: _cosine_similarity ────────────────────────────────────────
52
+
53
+
54
class TestCosineSimilarity:
    """Unit tests for SemanticSearch._cosine_similarity."""

    def setup_method(self):
        from navegador.intelligence.search import SemanticSearch

        self.cls = SemanticSearch

    def _sim(self, a, b):
        # Tiny indirection keeps each test to a single assertion line.
        return self.cls._cosine_similarity(a, b)

    def test_identical_vectors_return_one(self):
        vec = [1.0, 0.0, 0.0]
        assert self._sim(vec, vec) == pytest.approx(1.0)

    def test_orthogonal_vectors_return_zero(self):
        assert self._sim([1.0, 0.0], [0.0, 1.0]) == pytest.approx(0.0)

    def test_opposite_vectors_return_minus_one(self):
        assert self._sim([1.0, 0.0], [-1.0, 0.0]) == pytest.approx(-1.0)

    def test_zero_vector_returns_zero(self):
        assert self._sim([0.0, 0.0], [1.0, 2.0]) == 0.0

    def test_different_length_vectors_return_zero(self):
        assert self._sim([1.0, 2.0], [1.0, 2.0, 3.0]) == 0.0

    def test_known_similarity(self):
        # cos(45°) = 1/sqrt(2)
        expected = 1.0 / math.sqrt(2)
        assert self._sim([1.0, 1.0], [1.0, 0.0]) == pytest.approx(expected, abs=1e-6)

    def test_general_non_unit_vectors(self):
        # Same direction → similarity 1.0 regardless of magnitude.
        assert self._sim([3.0, 4.0], [3.0, 4.0]) == pytest.approx(1.0)
96
+
97
+
98
+# ── SemanticSearch: index ─────────────────────────────────────────────────────
99
+
100
+
101
class TestSemanticSearchIndex:
    """Behaviour of SemanticSearch.index() against a mocked store/provider."""

    def _build(self, rows, embed_vec=None):
        # Helper: wire a SemanticSearch to fully mocked dependencies.
        from navegador.intelligence.search import SemanticSearch

        store = _mock_store(rows)
        provider = _mock_provider(embed_return=embed_vec)
        return SemanticSearch(store, provider), store, provider

    def test_index_embeds_and_stores(self):
        rows = [
            ["Function", "my_func", "app.py", "Does something important"],
            ["Class", "MyClass", "app.py", "A useful class"],
        ]
        searcher, store, provider = self._build(rows, [0.1, 0.2, 0.3])

        assert searcher.index(limit=10) == 2
        # One embedding per node...
        assert provider.embed.call_count == 2
        # ...plus one fetch query and one SET query per node.
        assert store.query.call_count >= 3

    def test_index_skips_nodes_without_text(self):
        rows = [
            ["Function", "no_doc", "app.py", ""],  # nothing to embed
            ["Class", "HasDoc", "app.py", "Some docstring"],
        ]
        searcher, _store, provider = self._build(rows, [0.1, 0.2])

        assert searcher.index() == 1  # only the documented node is indexed
        assert provider.embed.call_count == 1

    def test_index_returns_zero_for_empty_graph(self):
        searcher, _store, provider = self._build([])
        assert searcher.index() == 0
        provider.embed.assert_not_called()
145
+
146
+
147
+# ── SemanticSearch: search ────────────────────────────────────────────────────
148
+
149
+
150
class TestSemanticSearchSearch:
    """Behaviour of SemanticSearch.search() with pre-baked node embeddings."""

    def _build(self, rows, query_vec):
        # Helper: SemanticSearch whose provider embeds every query as query_vec.
        from navegador.intelligence.search import SemanticSearch

        return SemanticSearch(_mock_store(rows), _mock_provider(embed_return=query_vec))

    def test_search_returns_sorted_results(self):
        # node_a is parallel to the query (sim 1.0); node_b orthogonal (sim 0.0).
        rows = [
            ["Function", "node_a", "a.py", "doc a", json.dumps([1.0, 0.0])],
            ["Class", "node_b", "b.py", "doc b", json.dumps([0.0, 1.0])],
        ]
        hits = self._build(rows, [1.0, 0.0]).search("find something", limit=10)

        assert len(hits) == 2
        assert hits[0]["name"] == "node_a"
        assert hits[0]["score"] == pytest.approx(1.0)
        assert hits[1]["name"] == "node_b"
        assert hits[1]["score"] == pytest.approx(0.0)

    def test_search_respects_limit(self):
        rows = [
            ["Function", f"func_{i}", "app.py", f"doc {i}", json.dumps([float(i), 0.0])]
            for i in range(1, 6)
        ]
        assert len(self._build(rows, [1.0, 0.0]).search("query", limit=3)) == 3

    def test_search_handles_invalid_embedding_json(self):
        rows = [
            ["Function", "bad_node", "app.py", "doc", "not-valid-json"],
            ["Function", "good_node", "app.py", "doc", json.dumps([1.0, 0.0])],
        ]
        hits = self._build(rows, [1.0, 0.0]).search("q", limit=10)

        # The unparsable row is dropped; only the valid node survives.
        assert len(hits) == 1
        assert hits[0]["name"] == "good_node"

    def test_search_empty_graph_returns_empty_list(self):
        assert self._build([], [0.1, 0.2, 0.3, 0.4]).search("anything") == []
214
+
215
+
216
+# ── CommunityDetector ─────────────────────────────────────────────────────────
217
+
218
+
219
+class TestCommunityDetector:
220
+ """Tests use a fully in-memory mock — no real FalkorDB required."""
221
+
222
+ def _make_store(self, node_rows, edge_rows):
223
+ """
224
+ Return a MagicMock store that returns different rows for the first vs
225
+ subsequent query calls (nodes query, edges query).
226
+ """
227
+ store = MagicMock()
228
+
229
+ node_result = MagicMock()
230
+ node_result.result_set = node_rows
231
+ edge_result = MagicMock()
232
+ edge_result.result_set = edge_rows
233
+
234
+ # First call → node query, second call → edge query, rest → set_community
235
+ store.query.side_effect = [node_result, edge_result] + [
236
+ MagicMock(result_set=[]) for _ in range(100)
237
+ ]
238
+ return store
239
+
240
+ def test_two_cliques_form_separate_communities(self):
241
+ from navegador.intelligence.community import CommunityDetector
242
+
243
+ # Nodes: 0-1-2 form a triangle (clique), 3-4 form a pair
244
+ # They have no edges between groups → two communities
245
+ node_rows = [
246
+ [0, "func_a", "a.py", "Function"],
247
+ [1, "func_b", "a.py", "Function"],
248
+ [2, "func_c", "a.py", "Function"],
249
+ [3, "func_d", "b.py", "Function"],
250
+ [4, "func_e", "b.py", "Function"],
251
+ ]
252
+ edge_rows = [
253
+ [0, 1], [1, 2], [0, 2], # triangle
254
+ [3, 4], # pair
255
+ ]
256
+ store = self._make_store(node_rows, edge_rows)
257
+ detector = CommunityDetector(store)
258
+ communities = detector.detect(min_size=2)
259
+
260
+ assert len(communities) == 2
261
+ sizes = sorted(c.size for c in communities)
262
+ assert sizes == [2, 3]
263
+
264
+ def test_min_size_filters_small_communities(self):
265
+ from navegador.intelligence.community import CommunityDetector
266
+
267
+ node_rows = [
268
+ [0, "a", "x.py", "Function"],
269
+ [1, "b", "x.py", "Function"],
270
+ [2, "c", "x.py", "Function"], # isolated
271
+ ]
272
+ edge_rows = [[0, 1]]
273
+ store = self._make_store(node_rows, edge_rows)
274
+ detector = CommunityDetector(store)
275
+
276
+ communities = detector.detect(min_size=2)
277
+ # Only the pair {a, b} passes; isolated node c gets size=1 (filtered)
278
+ assert all(c.size >= 2 for c in communities)
279
+
280
+ def test_empty_graph_returns_empty_list(self):
281
+ from navegador.intelligence.community import CommunityDetector
282
+
283
+ store = self._make_store([], [])
284
+ detector = CommunityDetector(store)
285
+ communities = detector.detect()
286
+ assert communities == []
287
+
288
+ def test_community_density_is_one_for_complete_graph(self):
289
+ from navegador.intelligence.community import CommunityDetector
290
+
291
+ # 3-node complete graph
292
+ node_rows = [
293
+ [0, "x", "", "Function"],
294
+ [1, "y", "", "Function"],
295
+ [2, "z", "", "Function"],
296
+ ]
297
+ edge_rows = [[0, 1], [1, 2], [0, 2]]
298
+ store = self._make_store(node_rows, edge_rows)
299
+ detector = CommunityDetector(store)
300
+ communities = detector.detect(min_size=3)
301
+
302
+ assert len(communities) == 1
303
+ assert communities[0].density == pytest.approx(1.0)
304
+
305
+ def test_community_members_are_strings(self):
306
+ from navegador.intelligence.community import CommunityDetector
307
+
308
+ node_rows = [
309
+ [0, "func_alpha", "f.py", "Function"],
310
+ [1, "func_beta", "f.py", "Function"],
311
+ ]
312
+ edge_rows = [[0, 1]]
313
+ store = self._make_store(node_rows, edge_rows)
314
+ detector = CommunityDetector(store)
315
+ communities = detector.detect(min_size=2)
316
+
317
+ members = communities[0].members
318
+ assert all(isinstance(m, str) for m in members)
319
+ assert set(members) == {"func_alpha", "func_beta"}
320
+
321
+ def test_store_communities_calls_query_for_each_node(self):
322
+ from navegador.intelligence.community import CommunityDetector
323
+
324
+ node_rows = [
325
+ [10, "n1", "", "Function"],
326
+ [11, "n2", "", "Function"],
327
+ ]
328
+ edge_rows = [[10, 11]]
329
+ store = self._make_store(node_rows, edge_rows)
330
+ detector = CommunityDetector(store)
331
+ detector.detect(min_size=2)
332
+
333
+ # Reset side_effect so store_communities calls work cleanly
334
+ store.query.side_effect = None
335
+ store.query.return_value = MagicMock(result_set=[])
336
+
337
+ updated = detector.store_communities()
338
+ assert updated == 2 # two nodes
339
+ assert store.query.call_count >= 2
340
+
341
+ def test_community_sorted_largest_first(self):
342
+ from navegador.intelligence.community import CommunityDetector
343
+
344
+ # 4-node clique + 2-node pair with a bridge → label propagation may merge
345
+ # Use two fully disconnected groups of sizes 4 and 2
346
+ node_rows = [
347
+ [0, "a", "", "F"], [1, "b", "", "F"], [2, "c", "", "F"], [3, "d", "", "F"],
348
+ [4, "e", "", "F"], [5, "f", "", "F"],
349
+ ]
350
+ edge_rows = [
351
+ [0, 1], [1, 2], [2, 3], [0, 3], # 4-cycle (all same community)
352
+ [4, 5], # pair
353
+ ]
354
+ store = self._make_store(node_rows, edge_rows)
355
+ detector = CommunityDetector(store)
356
+ communities = detector.detect(min_size=2)
357
+ sizes = [c.size for c in communities]
358
+ assert sizes == sorted(sizes, reverse=True)
359
+
360
+
361
+# ── NLPEngine ─────────────────────────────────────────────────────────────────
362
+
363
+
364
class TestNLPEngine:
    """Tests for NLPEngine: NL→Cypher queries, community naming, doc generation."""

    def test_natural_query_calls_complete_twice(self):
        """One LLM call generates Cypher; a second formats the query results."""
        from navegador.intelligence.nlp import NLPEngine

        generated = "MATCH (n:Function) RETURN n.name LIMIT 5"
        formatted = "There are 5 functions: ..."
        llm = MagicMock()
        llm.complete.side_effect = [generated, formatted]

        engine = NLPEngine(_mock_store([["func_a"], ["func_b"]]), llm)
        answer = engine.natural_query("List all functions")

        assert answer == formatted
        assert llm.complete.call_count == 2

    def test_natural_query_handles_query_error(self):
        """A Cypher execution failure surfaces as an error message, not a raise."""
        from navegador.intelligence.nlp import NLPEngine

        broken_store = MagicMock()
        broken_store.query.side_effect = Exception("syntax error")
        engine = NLPEngine(broken_store, _mock_provider(complete_return="INVALID CYPHER !!!"))

        answer = engine.natural_query("broken question")
        assert "Failed" in answer or "Error" in answer or "syntax error" in answer

    def test_natural_query_strips_markdown_fences(self):
        """```cypher fences around LLM output are removed before execution."""
        from navegador.intelligence.nlp import NLPEngine

        llm = MagicMock()
        llm.complete.side_effect = [
            "```cypher\nMATCH (n) RETURN n.name LIMIT 1\n```",
            "One node found.",
        ]
        graph = _mock_store([["some_node"]])

        answer = NLPEngine(graph, llm).natural_query("find a node")

        assert answer == "One node found."
        # The Cypher actually sent to the store must be fence-free.
        assert "```" not in graph.query.call_args[0][0]

    def test_name_communities_returns_one_entry_per_community(self):
        """Each community yields a dict carrying suggested and original names."""
        from navegador.intelligence.community import Community
        from navegador.intelligence.nlp import NLPEngine

        llm = _mock_provider(complete_return="Authentication Services")
        groups = [
            Community(name="community_1", members=["login", "logout", "verify_token"], size=3),
            Community(name="community_2", members=["fetch_data", "store_record"], size=2),
        ]
        named = NLPEngine(_mock_store(), llm).name_communities(groups)

        assert len(named) == 2
        for entry in named:
            assert "suggested_name" in entry
            assert "original_name" in entry
        assert llm.complete.call_count == 2

    def test_name_communities_fallback_on_llm_error(self):
        """When the provider raises, the community keeps its original name."""
        from navegador.intelligence.community import Community
        from navegador.intelligence.nlp import NLPEngine

        failing = MagicMock()
        failing.complete.side_effect = RuntimeError("API down")
        engine = NLPEngine(_mock_store(), failing)

        named = engine.name_communities(
            [Community(name="community_0", members=["a", "b"], size=2)]
        )
        assert named[0]["suggested_name"] == "community_0"

    def test_generate_docs_returns_llm_string(self):
        """generate_docs returns the LLM output verbatim after one complete()."""
        from navegador.intelligence.nlp import NLPEngine

        expected = "## my_func\nDoes great things."
        symbol_row = ["Function", "my_func", "app.py", "Does great things.", "def my_func():"]
        graph = _mock_store([symbol_row])
        # First query → the symbol itself; the next two (callers/callees) → empty.
        graph.query.side_effect = [
            MagicMock(result_set=[symbol_row]),
            MagicMock(result_set=[]),
            MagicMock(result_set=[]),
        ]
        llm = _mock_provider(complete_return=expected)

        docs = NLPEngine(graph, llm).generate_docs("my_func", file_path="app.py")

        assert docs == expected
        llm.complete.assert_called_once()

    def test_generate_docs_works_when_node_not_found(self):
        """A missing symbol still produces LLM output from an empty context."""
        from navegador.intelligence.nlp import NLPEngine

        empty_graph = MagicMock()
        empty_graph.query.return_value = MagicMock(result_set=[])
        engine = NLPEngine(empty_graph, _mock_provider(complete_return="No docs available."))

        assert "No docs available." in engine.generate_docs("nonexistent_func")
+
479
+
480
+# ── DocGenerator (template mode) ─────────────────────────────────────────────
481
+
482
+
483
class TestDocGeneratorTemplateMode:
    """DocGenerator without an LLM provider renders pure-template markdown."""

    def test_generate_file_docs_returns_markdown_with_symbols(self):
        """File docs mention the path, every symbol name, and the docstrings."""
        from navegador.intelligence.docgen import DocGenerator

        symbol_rows = [
            ["Function", "greet", "Does greeting", "def greet():", 10],
            ["Class", "Greeter", "A greeter class", "class Greeter:", 20],
        ]
        generator = DocGenerator(_mock_store(symbol_rows), provider=None)
        output = generator.generate_file_docs("app.py")

        for expected in ("app.py", "greet", "Greeter", "Does greeting"):
            assert expected in output

    def test_generate_file_docs_handles_empty_file(self):
        """A file with no symbols produces a 'No symbols' notice."""
        from navegador.intelligence.docgen import DocGenerator

        generator = DocGenerator(_mock_store([]), provider=None)
        assert "No symbols" in generator.generate_file_docs("empty.py")

    def test_generate_module_docs_groups_by_file(self):
        """Module docs list each file heading and the symbols under it."""
        from navegador.intelligence.docgen import DocGenerator

        symbol_rows = [
            ["Function", "func_a", "nav/graph/store.py", "Store a node", "def func_a():"],
            ["Class", "GraphStore", "nav/graph/store.py", "Wraps the graph.", "class GraphStore:"],
            ["Function", "func_b", "nav/graph/queries.py", "Query helper", "def func_b():"],
        ]
        generator = DocGenerator(_mock_store(symbol_rows), provider=None)
        output = generator.generate_module_docs("nav.graph")

        for expected in ("nav/graph/store.py", "nav/graph/queries.py", "func_a", "GraphStore"):
            assert expected in output

    def test_generate_module_docs_handles_no_results(self):
        """An empty module produces a 'No symbols' notice."""
        from navegador.intelligence.docgen import DocGenerator

        generator = DocGenerator(_mock_store([]), provider=None)
        assert "No symbols" in generator.generate_module_docs("empty.module")

    def test_generate_project_docs_includes_stats_and_files(self):
        """Project docs carry the title, type counts, and file paths."""
        from navegador.intelligence.docgen import DocGenerator

        graph = MagicMock()
        # Three queries issued in order: type counts, file list, top symbols.
        graph.query.side_effect = [
            MagicMock(result_set=[["Function", 42], ["Class", 10]]),
            MagicMock(
                result_set=[["navegador/graph/store.py"], ["navegador/cli/commands.py"]]
            ),
            MagicMock(
                result_set=[["Function", "my_func", "navegador/graph/store.py", "Does things"]]
            ),
        ]
        output = DocGenerator(graph, provider=None).generate_project_docs()

        for expected in ("Project Documentation", "Function", "42", "navegador/graph/store.py"):
            assert expected in output

    def test_signature_included_when_present(self):
        """A symbol's signature string is reproduced verbatim in the docs."""
        from navegador.intelligence.docgen import DocGenerator

        signature = "def my_func(x: int) -> str:"
        generator = DocGenerator(
            _mock_store([["Function", "my_func", "My doc", signature, 5]]), provider=None
        )
        assert signature in generator.generate_file_docs("f.py")
+
575
+
576
+# ── DocGenerator (LLM mode) ───────────────────────────────────────────────────
577
+
578
+
579
class TestDocGeneratorLLMMode:
    """DocGenerator with an LLM provider delegates content to the model."""

    def test_generate_file_docs_uses_nlp_engine(self):
        """With a provider set, file docs are produced via LLM complete()."""
        from navegador.intelligence.docgen import DocGenerator

        graph = MagicMock()
        # First call returns the file's symbols; subsequent NLPEngine-internal
        # queries reuse the same canned return value, which is harmless here.
        graph.query.return_value = MagicMock(
            result_set=[
                ["Function", "my_func", "Generated docs for my_func", "def my_func():", 1]
            ]
        )
        llm = _mock_provider(complete_return="## my_func\nLLM-generated content.")

        output = DocGenerator(graph, provider=llm).generate_file_docs("app.py")

        assert "app.py" in output
        llm.complete.assert_called()

    def test_generate_project_docs_uses_llm(self):
        """Project docs come from a single LLM call when a provider is set."""
        from navegador.intelligence.docgen import DocGenerator

        graph = MagicMock()
        graph.query.return_value = MagicMock(result_set=[])  # template sub-calls see nothing
        llm = _mock_provider(complete_return="# Project README\nLLM wrote this.")

        output = DocGenerator(graph, provider=llm).generate_project_docs()

        assert "Project README" in output or "LLM wrote this" in output
        llm.complete.assert_called_once()
+
609
+
610
+# ── CLI: semantic-search ──────────────────────────────────────────────────────
611
+
612
+
613
class TestSemanticSearchCLI:
    """CLI-level tests for `semantic-search` (store and LLM both mocked)."""

    def test_search_outputs_table(self):
        """An empty result set still renders and exits 0."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as get_store, \
                patch("navegador.llm.auto_provider") as auto_provider:
            get_store.return_value = _mock_store([])
            auto_provider.return_value = _mock_provider(embed_return=[1.0, 0.0])

            from navegador.intelligence.search import SemanticSearch
            with patch.object(SemanticSearch, "search", return_value=[]):
                outcome = runner.invoke(main, ["semantic-search", "test query"])
                assert outcome.exit_code == 0

    def test_search_with_index_flag(self):
        """--index triggers exactly one call to SemanticSearch.index()."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as get_store, \
                patch("navegador.llm.auto_provider") as auto_provider:
            get_store.return_value = _mock_store([])
            auto_provider.return_value = _mock_provider()

            from navegador.intelligence.search import SemanticSearch
            with patch.object(SemanticSearch, "index", return_value=5) as index_mock, \
                    patch.object(SemanticSearch, "search", return_value=[]):
                outcome = runner.invoke(main, ["semantic-search", "test", "--index"])
                assert outcome.exit_code == 0
                index_mock.assert_called_once()

    def test_search_json_output(self):
        """--json emits the raw result list as parseable JSON."""
        runner = CliRunner()
        hits = [
            {"type": "Function", "name": "foo", "file_path": "a.py", "text": "doc", "score": 0.95}
        ]
        with patch("navegador.cli.commands._get_store") as get_store, \
                patch("navegador.llm.auto_provider") as auto_provider:
            get_store.return_value = _mock_store([])
            auto_provider.return_value = _mock_provider()

            from navegador.intelligence.search import SemanticSearch
            with patch.object(SemanticSearch, "search", return_value=hits):
                outcome = runner.invoke(main, ["semantic-search", "foo", "--json"])
                assert outcome.exit_code == 0
                payload = json.loads(outcome.output)
                assert isinstance(payload, list)
                assert payload[0]["name"] == "foo"
+
664
+
665
+# ── CLI: communities ──────────────────────────────────────────────────────────
666
+
667
+
668
class TestCommunitiesCLI:
    """CLI-level tests for the `communities` command (detector mocked)."""

    def _make_communities(self):
        """Two fixed communities (sizes 3 and 2) shared across the tests."""
        from navegador.intelligence.community import Community

        return [
            Community(name="community_0", members=["a", "b", "c"], size=3, density=1.0),
            Community(name="community_1", members=["x", "y"], size=2, density=1.0),
        ]

    def test_communities_outputs_table(self):
        """Default invocation renders a table and exits cleanly."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn:
            mock_store_fn.return_value = _mock_store()
            from navegador.intelligence.community import CommunityDetector
            with patch.object(CommunityDetector, "detect", return_value=self._make_communities()):
                result = runner.invoke(main, ["communities"])
                assert result.exit_code == 0

    def test_communities_json_output(self):
        """--json emits a machine-readable list of communities."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn:
            mock_store_fn.return_value = _mock_store()
            from navegador.intelligence.community import CommunityDetector
            with patch.object(CommunityDetector, "detect", return_value=self._make_communities()):
                result = runner.invoke(main, ["communities", "--json"])
                assert result.exit_code == 0
                data = json.loads(result.output)
                assert len(data) == 2
                assert data[0]["name"] == "community_0"

    def test_communities_min_size_passed(self):
        """--min-size is forwarded verbatim to CommunityDetector.detect()."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn:
            mock_store_fn.return_value = _mock_store()
            from navegador.intelligence.community import CommunityDetector
            with patch.object(CommunityDetector, "detect", return_value=[]) as mock_detect:
                runner.invoke(main, ["communities", "--min-size", "5"])
                mock_detect.assert_called_once_with(min_size=5)

    def test_communities_empty_graph_message(self):
        """An empty detection result prints a 'No communities' notice."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn:
            mock_store_fn.return_value = _mock_store()
            from navegador.intelligence.community import CommunityDetector
            with patch.object(CommunityDetector, "detect", return_value=[]):
                result = runner.invoke(main, ["communities"])
                assert result.exit_code == 0
                # BUG FIX: the original asserted
                #   "No communities" in result.output or result.exit_code == 0
                # which was a tautology (exit code is already asserted above),
                # so the message was never actually checked.
                assert "No communities" in result.output

    def test_communities_store_labels_flag(self):
        """--store-labels triggers store_communities() on the detector."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn:
            mock_store_fn.return_value = _mock_store()
            from navegador.intelligence.community import CommunityDetector
            with patch.object(CommunityDetector, "detect", return_value=self._make_communities()), \
                    patch.object(CommunityDetector, "store_communities", return_value=5) as mock_store:
                result = runner.invoke(main, ["communities", "--store-labels"])
                assert result.exit_code == 0
                mock_store.assert_called_once()
+
728
+
729
+# ── CLI: ask ──────────────────────────────────────────────────────────────────
730
+
731
+
732
class TestAskCLI:
    """CLI-level tests for the `ask` natural-language command."""

    def test_ask_prints_answer(self):
        """The NLPEngine answer is echoed to the console."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as get_store, \
                patch("navegador.llm.auto_provider") as auto_provider:
            get_store.return_value = _mock_store()
            auto_provider.return_value = _mock_provider()

            from navegador.intelligence.nlp import NLPEngine
            with patch.object(NLPEngine, "natural_query", return_value="The answer is 42."):
                outcome = runner.invoke(main, ["ask", "What is the answer?"])
                assert outcome.exit_code == 0
                assert "42" in outcome.output

    def test_ask_with_explicit_provider(self):
        """--provider routes through get_provider rather than auto-detection."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as get_store, \
                patch("navegador.llm.get_provider") as get_provider:
            get_store.return_value = _mock_store()
            get_provider.return_value = _mock_provider()

            from navegador.intelligence.nlp import NLPEngine
            with patch.object(NLPEngine, "natural_query", return_value="Answer."):
                outcome = runner.invoke(main, ["ask", "question", "--provider", "openai"])
                assert outcome.exit_code == 0
                get_provider.assert_called_once_with("openai", model="")
+
761
+
762
+# ── CLI: generate-docs ────────────────────────────────────────────────────────
763
+
764
+
765
class TestGenerateDocsCLI:
    """CLI-level tests for the `generate-docs` command."""

    def test_generate_docs_prints_output(self):
        """Generated documentation is printed to the console."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as get_store, \
                patch("navegador.llm.auto_provider") as auto_provider:
            get_store.return_value = _mock_store()
            auto_provider.return_value = _mock_provider()

            from navegador.intelligence.nlp import NLPEngine
            with patch.object(NLPEngine, "generate_docs", return_value="## my_func\nDocs here."):
                outcome = runner.invoke(main, ["generate-docs", "my_func"])
                assert outcome.exit_code == 0
                assert "my_func" in outcome.output or "Docs" in outcome.output

    def test_generate_docs_with_file_option(self):
        """--file is forwarded to NLPEngine.generate_docs as file_path."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as get_store, \
                patch("navegador.llm.auto_provider") as auto_provider:
            get_store.return_value = _mock_store()
            auto_provider.return_value = _mock_provider()

            from navegador.intelligence.nlp import NLPEngine
            with patch.object(NLPEngine, "generate_docs", return_value="Docs.") as generate_docs:
                runner.invoke(main, ["generate-docs", "my_func", "--file", "app.py"])
                generate_docs.assert_called_once_with("my_func", file_path="app.py")
+
793
+
794
+# ── CLI: docs ─────────────────────────────────────────────────────────────────
795
+
796
+
797
class TestDocsCLI:
    """CLI-level tests for the `docs` command (file / module / project modes)."""

    def test_docs_file_path(self):
        """A path-like argument dispatches to generate_file_docs."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as get_store:
            get_store.return_value = _mock_store()
            from navegador.intelligence.docgen import DocGenerator
            with patch.object(
                DocGenerator, "generate_file_docs", return_value="# File docs"
            ) as file_docs:
                outcome = runner.invoke(main, ["docs", "app/store.py"])
                assert outcome.exit_code == 0
                file_docs.assert_called_once_with("app/store.py")

    def test_docs_module_name(self):
        """A dotted-module argument dispatches to generate_module_docs."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as get_store:
            get_store.return_value = _mock_store()
            from navegador.intelligence.docgen import DocGenerator
            with patch.object(
                DocGenerator, "generate_module_docs", return_value="# Module docs"
            ) as module_docs:
                outcome = runner.invoke(main, ["docs", "navegador.graph"])
                assert outcome.exit_code == 0
                module_docs.assert_called_once_with("navegador.graph")

    def test_docs_project_flag(self):
        """--project dispatches to generate_project_docs."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as get_store:
            get_store.return_value = _mock_store()
            from navegador.intelligence.docgen import DocGenerator
            with patch.object(
                DocGenerator, "generate_project_docs", return_value="# Project"
            ) as project_docs:
                outcome = runner.invoke(main, ["docs", ".", "--project"])
                assert outcome.exit_code == 0
                project_docs.assert_called_once()

    def test_docs_json_output(self):
        """--json wraps the rendered documentation in a JSON object."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as get_store:
            get_store.return_value = _mock_store()
            from navegador.intelligence.docgen import DocGenerator
            with patch.object(DocGenerator, "generate_project_docs", return_value="# Project"):
                outcome = runner.invoke(main, ["docs", ".", "--project", "--json"])
                assert outcome.exit_code == 0
                assert "docs" in json.loads(outcome.output)

    def test_docs_with_llm_provider(self):
        """--provider resolves an explicit LLM via get_provider."""
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as get_store, \
                patch(
                    "navegador.intelligence.docgen.DocGenerator.generate_file_docs",
                    return_value="# Docs",
                ):
            get_store.return_value = _mock_store()
            with patch("navegador.llm.get_provider") as get_provider:
                get_provider.return_value = _mock_provider()
                outcome = runner.invoke(main, ["docs", "app/store.py", "--provider", "openai"])
                assert outcome.exit_code == 0
                get_provider.assert_called_once_with("openai", model="")
--- a/tests/test_intelligence.py
+++ b/tests/test_intelligence.py
@@ -0,0 +1,850 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
--- a/tests/test_intelligence.py
+++ b/tests/test_intelligence.py
@@ -0,0 +1,850 @@
1 """
2 Tests for the navegador intelligence layer.
3
4 Covers:
5 - SemanticSearch._cosine_similarity
6 - SemanticSearch.index / search (mock graph + mock LLM)
7 - CommunityDetector.detect / store_communities (mock graph)
8 - NLPEngine.natural_query / name_communities / generate_docs (mock LLM)
9 - DocGenerator template mode and LLM mode (mock LLM)
10 - CLI commands: semantic-search, communities, ask, generate-docs, docs
11
12 All LLM providers are mocked — no real API calls are made.
13 """
14
15 from __future__ import annotations
16
17 import json
18 import math
19 from unittest.mock import MagicMock, patch
20
21 import pytest
22 from click.testing import CliRunner
23
24 from navegador.cli.commands import main
25
26
27 # ── Helpers ───────────────────────────────────────────────────────────────────
28
29
30 def _mock_store(result_rows=None):
31 """Return a MagicMock GraphStore whose .query() returns the given rows."""
32 store = MagicMock()
33 result = MagicMock()
34 result.result_set = result_rows if result_rows is not None else []
35 store.query.return_value = result
36 return store
37
38
39 def _mock_provider(complete_return="mocked answer", embed_return=None):
40 """Return a MagicMock LLMProvider."""
41 if embed_return is None:
42 embed_return = [0.1, 0.2, 0.3, 0.4]
43 provider = MagicMock()
44 provider.complete.return_value = complete_return
45 provider.embed.return_value = embed_return
46 provider.name = "mock"
47 provider.model = "mock-model"
48 return provider
49
50
51 # ── SemanticSearch: _cosine_similarity ────────────────────────────────────────
52
53
class TestCosineSimilarity:
    """Unit tests for SemanticSearch._cosine_similarity edge cases and math."""

    def setup_method(self):
        from navegador.intelligence.search import SemanticSearch

        self.cls = SemanticSearch

    def test_identical_vectors_return_one(self):
        vec = [1.0, 0.0, 0.0]
        score = self.cls._cosine_similarity(vec, vec)
        assert score == pytest.approx(1.0)

    def test_orthogonal_vectors_return_zero(self):
        left, right = [1.0, 0.0], [0.0, 1.0]
        assert self.cls._cosine_similarity(left, right) == pytest.approx(0.0)

    def test_opposite_vectors_return_minus_one(self):
        left, right = [1.0, 0.0], [-1.0, 0.0]
        assert self.cls._cosine_similarity(left, right) == pytest.approx(-1.0)

    def test_zero_vector_returns_zero(self):
        # Degenerate norm: similarity must be exactly 0.0, never NaN/ZeroDivision.
        assert self.cls._cosine_similarity([0.0, 0.0], [1.0, 2.0]) == 0.0

    def test_different_length_vectors_return_zero(self):
        # Mismatched dimensionality is treated as "no similarity".
        assert self.cls._cosine_similarity([1.0, 2.0], [1.0, 2.0, 3.0]) == 0.0

    def test_known_similarity(self):
        # Angle between (1,1) and (1,0) is 45° -> cos = 1/sqrt(2).
        got = self.cls._cosine_similarity([1.0, 1.0], [1.0, 0.0])
        assert got == pytest.approx(1.0 / math.sqrt(2), abs=1e-6)

    def test_general_non_unit_vectors(self):
        # Cosine similarity ignores magnitude: parallel vectors score 1.0.
        assert self.cls._cosine_similarity([3.0, 4.0], [3.0, 4.0]) == pytest.approx(1.0)
96
97
98 # ── SemanticSearch: index ─────────────────────────────────────────────────────
99
100
class TestSemanticSearchIndex:
    """SemanticSearch.index should embed and persist exactly the nodes with text."""

    def test_index_embeds_and_stores(self):
        from navegador.intelligence.search import SemanticSearch

        node_rows = [
            ["Function", "my_func", "app.py", "Does something important"],
            ["Class", "MyClass", "app.py", "A useful class"],
        ]
        graph = _mock_store(node_rows)
        llm = _mock_provider(embed_return=[0.1, 0.2, 0.3])

        indexed = SemanticSearch(graph, llm).index(limit=10)

        assert indexed == 2
        # One embedding call per node with text.
        assert llm.embed.call_count == 2
        # At least the fetch query plus one SET query per node.
        assert graph.query.call_count >= 3

    def test_index_skips_nodes_without_text(self):
        from navegador.intelligence.search import SemanticSearch

        node_rows = [
            ["Function", "no_doc", "app.py", ""],  # nothing to embed
            ["Class", "HasDoc", "app.py", "Some docstring"],
        ]
        graph = _mock_store(node_rows)
        llm = _mock_provider(embed_return=[0.1, 0.2])

        indexed = SemanticSearch(graph, llm).index()

        # Only the documented node counts toward the index.
        assert indexed == 1
        assert llm.embed.call_count == 1

    def test_index_returns_zero_for_empty_graph(self):
        from navegador.intelligence.search import SemanticSearch

        llm = _mock_provider()
        searcher = SemanticSearch(_mock_store([]), llm)
        assert searcher.index() == 0
        llm.embed.assert_not_called()
145
146
147 # ── SemanticSearch: search ────────────────────────────────────────────────────
148
149
class TestSemanticSearchSearch:
    """SemanticSearch.search: ranking, limiting, and bad-data tolerance."""

    def test_search_returns_sorted_results(self):
        from navegador.intelligence.search import SemanticSearch

        # Query vector is (1, 0): node_a is parallel (sim 1.0) while node_b is
        # orthogonal (sim 0.0), so node_a must rank first.
        graph = _mock_store([
            ["Function", "node_a", "a.py", "doc a", json.dumps([1.0, 0.0])],
            ["Class", "node_b", "b.py", "doc b", json.dumps([0.0, 1.0])],
        ])
        llm = _mock_provider(embed_return=[1.0, 0.0])

        hits = SemanticSearch(graph, llm).search("find something", limit=10)

        assert len(hits) == 2
        first, second = hits
        assert (first["name"], second["name"]) == ("node_a", "node_b")
        assert first["score"] == pytest.approx(1.0)
        assert second["score"] == pytest.approx(0.0)

    def test_search_respects_limit(self):
        from navegador.intelligence.search import SemanticSearch

        graph = _mock_store([
            ["Function", f"func_{idx}", "app.py", f"doc {idx}", json.dumps([float(idx), 0.0])]
            for idx in range(1, 6)
        ])
        llm = _mock_provider(embed_return=[1.0, 0.0])

        # Five candidates, limit=3 -> exactly three results back.
        assert len(SemanticSearch(graph, llm).search("query", limit=3)) == 3

    def test_search_handles_invalid_embedding_json(self):
        from navegador.intelligence.search import SemanticSearch

        graph = _mock_store([
            ["Function", "bad_node", "app.py", "doc", "not-valid-json"],
            ["Function", "good_node", "app.py", "doc", json.dumps([1.0, 0.0])],
        ])
        llm = _mock_provider(embed_return=[1.0, 0.0])

        hits = SemanticSearch(graph, llm).search("q", limit=10)

        # The unparseable embedding is skipped rather than raising.
        assert [h["name"] for h in hits] == ["good_node"]

    def test_search_empty_graph_returns_empty_list(self):
        from navegador.intelligence.search import SemanticSearch

        searcher = SemanticSearch(_mock_store([]), _mock_provider())
        assert searcher.search("anything") == []
214
215
216 # ── CommunityDetector ─────────────────────────────────────────────────────────
217
218
class TestCommunityDetector:
    """Tests use a fully in-memory mock — no real FalkorDB required."""

    def _make_store(self, node_rows, edge_rows):
        """
        Return a MagicMock store that returns different rows for the first vs
        subsequent query calls (nodes query, edges query).

        NOTE(review): this assumes CommunityDetector.detect() issues exactly
        one node query followed by one edge query — confirm against the
        implementation if detect() ever changes its query order.
        """
        store = MagicMock()

        node_result = MagicMock()
        node_result.result_set = node_rows
        edge_result = MagicMock()
        edge_result.result_set = edge_rows

        # First call → node query, second call → edge query, rest → set_community
        store.query.side_effect = [node_result, edge_result] + [
            MagicMock(result_set=[]) for _ in range(100)
        ]
        return store

    def test_two_cliques_form_separate_communities(self):
        from navegador.intelligence.community import CommunityDetector

        # Nodes: 0-1-2 form a triangle (clique), 3-4 form a pair
        # They have no edges between groups → two communities
        node_rows = [
            [0, "func_a", "a.py", "Function"],
            [1, "func_b", "a.py", "Function"],
            [2, "func_c", "a.py", "Function"],
            [3, "func_d", "b.py", "Function"],
            [4, "func_e", "b.py", "Function"],
        ]
        edge_rows = [
            [0, 1], [1, 2], [0, 2],  # triangle
            [3, 4],  # pair
        ]
        store = self._make_store(node_rows, edge_rows)
        detector = CommunityDetector(store)
        communities = detector.detect(min_size=2)

        assert len(communities) == 2
        sizes = sorted(c.size for c in communities)
        assert sizes == [2, 3]

    def test_min_size_filters_small_communities(self):
        from navegador.intelligence.community import CommunityDetector

        node_rows = [
            [0, "a", "x.py", "Function"],
            [1, "b", "x.py", "Function"],
            [2, "c", "x.py", "Function"],  # isolated
        ]
        edge_rows = [[0, 1]]
        store = self._make_store(node_rows, edge_rows)
        detector = CommunityDetector(store)

        communities = detector.detect(min_size=2)
        # Only the pair {a, b} passes; isolated node c gets size=1 (filtered)
        assert all(c.size >= 2 for c in communities)

    def test_empty_graph_returns_empty_list(self):
        from navegador.intelligence.community import CommunityDetector

        # No nodes and no edges — detect() must not raise on an empty graph.
        store = self._make_store([], [])
        detector = CommunityDetector(store)
        communities = detector.detect()
        assert communities == []

    def test_community_density_is_one_for_complete_graph(self):
        from navegador.intelligence.community import CommunityDetector

        # 3-node complete graph
        node_rows = [
            [0, "x", "", "Function"],
            [1, "y", "", "Function"],
            [2, "z", "", "Function"],
        ]
        edge_rows = [[0, 1], [1, 2], [0, 2]]
        store = self._make_store(node_rows, edge_rows)
        detector = CommunityDetector(store)
        communities = detector.detect(min_size=3)

        assert len(communities) == 1
        # All 3 of 3 possible edges present → density exactly 1.0.
        assert communities[0].density == pytest.approx(1.0)

    def test_community_members_are_strings(self):
        from navegador.intelligence.community import CommunityDetector

        node_rows = [
            [0, "func_alpha", "f.py", "Function"],
            [1, "func_beta", "f.py", "Function"],
        ]
        edge_rows = [[0, 1]]
        store = self._make_store(node_rows, edge_rows)
        detector = CommunityDetector(store)
        communities = detector.detect(min_size=2)

        # Members should be the symbol *names*, not internal graph ids.
        members = communities[0].members
        assert all(isinstance(m, str) for m in members)
        assert set(members) == {"func_alpha", "func_beta"}

    def test_store_communities_calls_query_for_each_node(self):
        from navegador.intelligence.community import CommunityDetector

        node_rows = [
            [10, "n1", "", "Function"],
            [11, "n2", "", "Function"],
        ]
        edge_rows = [[10, 11]]
        store = self._make_store(node_rows, edge_rows)
        detector = CommunityDetector(store)
        detector.detect(min_size=2)

        # Reset side_effect so store_communities calls work cleanly
        store.query.side_effect = None
        store.query.return_value = MagicMock(result_set=[])

        updated = detector.store_communities()
        assert updated == 2  # two nodes
        assert store.query.call_count >= 2

    def test_community_sorted_largest_first(self):
        from navegador.intelligence.community import CommunityDetector

        # 4-node clique + 2-node pair with a bridge → label propagation may merge
        # Use two fully disconnected groups of sizes 4 and 2
        node_rows = [
            [0, "a", "", "F"], [1, "b", "", "F"], [2, "c", "", "F"], [3, "d", "", "F"],
            [4, "e", "", "F"], [5, "f", "", "F"],
        ]
        edge_rows = [
            [0, 1], [1, 2], [2, 3], [0, 3],  # 4-cycle (all same community)
            [4, 5],  # pair
        ]
        store = self._make_store(node_rows, edge_rows)
        detector = CommunityDetector(store)
        communities = detector.detect(min_size=2)
        sizes = [c.size for c in communities]
        # Asserts the returned list is ordered by community size, descending.
        assert sizes == sorted(sizes, reverse=True)
359
360
361 # ── NLPEngine ─────────────────────────────────────────────────────────────────
362
363
class TestNLPEngine:
    """NLPEngine: natural-language → Cypher, community naming, doc generation."""

    def test_natural_query_calls_complete_twice(self):
        """Should call complete once for Cypher generation, once for formatting."""
        from navegador.intelligence.nlp import NLPEngine

        cypher_response = "MATCH (n:Function) RETURN n.name LIMIT 5"
        format_response = "There are 5 functions: ..."
        provider = MagicMock()
        # side_effect sequence: 1st complete() → Cypher, 2nd → formatted answer.
        provider.complete.side_effect = [cypher_response, format_response]

        store = _mock_store([["func_a"], ["func_b"]])
        engine = NLPEngine(store, provider)

        result = engine.natural_query("List all functions")
        assert result == format_response
        assert provider.complete.call_count == 2

    def test_natural_query_handles_query_error(self):
        """When the generated Cypher fails, return an error message."""
        from navegador.intelligence.nlp import NLPEngine

        provider = _mock_provider(complete_return="INVALID CYPHER !!!")
        store = MagicMock()
        store.query.side_effect = Exception("syntax error")

        engine = NLPEngine(store, provider)
        result = engine.natural_query("broken question")

        # Loose check: only requires that *some* error wording surfaces.
        assert "Failed" in result or "Error" in result or "syntax error" in result

    def test_natural_query_strips_markdown_fences(self):
        """LLM output with ```cypher fences should still execute."""
        from navegador.intelligence.nlp import NLPEngine

        fenced_cypher = "```cypher\nMATCH (n) RETURN n.name LIMIT 1\n```"
        provider = MagicMock()
        provider.complete.side_effect = [fenced_cypher, "One node found."]

        store = _mock_store([["some_node"]])
        engine = NLPEngine(store, provider)
        result = engine.natural_query("find a node")

        assert result == "One node found."
        # Verify the actual query executed was the clean Cypher (no fences)
        executed_cypher = store.query.call_args[0][0]
        assert "```" not in executed_cypher

    def test_name_communities_returns_one_entry_per_community(self):
        from navegador.intelligence.community import Community
        from navegador.intelligence.nlp import NLPEngine

        store = _mock_store()
        provider = _mock_provider(complete_return="Authentication Services")

        comms = [
            Community(name="community_1", members=["login", "logout", "verify_token"], size=3),
            Community(name="community_2", members=["fetch_data", "store_record"], size=2),
        ]
        engine = NLPEngine(store, provider)
        named = engine.name_communities(comms)

        assert len(named) == 2
        assert all("suggested_name" in n for n in named)
        assert all("original_name" in n for n in named)
        # One LLM round-trip per community.
        assert provider.complete.call_count == 2

    def test_name_communities_fallback_on_llm_error(self):
        """If LLM raises, the original name is used."""
        from navegador.intelligence.community import Community
        from navegador.intelligence.nlp import NLPEngine

        store = _mock_store()
        provider = MagicMock()
        provider.complete.side_effect = RuntimeError("API down")

        comm = Community(name="community_0", members=["a", "b"], size=2)
        engine = NLPEngine(store, provider)
        named = engine.name_communities([comm])

        assert named[0]["suggested_name"] == "community_0"

    def test_generate_docs_returns_llm_string(self):
        from navegador.intelligence.nlp import NLPEngine

        expected_docs = "## my_func\nDoes great things."
        store = _mock_store([
            ["Function", "my_func", "app.py", "Does great things.", "def my_func():"]
        ])
        # Make subsequent query calls (callers, callees) also return empty
        # NOTE(review): this side_effect list assumes generate_docs issues the
        # symbol query first and then two relationship queries — confirm if the
        # implementation's query order changes.
        store.query.side_effect = [
            MagicMock(result_set=[["Function", "my_func", "app.py", "Does great things.", "def my_func():"]]),
            MagicMock(result_set=[]),
            MagicMock(result_set=[]),
        ]
        provider = _mock_provider(complete_return=expected_docs)

        engine = NLPEngine(store, provider)
        result = engine.generate_docs("my_func", file_path="app.py")

        assert result == expected_docs
        provider.complete.assert_called_once()

    def test_generate_docs_works_when_node_not_found(self):
        """When node doesn't exist, still calls LLM with empty context."""
        from navegador.intelligence.nlp import NLPEngine

        store = MagicMock()
        store.query.return_value = MagicMock(result_set=[])
        provider = _mock_provider(complete_return="No docs available.")

        engine = NLPEngine(store, provider)
        result = engine.generate_docs("nonexistent_func")

        assert "No docs available." in result
478
479
480 # ── DocGenerator (template mode) ─────────────────────────────────────────────
481
482
class TestDocGeneratorTemplateMode:
    """DocGenerator with provider=None falls back to pure template rendering."""

    def test_generate_file_docs_returns_markdown_with_symbols(self):
        from navegador.intelligence.docgen import DocGenerator

        # Row shape assumed for file symbols:
        # [type, name, docstring, signature, line] — TODO confirm against the query.
        rows = [
            ["Function", "greet", "Does greeting", "def greet():", 10],
            ["Class", "Greeter", "A greeter class", "class Greeter:", 20],
        ]
        store = _mock_store(rows)
        gen = DocGenerator(store, provider=None)

        docs = gen.generate_file_docs("app.py")

        assert "app.py" in docs
        assert "greet" in docs
        assert "Greeter" in docs
        assert "Does greeting" in docs

    def test_generate_file_docs_handles_empty_file(self):
        from navegador.intelligence.docgen import DocGenerator

        store = _mock_store([])
        gen = DocGenerator(store, provider=None)

        docs = gen.generate_file_docs("empty.py")
        assert "No symbols" in docs

    def test_generate_module_docs_groups_by_file(self):
        from navegador.intelligence.docgen import DocGenerator

        rows = [
            ["Function", "func_a", "nav/graph/store.py", "Store a node", "def func_a():"],
            ["Class", "GraphStore", "nav/graph/store.py", "Wraps the graph.", "class GraphStore:"],
            ["Function", "func_b", "nav/graph/queries.py", "Query helper", "def func_b():"],
        ]
        store = _mock_store(rows)
        gen = DocGenerator(store, provider=None)

        docs = gen.generate_module_docs("nav.graph")
        # Both source files should appear, each with its own symbols.
        assert "nav/graph/store.py" in docs
        assert "nav/graph/queries.py" in docs
        assert "func_a" in docs
        assert "GraphStore" in docs

    def test_generate_module_docs_handles_no_results(self):
        from navegador.intelligence.docgen import DocGenerator

        store = _mock_store([])
        gen = DocGenerator(store, provider=None)

        docs = gen.generate_module_docs("empty.module")
        assert "No symbols" in docs

    def test_generate_project_docs_includes_stats_and_files(self):
        from navegador.intelligence.docgen import DocGenerator

        store = MagicMock()

        stats_result = MagicMock()
        stats_result.result_set = [
            ["Function", 42],
            ["Class", 10],
        ]
        files_result = MagicMock()
        files_result.result_set = [
            ["navegador/graph/store.py"],
            ["navegador/cli/commands.py"],
        ]
        sym_result = MagicMock()
        sym_result.result_set = [
            ["Function", "my_func", "navegador/graph/store.py", "Does things"],
        ]
        # NOTE(review): assumes generate_project_docs queries in the order
        # stats → files → symbols; reorder this list if the implementation changes.
        store.query.side_effect = [stats_result, files_result, sym_result]

        gen = DocGenerator(store, provider=None)
        docs = gen.generate_project_docs()

        assert "Project Documentation" in docs
        assert "Function" in docs
        assert "42" in docs
        assert "navegador/graph/store.py" in docs

    def test_signature_included_when_present(self):
        from navegador.intelligence.docgen import DocGenerator

        rows = [["Function", "my_func", "My doc", "def my_func(x: int) -> str:", 5]]
        store = _mock_store(rows)
        gen = DocGenerator(store, provider=None)

        docs = gen.generate_file_docs("f.py")
        # The full signature text must survive into the rendered markdown.
        assert "def my_func(x: int) -> str:" in docs
574
575
576 # ── DocGenerator (LLM mode) ───────────────────────────────────────────────────
577
578
class TestDocGeneratorLLMMode:
    """DocGenerator with an LLM provider should delegate prose to the model."""

    def test_generate_file_docs_uses_nlp_engine(self):
        from navegador.intelligence.docgen import DocGenerator

        symbol_rows = [["Function", "my_func", "Generated docs for my_func", "def my_func():", 1]]
        graph = MagicMock()
        # Every query (the file-symbols fetch and any NLPEngine-internal call)
        # sees the same rows.
        graph.query.return_value = MagicMock(result_set=symbol_rows)

        llm = _mock_provider(complete_return="## my_func\nLLM-generated content.")
        output = DocGenerator(graph, provider=llm).generate_file_docs("app.py")

        assert "app.py" in output
        llm.complete.assert_called()

    def test_generate_project_docs_uses_llm(self):
        from navegador.intelligence.docgen import DocGenerator

        graph = MagicMock()
        # Empty result sets for every template sub-query.
        graph.query.return_value = MagicMock(result_set=[])

        llm = _mock_provider(complete_return="# Project README\nLLM wrote this.")
        output = DocGenerator(graph, provider=llm).generate_project_docs()

        assert "Project README" in output or "LLM wrote this" in output
        llm.complete.assert_called_once()
608
609
610 # ── CLI: semantic-search ──────────────────────────────────────────────────────
611
612
class TestSemanticSearchCLI:
    """CLI `semantic-search` command: table output, --index flag, --json output."""

    def test_search_outputs_table(self):
        runner = CliRunner()
        # Patch the store factory and provider auto-detection so the command
        # runs with no database and no API keys.
        with patch("navegador.cli.commands._get_store") as mock_store_fn, \
                patch("navegador.llm.auto_provider") as mock_auto:
            store = _mock_store([])
            mock_store_fn.return_value = store
            mock_provider = _mock_provider(embed_return=[1.0, 0.0])
            mock_auto.return_value = mock_provider

            # search returns no results
            from navegador.intelligence.search import SemanticSearch
            with patch.object(SemanticSearch, "search", return_value=[]):
                result = runner.invoke(main, ["semantic-search", "test query"])
            assert result.exit_code == 0

    def test_search_with_index_flag(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn, \
                patch("navegador.llm.auto_provider") as mock_auto:
            store = _mock_store([])
            mock_store_fn.return_value = store
            mock_provider = _mock_provider()
            mock_auto.return_value = mock_provider

            from navegador.intelligence.search import SemanticSearch
            # --index must trigger SemanticSearch.index before searching.
            with patch.object(SemanticSearch, "index", return_value=5) as mock_index, \
                    patch.object(SemanticSearch, "search", return_value=[]):
                result = runner.invoke(main, ["semantic-search", "test", "--index"])
            assert result.exit_code == 0
            mock_index.assert_called_once()

    def test_search_json_output(self):
        runner = CliRunner()
        fake_results = [
            {"type": "Function", "name": "foo", "file_path": "a.py", "text": "doc", "score": 0.95}
        ]
        with patch("navegador.cli.commands._get_store") as mock_store_fn, \
                patch("navegador.llm.auto_provider") as mock_auto:
            store = _mock_store([])
            mock_store_fn.return_value = store
            mock_auto.return_value = _mock_provider()

            from navegador.intelligence.search import SemanticSearch
            with patch.object(SemanticSearch, "search", return_value=fake_results):
                result = runner.invoke(main, ["semantic-search", "foo", "--json"])
            assert result.exit_code == 0
            # --json output must be machine-parseable and echo the result rows.
            data = json.loads(result.output)
            assert isinstance(data, list)
            assert data[0]["name"] == "foo"
663
664
665 # ── CLI: communities ──────────────────────────────────────────────────────────
666
667
class TestCommunitiesCLI:
    """CLI `communities` command: table/JSON output and option forwarding."""

    def _make_communities(self):
        # Two fixed communities reused by every test in this class.
        from navegador.intelligence.community import Community

        return [
            Community(name="community_0", members=["a", "b", "c"], size=3, density=1.0),
            Community(name="community_1", members=["x", "y"], size=2, density=1.0),
        ]

    def test_communities_outputs_table(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn:
            mock_store_fn.return_value = _mock_store()
            from navegador.intelligence.community import CommunityDetector
            with patch.object(CommunityDetector, "detect", return_value=self._make_communities()):
                result = runner.invoke(main, ["communities"])
            assert result.exit_code == 0

    def test_communities_json_output(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn:
            mock_store_fn.return_value = _mock_store()
            from navegador.intelligence.community import CommunityDetector
            with patch.object(CommunityDetector, "detect", return_value=self._make_communities()):
                result = runner.invoke(main, ["communities", "--json"])
            assert result.exit_code == 0
            data = json.loads(result.output)
            assert len(data) == 2
            assert data[0]["name"] == "community_0"

    def test_communities_min_size_passed(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn:
            mock_store_fn.return_value = _mock_store()
            from navegador.intelligence.community import CommunityDetector
            # --min-size must be forwarded verbatim to detect().
            with patch.object(CommunityDetector, "detect", return_value=[]) as mock_detect:
                runner.invoke(main, ["communities", "--min-size", "5"])
            mock_detect.assert_called_once_with(min_size=5)

    def test_communities_empty_graph_message(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn:
            mock_store_fn.return_value = _mock_store()
            from navegador.intelligence.community import CommunityDetector
            with patch.object(CommunityDetector, "detect", return_value=[]):
                result = runner.invoke(main, ["communities"])
            assert result.exit_code == 0
            # NOTE(review): the `or result.exit_code == 0` makes this assertion
            # vacuous once the line above has passed — tighten to just the
            # message check if the empty-graph wording is a real contract.
            assert "No communities" in result.output or result.exit_code == 0

    def test_communities_store_labels_flag(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn:
            mock_store_fn.return_value = _mock_store()
            from navegador.intelligence.community import CommunityDetector
            # --store-labels must persist labels via store_communities().
            with patch.object(CommunityDetector, "detect", return_value=self._make_communities()), \
                    patch.object(CommunityDetector, "store_communities", return_value=5) as mock_store:
                result = runner.invoke(main, ["communities", "--store-labels"])
            assert result.exit_code == 0
            mock_store.assert_called_once()
727
728
729 # ── CLI: ask ──────────────────────────────────────────────────────────────────
730
731
class TestAskCLI:
    """CLI `ask` command: NLPEngine wiring and provider selection."""

    def test_ask_prints_answer(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as store_patch, \
                patch("navegador.llm.auto_provider") as auto_patch:
            store_patch.return_value = _mock_store()
            auto_patch.return_value = _mock_provider()

            from navegador.intelligence.nlp import NLPEngine
            with patch.object(NLPEngine, "natural_query", return_value="The answer is 42."):
                outcome = runner.invoke(main, ["ask", "What is the answer?"])
            assert outcome.exit_code == 0
            assert "42" in outcome.output

    def test_ask_with_explicit_provider(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as store_patch, \
                patch("navegador.llm.get_provider") as get_patch:
            store_patch.return_value = _mock_store()
            get_patch.return_value = _mock_provider()

            from navegador.intelligence.nlp import NLPEngine
            with patch.object(NLPEngine, "natural_query", return_value="Answer."):
                outcome = runner.invoke(main, ["ask", "question", "--provider", "openai"])
            assert outcome.exit_code == 0
            # Explicit --provider must route through get_provider, not auto-detect.
            get_patch.assert_called_once_with("openai", model="")
760
761
762 # ── CLI: generate-docs ────────────────────────────────────────────────────────
763
764
class TestGenerateDocsCLI:
    """CLI `generate-docs` command: output and option forwarding."""

    def test_generate_docs_prints_output(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as store_patch, \
                patch("navegador.llm.auto_provider") as auto_patch:
            store_patch.return_value = _mock_store()
            auto_patch.return_value = _mock_provider()

            from navegador.intelligence.nlp import NLPEngine
            with patch.object(NLPEngine, "generate_docs", return_value="## my_func\nDocs here."):
                outcome = runner.invoke(main, ["generate-docs", "my_func"])
            assert outcome.exit_code == 0
            assert "my_func" in outcome.output or "Docs" in outcome.output

    def test_generate_docs_with_file_option(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as store_patch, \
                patch("navegador.llm.auto_provider") as auto_patch:
            store_patch.return_value = _mock_store()
            auto_patch.return_value = _mock_provider()

            from navegador.intelligence.nlp import NLPEngine
            with patch.object(NLPEngine, "generate_docs", return_value="Docs.") as docs_patch:
                runner.invoke(main, ["generate-docs", "my_func", "--file", "app.py"])
            # The --file value must be forwarded as the file_path kwarg.
            docs_patch.assert_called_once_with("my_func", file_path="app.py")
792
793
794 # ── CLI: docs ─────────────────────────────────────────────────────────────────
795
796
class TestDocsCLI:
    """CLI `docs` command: file/module dispatch, --project, --json, --provider."""

    def test_docs_file_path(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn:
            mock_store_fn.return_value = _mock_store()
            from navegador.intelligence.docgen import DocGenerator
            # A path-like target is expected to route to generate_file_docs.
            with patch.object(DocGenerator, "generate_file_docs", return_value="# File docs") as mock_fd:
                result = runner.invoke(main, ["docs", "app/store.py"])
            assert result.exit_code == 0
            mock_fd.assert_called_once_with("app/store.py")

    def test_docs_module_name(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn:
            mock_store_fn.return_value = _mock_store()
            from navegador.intelligence.docgen import DocGenerator
            # A dotted name is expected to route to generate_module_docs.
            with patch.object(DocGenerator, "generate_module_docs", return_value="# Module docs") as mock_md:
                result = runner.invoke(main, ["docs", "navegador.graph"])
            assert result.exit_code == 0
            mock_md.assert_called_once_with("navegador.graph")

    def test_docs_project_flag(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn:
            mock_store_fn.return_value = _mock_store()
            from navegador.intelligence.docgen import DocGenerator
            # --project overrides target-based dispatch entirely.
            with patch.object(DocGenerator, "generate_project_docs", return_value="# Project") as mock_pd:
                result = runner.invoke(main, ["docs", ".", "--project"])
            assert result.exit_code == 0
            mock_pd.assert_called_once()

    def test_docs_json_output(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn:
            mock_store_fn.return_value = _mock_store()
            from navegador.intelligence.docgen import DocGenerator
            with patch.object(DocGenerator, "generate_project_docs", return_value="# Project"):
                result = runner.invoke(main, ["docs", ".", "--project", "--json"])
            assert result.exit_code == 0
            # JSON mode wraps the rendered markdown under a "docs" key.
            data = json.loads(result.output)
            assert "docs" in data

    def test_docs_with_llm_provider(self):
        runner = CliRunner()
        with patch("navegador.cli.commands._get_store") as mock_store_fn, \
                patch("navegador.intelligence.docgen.DocGenerator.generate_file_docs", return_value="# Docs"):
            mock_store_fn.return_value = _mock_store()
            with patch("navegador.llm.get_provider") as mock_get:
                mock_get.return_value = _mock_provider()
                result = runner.invoke(
                    main, ["docs", "app/store.py", "--provider", "openai"]
                )
                assert result.exit_code == 0
                # Explicit --provider must route through get_provider, not auto-detect.
                mock_get.assert_called_once_with("openai", model="")

Keyboard Shortcuts

Open search /
Next entry (timeline) j
Previous entry (timeline) k
Open focused entry Enter
Show this help ?
Toggle theme Top nav button