Navegador

navegador / tests / test_llm.py
Blame History Raw 490 lines
1
"""
2
Tests for navegador/llm.py — LLM backend abstraction.
3
4
All tests are fully offline. SDK imports are patched to avoid requiring
5
any LLM SDK to be installed in the test environment.
6
"""
7
8
from __future__ import annotations
9
10
import sys
11
from unittest.mock import MagicMock, patch
12
13
import pytest
14
15
16
# ── Helpers ───────────────────────────────────────────────────────────────────
17
18
19
def _block_import(name: str):
20
"""
21
Context manager that makes ``import <name>`` raise ImportError for the
22
duration of the block, even if the package is installed.
23
"""
24
25
class _Blocker:
26
def __enter__(self):
27
self._original = sys.modules.get(name, None)
28
sys.modules[name] = None # type: ignore[assignment]
29
return self
30
31
def __exit__(self, *_):
32
if self._original is None:
33
sys.modules.pop(name, None)
34
else:
35
sys.modules[name] = self._original
36
37
return _Blocker()
38
39
40
def _fake_anthropic_module():
41
"""Return a minimal mock that satisfies AnthropicProvider's usage."""
42
mod = MagicMock()
43
client = MagicMock()
44
message = MagicMock()
45
message.content = [MagicMock(text="hello from anthropic")]
46
client.messages.create.return_value = message
47
mod.Anthropic.return_value = client
48
return mod, client
49
50
51
def _fake_openai_module():
52
"""Return a minimal mock that satisfies OpenAIProvider's usage."""
53
mod = MagicMock()
54
client = MagicMock()
55
choice = MagicMock()
56
choice.message.content = "hello from openai"
57
response = MagicMock()
58
response.choices = [choice]
59
client.chat.completions.create.return_value = response
60
embed_data = MagicMock()
61
embed_data.embedding = [0.1, 0.2, 0.3]
62
embed_response = MagicMock()
63
embed_response.data = [embed_data]
64
client.embeddings.create.return_value = embed_response
65
mod.OpenAI.return_value = client
66
return mod, client
67
68
69
def _fake_ollama_module():
70
"""Return a minimal mock that satisfies OllamaProvider's usage."""
71
mod = MagicMock()
72
client = MagicMock()
73
client.chat.return_value = {"message": {"content": "hello from ollama"}}
74
client.embeddings.return_value = {"embedding": [0.4, 0.5, 0.6]}
75
mod.Client.return_value = client
76
return mod, client
77
78
79
# ── AnthropicProvider ─────────────────────────────────────────────────────────
80
81
82
class TestAnthropicProvider:
    """AnthropicProvider behaviour, with the ``anthropic`` SDK mocked out."""

    def _reload(self):
        """Re-import navegador.llm so its SDK import guard re-runs under the
        currently patched ``sys.modules``. Mirrors the helper used by the
        other provider test classes."""
        import importlib

        import navegador.llm as llm_mod

        importlib.reload(llm_mod)
        return llm_mod

    def test_raises_import_error_when_sdk_missing(self):
        with _block_import("anthropic"):
            # Reload inside the block so the guard sees the blocked import.
            llm_mod = self._reload()
            with pytest.raises(ImportError, match="pip install anthropic"):
                llm_mod.AnthropicProvider()

    def test_name_is_anthropic(self):
        fake_mod, _ = _fake_anthropic_module()
        with patch.dict(sys.modules, {"anthropic": fake_mod}):
            p = self._reload().AnthropicProvider()
            assert p.name == "anthropic"

    def test_default_model(self):
        fake_mod, _ = _fake_anthropic_module()
        with patch.dict(sys.modules, {"anthropic": fake_mod}):
            p = self._reload().AnthropicProvider()
            assert p.model == "claude-3-5-haiku-20241022"

    def test_custom_model(self):
        fake_mod, _ = _fake_anthropic_module()
        with patch.dict(sys.modules, {"anthropic": fake_mod}):
            p = self._reload().AnthropicProvider(model="claude-opus-4")
            assert p.model == "claude-opus-4"

    def test_complete_returns_text(self):
        fake_mod, client = _fake_anthropic_module()
        with patch.dict(sys.modules, {"anthropic": fake_mod}):
            p = self._reload().AnthropicProvider()
            result = p.complete("say hello")
            assert result == "hello from anthropic"
            client.messages.create.assert_called_once()

    def test_complete_passes_max_tokens(self):
        fake_mod, client = _fake_anthropic_module()
        with patch.dict(sys.modules, {"anthropic": fake_mod}):
            p = self._reload().AnthropicProvider()
            p.complete("hi", max_tokens=512)
            _, kwargs = client.messages.create.call_args
            assert kwargs["max_tokens"] == 512

    def test_embed_raises_not_implemented(self):
        fake_mod, _ = _fake_anthropic_module()
        with patch.dict(sys.modules, {"anthropic": fake_mod}):
            p = self._reload().AnthropicProvider()
            with pytest.raises(NotImplementedError):
                p.embed("text")
164
165
166
# ── OpenAIProvider ────────────────────────────────────────────────────────────
167
168
169
class TestOpenAIProvider:
    """OpenAIProvider behaviour, with the ``openai`` SDK mocked out."""

    def _reload(self):
        """Re-import navegador.llm so its SDK import guard re-runs under the
        currently patched ``sys.modules``. Mirrors the helper used by the
        other provider test classes."""
        import importlib

        import navegador.llm as llm_mod

        importlib.reload(llm_mod)
        return llm_mod

    def test_raises_import_error_when_sdk_missing(self):
        with _block_import("openai"):
            # Reload inside the block so the guard sees the blocked import.
            llm_mod = self._reload()
            with pytest.raises(ImportError, match="pip install openai"):
                llm_mod.OpenAIProvider()

    def test_name_is_openai(self):
        fake_mod, _ = _fake_openai_module()
        with patch.dict(sys.modules, {"openai": fake_mod}):
            p = self._reload().OpenAIProvider()
            assert p.name == "openai"

    def test_default_model(self):
        fake_mod, _ = _fake_openai_module()
        with patch.dict(sys.modules, {"openai": fake_mod}):
            p = self._reload().OpenAIProvider()
            assert p.model == "gpt-4o-mini"

    def test_custom_model(self):
        fake_mod, _ = _fake_openai_module()
        with patch.dict(sys.modules, {"openai": fake_mod}):
            p = self._reload().OpenAIProvider(model="gpt-4o")
            assert p.model == "gpt-4o"

    def test_complete_returns_text(self):
        fake_mod, client = _fake_openai_module()
        with patch.dict(sys.modules, {"openai": fake_mod}):
            p = self._reload().OpenAIProvider()
            result = p.complete("say hello")
            assert result == "hello from openai"
            client.chat.completions.create.assert_called_once()

    def test_embed_returns_list_of_floats(self):
        fake_mod, client = _fake_openai_module()
        with patch.dict(sys.modules, {"openai": fake_mod}):
            p = self._reload().OpenAIProvider()
            result = p.embed("hello world")
            assert result == [0.1, 0.2, 0.3]
            client.embeddings.create.assert_called_once()
238
239
240
# ── OllamaProvider ────────────────────────────────────────────────────────────
241
242
243
class TestOllamaProvider:
    """OllamaProvider behaviour, with the ``ollama`` SDK mocked out."""

    def _reload(self):
        """Re-import navegador.llm so its SDK import guard re-runs under the
        currently patched ``sys.modules``. Mirrors the helper used by the
        other provider test classes."""
        import importlib

        import navegador.llm as llm_mod

        importlib.reload(llm_mod)
        return llm_mod

    def test_raises_import_error_when_sdk_missing(self):
        with _block_import("ollama"):
            # Reload inside the block so the guard sees the blocked import.
            llm_mod = self._reload()
            with pytest.raises(ImportError, match="pip install ollama"):
                llm_mod.OllamaProvider()

    def test_name_is_ollama(self):
        fake_mod, _ = _fake_ollama_module()
        with patch.dict(sys.modules, {"ollama": fake_mod}):
            p = self._reload().OllamaProvider()
            assert p.name == "ollama"

    def test_default_model(self):
        fake_mod, _ = _fake_ollama_module()
        with patch.dict(sys.modules, {"ollama": fake_mod}):
            p = self._reload().OllamaProvider()
            assert p.model == "llama3.2"

    def test_custom_model(self):
        fake_mod, _ = _fake_ollama_module()
        with patch.dict(sys.modules, {"ollama": fake_mod}):
            p = self._reload().OllamaProvider(model="mistral")
            assert p.model == "mistral"

    def test_complete_returns_text(self):
        fake_mod, client = _fake_ollama_module()
        with patch.dict(sys.modules, {"ollama": fake_mod}):
            p = self._reload().OllamaProvider()
            result = p.complete("say hello")
            assert result == "hello from ollama"
            client.chat.assert_called_once()

    def test_embed_returns_list_of_floats(self):
        fake_mod, client = _fake_ollama_module()
        with patch.dict(sys.modules, {"ollama": fake_mod}):
            p = self._reload().OllamaProvider()
            result = p.embed("hello world")
            assert result == [0.4, 0.5, 0.6]
            client.embeddings.assert_called_once()
312
313
314
# ── discover_providers ────────────────────────────────────────────────────────
315
316
317
class TestDiscoverProviders:
    """discover_providers() should report installed SDKs in priority order."""

    def _reload(self):
        """Re-import navegador.llm under the currently patched ``sys.modules``.

        NOTE: the previous version took an unused ``modules`` dict parameter;
        it has been removed since every call site passed ``{}``.
        """
        import importlib

        import navegador.llm as llm_mod

        importlib.reload(llm_mod)
        return llm_mod

    def test_all_available(self):
        fake_a, _ = _fake_anthropic_module()
        fake_o, _ = _fake_openai_module()
        fake_ol, _ = _fake_ollama_module()
        with patch.dict(
            sys.modules,
            {"anthropic": fake_a, "openai": fake_o, "ollama": fake_ol},
        ):
            llm_mod = self._reload()
            result = llm_mod.discover_providers()
            assert result == ["anthropic", "openai", "ollama"]

    def test_only_openai_available(self):
        fake_o, _ = _fake_openai_module()
        with (
            _block_import("anthropic"),
            patch.dict(sys.modules, {"openai": fake_o}),
            _block_import("ollama"),
        ):
            llm_mod = self._reload()
            result = llm_mod.discover_providers()
            assert result == ["openai"]

    def test_none_available(self):
        with _block_import("anthropic"), _block_import("openai"), _block_import("ollama"):
            llm_mod = self._reload()
            result = llm_mod.discover_providers()
            assert result == []

    def test_preserves_priority_order(self):
        fake_a, _ = _fake_anthropic_module()
        fake_ol, _ = _fake_ollama_module()
        with (
            patch.dict(sys.modules, {"anthropic": fake_a, "ollama": fake_ol}),
            _block_import("openai"),
        ):
            llm_mod = self._reload()
            result = llm_mod.discover_providers()
            assert result == ["anthropic", "ollama"]
365
366
367
# ── get_provider ──────────────────────────────────────────────────────────────
368
369
370
class TestGetProvider:
    """get_provider(name) returns the matching provider or raises ValueError."""

    def _reload(self):
        """Re-import navegador.llm under the currently patched ``sys.modules``."""
        import importlib

        import navegador.llm as llm_mod

        importlib.reload(llm_mod)
        return llm_mod

    def test_returns_anthropic_provider(self):
        fake_mod, _ = _fake_anthropic_module()
        with patch.dict(sys.modules, {"anthropic": fake_mod}):
            llm_mod = self._reload()
            p = llm_mod.get_provider("anthropic")
            assert p.name == "anthropic"

    def test_returns_openai_provider(self):
        fake_mod, _ = _fake_openai_module()
        with patch.dict(sys.modules, {"openai": fake_mod}):
            llm_mod = self._reload()
            p = llm_mod.get_provider("openai")
            assert p.name == "openai"

    def test_returns_ollama_provider(self):
        fake_mod, _ = _fake_ollama_module()
        with patch.dict(sys.modules, {"ollama": fake_mod}):
            llm_mod = self._reload()
            p = llm_mod.get_provider("ollama")
            assert p.name == "ollama"

    def test_passes_model_argument(self):
        fake_mod, _ = _fake_anthropic_module()
        with patch.dict(sys.modules, {"anthropic": fake_mod}):
            llm_mod = self._reload()
            p = llm_mod.get_provider("anthropic", model="claude-opus-4")
            assert p.model == "claude-opus-4"

    def test_unknown_provider_raises_value_error(self):
        # Use the shared helper instead of duplicating the reload inline.
        llm_mod = self._reload()
        with pytest.raises(ValueError, match="Unknown LLM provider"):
            llm_mod.get_provider("grok")

    def test_unknown_provider_message_includes_valid_options(self):
        llm_mod = self._reload()
        with pytest.raises(ValueError, match="anthropic"):
            llm_mod.get_provider("nonexistent")
424
425
426
# ── auto_provider ─────────────────────────────────────────────────────────────
427
428
429
class TestAutoProvider:
    """auto_provider() picks the highest-priority SDK that imports cleanly."""

    def _reload(self):
        """Freshly re-import navegador.llm under the patched ``sys.modules``."""
        import importlib

        return importlib.reload(importlib.import_module("navegador.llm"))

    def test_prefers_anthropic_when_all_available(self):
        anth_mod, _ = _fake_anthropic_module()
        oai_mod, _ = _fake_openai_module()
        olla_mod, _ = _fake_ollama_module()
        patched = {"anthropic": anth_mod, "openai": oai_mod, "ollama": olla_mod}
        with patch.dict(sys.modules, patched):
            provider = self._reload().auto_provider()
            assert provider.name == "anthropic"

    def test_falls_back_to_openai_when_anthropic_missing(self):
        oai_mod, _ = _fake_openai_module()
        olla_mod, _ = _fake_ollama_module()
        with (
            _block_import("anthropic"),
            patch.dict(sys.modules, {"openai": oai_mod, "ollama": olla_mod}),
        ):
            provider = self._reload().auto_provider()
            assert provider.name == "openai"

    def test_falls_back_to_ollama_when_anthropic_and_openai_missing(self):
        olla_mod, _ = _fake_ollama_module()
        with (
            _block_import("anthropic"),
            _block_import("openai"),
            patch.dict(sys.modules, {"ollama": olla_mod}),
        ):
            provider = self._reload().auto_provider()
            assert provider.name == "ollama"

    def test_raises_runtime_error_when_no_sdk_available(self):
        with _block_import("anthropic"), _block_import("openai"), _block_import("ollama"):
            module = self._reload()
            with pytest.raises(RuntimeError, match="No LLM SDK is installed"):
                module.auto_provider()

    def test_runtime_error_message_includes_install_hints(self):
        with _block_import("anthropic"), _block_import("openai"), _block_import("ollama"):
            module = self._reload()
            with pytest.raises(RuntimeError, match="pip install"):
                module.auto_provider()

    def test_passes_model_to_provider(self):
        anth_mod, _ = _fake_anthropic_module()
        with patch.dict(sys.modules, {"anthropic": anth_mod}):
            provider = self._reload().auto_provider(model="claude-opus-4")
            assert provider.model == "claude-opus-4"
490

Keyboard Shortcuts

Open search /
Next entry (timeline) j
Previous entry (timeline) k
Open focused entry Enter
Show this help ?
Toggle theme Top nav button