ScuttleBot

scuttlebot / internal / llm / factory.go
Blame History Raw 107 lines
1
package llm
2
3
import (
	"context"
	"fmt"
	"net/http"
	"sort"
)
8
9
// KnownBackends maps OpenAI-compatible backend names to their default base URLs.
// Backends not listed here — and not one of the specially-handled names
// "anthropic", "gemini", "bedrock", "ollama" — require an explicit base_url.
var KnownBackends = map[string]string{
	// Hosted services.
	"openai":      "https://api.openai.com/v1",
	"openrouter":  "https://openrouter.ai/api/v1",
	"together":    "https://api.together.xyz/v1",
	"groq":        "https://api.groq.com/openai/v1",
	"fireworks":   "https://api.fireworks.ai/inference/v1",
	"mistral":     "https://api.mistral.ai/v1",
	"ai21":        "https://api.ai21.com/studio/v1",
	"huggingface": "https://api-inference.huggingface.co/v1",
	"deepseek":    "https://api.deepseek.com/v1",
	"cerebras":    "https://api.cerebras.ai/v1",
	"xai":         "https://api.x.ai/v1",
	// Local / self-hosted (defaults — override with base_url)
	"litellm":     "http://localhost:4000/v1",
	"lmstudio":    "http://localhost:1234/v1",
	"jan":         "http://localhost:1337/v1",
	"localai":     "http://localhost:8080/v1",
	"vllm":        "http://localhost:8000/v1",
	"anythingllm": "http://localhost:3001/v1",
}
30
31
// New creates a Provider from the given config. The returned value may also
32
// implement ModelDiscoverer — check with a type assertion before calling
33
// DiscoverModels. Allow/block filters in cfg are applied transparently by
34
// wrapping the discoverer.
35
func New(cfg BackendConfig) (Provider, error) {
36
hc := &http.Client{}
37
switch cfg.Backend {
38
case "anthropic":
39
return newAnthropicProvider(cfg, hc), nil
40
41
case "gemini":
42
return newGeminiProvider(cfg, hc), nil
43
44
case "bedrock":
45
return newBedrockProvider(cfg, hc)
46
47
case "ollama":
48
baseURL := cfg.BaseURL
49
if baseURL == "" {
50
baseURL = "http://localhost:11434"
51
}
52
return newOllamaProvider(cfg, baseURL, hc), nil
53
54
default:
55
// OpenAI-compatible backend.
56
baseURL := cfg.BaseURL
57
if baseURL == "" {
58
u, ok := KnownBackends[cfg.Backend]
59
if !ok {
60
return nil, fmt.Errorf("llm: unknown backend %q — set base_url for custom endpoints", cfg.Backend)
61
}
62
baseURL = u
63
}
64
model := cfg.Model
65
if model == "" {
66
model = "gpt-4o-mini"
67
}
68
return newOpenAIProvider(cfg.APIKey, baseURL, model, hc), nil
69
}
70
}
71
72
// Discover runs model discovery for the given config, applying any allow/block
73
// filters from the config. Returns an error if the provider doesn't support
74
// discovery.
75
func Discover(ctx context.Context, cfg BackendConfig) ([]ModelInfo, error) {
76
p, err := New(cfg)
77
if err != nil {
78
return nil, err
79
}
80
d, ok := p.(ModelDiscoverer)
81
if !ok {
82
return nil, fmt.Errorf("llm: backend %q does not support model discovery", cfg.Backend)
83
}
84
models, err := d.DiscoverModels(ctx)
85
if err != nil {
86
return nil, err
87
}
88
if len(cfg.Allow) > 0 || len(cfg.Block) > 0 {
89
f, ferr := NewModelFilter(cfg.Allow, cfg.Block)
90
if ferr != nil {
91
return nil, ferr
92
}
93
models = f.Apply(models)
94
}
95
return models, nil
96
}
97
98
// BackendNames returns a sorted list of all known backend names.
99
func BackendNames() []string {
100
names := make([]string, 0, len(KnownBackends)+4)
101
for k := range KnownBackends {
102
names = append(names, k)
103
}
104
names = append(names, "anthropic", "gemini", "bedrock", "ollama")
105
return names
106
}
107

Keyboard Shortcuts

Open search /
Next entry (timeline) j
Previous entry (timeline) k
Open focused entry Enter
Show this help ?
Toggle theme Top nav button