|
1
|
package llm |
|
2
|
|
|
3
|
import ( |
|
4
|
"bytes" |
|
5
|
"context" |
|
6
|
"encoding/json" |
|
7
|
"fmt" |
|
8
|
"io" |
|
9
|
"net/http" |
|
10
|
) |
|
11
|
|
|
12
|
// ollamaProvider talks to a local Ollama server over its HTTP API.
// It implements Summarize (via /api/generate) and model discovery
// (via /api/tags); see the methods below.
type ollamaProvider struct {
	baseURL string       // server root, e.g. "http://localhost:11434"; endpoint paths are appended to it
	model   string       // model name sent with each generate request; defaulted in newOllamaProvider
	http    *http.Client // shared HTTP client supplied by the caller (owns timeout/transport config)
}
|
17
|
|
|
18
|
func newOllamaProvider(cfg BackendConfig, baseURL string, hc *http.Client) *ollamaProvider { |
|
19
|
model := cfg.Model |
|
20
|
if model == "" { |
|
21
|
model = "llama3.2" |
|
22
|
} |
|
23
|
return &ollamaProvider{ |
|
24
|
baseURL: baseURL, |
|
25
|
model: model, |
|
26
|
http: hc, |
|
27
|
} |
|
28
|
} |
|
29
|
|
|
30
|
func (p *ollamaProvider) Summarize(ctx context.Context, prompt string) (string, error) { |
|
31
|
body, _ := json.Marshal(map[string]any{ |
|
32
|
"model": p.model, |
|
33
|
"prompt": prompt, |
|
34
|
"stream": false, |
|
35
|
}) |
|
36
|
req, err := http.NewRequestWithContext(ctx, "POST", p.baseURL+"/api/generate", bytes.NewReader(body)) |
|
37
|
if err != nil { |
|
38
|
return "", err |
|
39
|
} |
|
40
|
req.Header.Set("Content-Type", "application/json") |
|
41
|
|
|
42
|
resp, err := p.http.Do(req) |
|
43
|
if err != nil { |
|
44
|
return "", fmt.Errorf("ollama request: %w", err) |
|
45
|
} |
|
46
|
defer resp.Body.Close() |
|
47
|
|
|
48
|
data, _ := io.ReadAll(resp.Body) |
|
49
|
if resp.StatusCode != http.StatusOK { |
|
50
|
return "", fmt.Errorf("ollama error %d: %s", resp.StatusCode, string(data)) |
|
51
|
} |
|
52
|
|
|
53
|
var result struct { |
|
54
|
Response string `json:"response"` |
|
55
|
} |
|
56
|
if err := json.Unmarshal(data, &result); err != nil { |
|
57
|
return "", fmt.Errorf("ollama parse: %w", err) |
|
58
|
} |
|
59
|
return result.Response, nil |
|
60
|
} |
|
61
|
|
|
62
|
// DiscoverModels calls the Ollama /api/tags endpoint to list installed models. |
|
63
|
func (p *ollamaProvider) DiscoverModels(ctx context.Context) ([]ModelInfo, error) { |
|
64
|
req, err := http.NewRequestWithContext(ctx, "GET", p.baseURL+"/api/tags", nil) |
|
65
|
if err != nil { |
|
66
|
return nil, err |
|
67
|
} |
|
68
|
|
|
69
|
resp, err := p.http.Do(req) |
|
70
|
if err != nil { |
|
71
|
return nil, fmt.Errorf("ollama models request: %w", err) |
|
72
|
} |
|
73
|
defer resp.Body.Close() |
|
74
|
|
|
75
|
data, _ := io.ReadAll(resp.Body) |
|
76
|
if resp.StatusCode != http.StatusOK { |
|
77
|
return nil, fmt.Errorf("ollama models error %d: %s", resp.StatusCode, string(data)) |
|
78
|
} |
|
79
|
|
|
80
|
var result struct { |
|
81
|
Models []struct { |
|
82
|
Name string `json:"name"` |
|
83
|
} `json:"models"` |
|
84
|
} |
|
85
|
if err := json.Unmarshal(data, &result); err != nil { |
|
86
|
return nil, fmt.Errorf("ollama models parse: %w", err) |
|
87
|
} |
|
88
|
|
|
89
|
models := make([]ModelInfo, len(result.Models)) |
|
90
|
for i, m := range result.Models { |
|
91
|
models[i] = ModelInfo{ID: m.Name, Name: m.Name} |
|
92
|
} |
|
93
|
return models, nil |
|
94
|
} |
|
95
|
|