"""Anthropic provider implementation."""

import base64
import logging
import os
from pathlib import Path
from typing import Optional

import anthropic
from dotenv import load_dotenv

from video_processor.providers.base import BaseProvider, ModelInfo, ProviderRegistry

load_dotenv()
logger = logging.getLogger(__name__)


class AnthropicProvider(BaseProvider):
    """Anthropic Claude API provider."""

    provider_name = "anthropic"

    def __init__(self, api_key: Optional[str] = None):
        self.api_key = api_key or os.getenv("ANTHROPIC_API_KEY")
        if not self.api_key:
            raise ValueError("ANTHROPIC_API_KEY not set")
        self.client = anthropic.Anthropic(api_key=self.api_key)

    def chat(
        self,
        messages: list[dict],
        max_tokens: int = 4096,
        temperature: float = 0.7,
        model: Optional[str] = None,
    ) -> str:
        model = model or "claude-haiku-4-5-20251001"

        # Anthropic requires system messages as a top-level parameter
        system_parts = []
        chat_messages = []
        for msg in messages:
            if msg.get("role") == "system":
                system_parts.append(msg["content"])
            else:
                chat_messages.append(msg)

        kwargs = {
            "model": model,
            "messages": chat_messages,
            "max_tokens": max_tokens,
            "temperature": temperature,
        }
        if system_parts:
            kwargs["system"] = "\n\n".join(system_parts)

        response = self.client.messages.create(**kwargs)
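        # Stash token counts for cost reporting; assumes BaseProvider exposes
        # self._last_usage to callers (a convention of this codebase).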
        self._last_usage = {
            "input_tokens": getattr(response.usage, "input_tokens", 0),
            "output_tokens": getattr(response.usage, "output_tokens", 0),
        }
        return response.content[0].text

    def analyze_image(
        self,
        image_bytes: bytes,
        prompt: str,
        max_tokens: int = 4096,
        model: Optional[str] = None,
    ) -> str:
        model = model or "claude-haiku-4-5-20251001"
        b64 = base64.b64encode(image_bytes).decode()
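        # The image is sent as a base64 content block. media_type is hardcoded
        # to JPEG, so callers passing PNG/WebP bytes would need to adjust it.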
        response = self.client.messages.create(
            model=model,
            messages=[
                {
                    "role": "user",
                    "content": [
                        {
                            "type": "image",
                            "source": {
                                "type": "base64",
                                "media_type": "image/jpeg",
                                "data": b64,
                            },
                        },
                        {"type": "text", "text": prompt},
                    ],
                }
            ],
            max_tokens=max_tokens,
        )
        self._last_usage = {
            "input_tokens": getattr(response.usage, "input_tokens", 0),
            "output_tokens": getattr(response.usage, "output_tokens", 0),
        }
        return response.content[0].text

    def transcribe_audio(
        self,
        audio_path: str | Path,
        language: Optional[str] = None,
        model: Optional[str] = None,
    ) -> dict:
        raise NotImplementedError(
            "Anthropic does not provide a dedicated transcription API. "
            "Use OpenAI Whisper or Gemini for transcription."
        )

    def list_models(self) -> list[ModelInfo]:
        models = []
        try:
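            # models.list is paginated; limit=100 fetches only the first page,
            # which should comfortably cover the current Claude model catalog.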
            page = self.client.models.list(limit=100)
            for m in page.data:
                mid = m.id
                caps = ["chat", "vision"]  # All Claude models support chat + vision
                models.append(
                    ModelInfo(
                        id=mid,
                        provider="anthropic",
                        display_name=getattr(m, "display_name", mid),
                        capabilities=caps,
                    )
                )
        except Exception as e:
            logger.warning(f"Failed to list Anthropic models: {e}")
        return sorted(models, key=lambda m: m.id)
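

# Registered at import time, presumably so the registry can route "claude-"
# model ids to this provider without an explicit import of this class.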
ProviderRegistry.register(
    name="anthropic",
    provider_class=AnthropicProvider,
    env_var="ANTHROPIC_API_KEY",
    model_prefixes=["claude-"],
    default_models={
        "chat": "claude-haiku-4-5-20251001",
        "vision": "claude-haiku-4-5-20251001",
        "audio": "",
    },
)
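

# Illustrative usage (assumes ANTHROPIC_API_KEY is set in the environment):
#   provider = AnthropicProvider()
#   reply = provider.chat([{"role": "user", "content": "Summarize this clip."}])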