"""Клиент для работы с GigaChat API."""
|
||
import json
|
||
from typing import List, Optional
|
||
|
||
import aiohttp
|
||
|
||
from models.gigachat_types import GigaChatMessage, GigaChatRequest, GigaChatResponse
|
||
from services.token_manager import TokenManager
|
||
|
||
|
||
class GigaChatClient:
    """Client for interacting with the GigaChat API."""

    def __init__(
        self,
        token_manager: TokenManager,
        base_url: Optional[str] = None,
    ):
        self.token_manager = token_manager
        # Default to the public GigaChat endpoint unless an override is given.
        self.base_url = base_url or "https://gigachat.devices.sberbank.ru/api/v1"
        self._session: Optional[aiohttp.ClientSession] = None

    async def _get_session(self) -> aiohttp.ClientSession:
        """Return the HTTP session, creating it lazily on first use."""
        if self._session is None or self._session.closed:
            self._session = aiohttp.ClientSession()
        return self._session

    async def chat(
        self,
        message: str,
        context: Optional[List[GigaChatMessage]] = None,
        model: str = "GigaChat-2",
        temperature: float = 0.7,
        max_tokens: int = 2000,
    ) -> str:
        """
        Send a message to GigaChat.

        Args:
            message: Message text
            context: Message history
            model: GigaChat model (GigaChat-2, GigaChat-2-Lite, GigaChat-2-Pro, GigaChat-2-Max)
            temperature: Sampling temperature
            max_tokens: Maximum number of tokens to generate

        Returns:
            The model's reply text
        """
        # Copy the context so the caller's history list is not mutated.
        messages = list(context) if context else []
        messages.append(GigaChatMessage(role="user", content=message))

        request = GigaChatRequest(
            model=model,
            messages=messages,
            temperature=temperature,
            max_tokens=max_tokens,
        )

        response = await self._make_request(request)
        return response.choices[0].message.content

    async def chat_with_response(
        self,
        message: str,
        context: Optional[List[GigaChatMessage]] = None,
        model: str = "GigaChat-2",
        temperature: float = 0.7,
        max_tokens: int = 2000,
    ) -> GigaChatResponse:
        """
        Send a message and return the full API response.

        Args:
            message: Message text
            context: Message history
            model: GigaChat model
            temperature: Sampling temperature
            max_tokens: Maximum number of tokens to generate

        Returns:
            The complete response object from the API
        """
        # Copy the context so the caller's history list is not mutated.
        messages = list(context) if context else []
        messages.append(GigaChatMessage(role="user", content=message))

        request = GigaChatRequest(
            model=model,
            messages=messages,
            temperature=temperature,
            max_tokens=max_tokens,
        )

        return await self._make_request(request)

    async def _make_request(self, request: GigaChatRequest) -> GigaChatResponse:
        """Execute a request against the API."""
        token = await self.token_manager.get_token()
        session = await self._get_session()

        headers = {
            "Authorization": f"Bearer {token}",
            "Content-Type": "application/json",
        }

        url = f"{self.base_url}/chat/completions"

        async with session.post(
            url,
            headers=headers,
            # Serialize the pydantic model, dropping optional fields left unset.
            json=request.model_dump(exclude_none=True),
        ) as response:
            if response.status != 200:
                error_text = await response.text()
                raise Exception(f"GigaChat API error: {response.status} - {error_text}")

            data = await response.json()
            return GigaChatResponse(**data)

    async def close(self):
        """Close the HTTP session."""
        if self._session and not self._session.closed:
            await self._session.close()
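

# Minimal usage sketch (illustrative, not part of the original module). It
# assumes TokenManager can be constructed without arguments and obtains its
# credentials from configuration; adapt the construction to your setup.
if __name__ == "__main__":
    import asyncio

    async def _demo() -> None:
        client = GigaChatClient(token_manager=TokenManager())
        try:
            reply = await client.chat("Hello! What can you do?")
            print(reply)
        finally:
            # Always release the underlying aiohttp session.
            await client.close()

    asyncio.run(_demo())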