"""ИИ-агент для чата 'Планета Земля'."""
from typing import List, Optional
from uuid import UUID
from models.gigachat_types import GigaChatMessage
from prompts.persona import EARTH_PERSONA
from agents.gigachat_client import GigaChatClient
from services.cache_service import CacheService


class ChatAgent:
    """AI agent for the "Planet Earth" chat — conversations with children and parents."""

    # Sampling settings shared by both public entry points.
    _TEMPERATURE = 0.7
    _MAX_TOKENS = 1500

    def __init__(self, gigachat: GigaChatClient, cache: CacheService):
        self.gigachat = gigachat
        self.cache = cache

    async def chat(
        self,
        user_id: UUID,
        message: str,
        conversation_id: Optional[str] = None,
        model: str = "GigaChat-2-Lite",
    ) -> tuple[str, int]:
        """
        Send a message and get a reply, using cached conversation context.

        Args:
            user_id: User ID (currently unused here; kept for interface stability)
            message: Message text
            conversation_id: Conversation ID (enables context load/save)
            model: GigaChat model name

        Returns:
            (reply text, number of tokens used)
        """
        # Load prior context from the cache when a conversation is given.
        context_messages: List[GigaChatMessage] = []
        if conversation_id:
            # NOTE(review): guard against a missing cache entry — iterating
            # None would raise TypeError. Assumes get_context returns a list
            # of {"role", "content"} dicts — confirm against CacheService.
            cached_context = await self.cache.get_context(str(conversation_id)) or []
            context_messages = [
                GigaChatMessage(role=msg["role"], content=msg["content"])
                for msg in cached_context
            ]

        assistant_message, tokens_used = await self._complete(
            message, context_messages, model
        )

        # Persist both sides of the exchange so the next call sees them.
        if conversation_id:
            await self.cache.add_message(str(conversation_id), "user", message)
            await self.cache.add_message(
                str(conversation_id), "assistant", assistant_message
            )

        return assistant_message, tokens_used

    async def chat_with_context(
        self,
        user_id: UUID,
        message: str,
        context: Optional[List[dict]] = None,
        model: str = "GigaChat-2-Lite",
    ) -> tuple[str, int]:
        """
        Send a message with an explicitly supplied context (no cache involved).

        Args:
            user_id: User ID (currently unused here; kept for interface stability)
            message: Message text
            context: Explicit conversation context as {"role", "content"} dicts
            model: GigaChat model name

        Returns:
            (reply text, number of tokens used)
        """
        context_messages = [
            GigaChatMessage(role=msg["role"], content=msg["content"])
            for msg in (context or [])
        ]
        return await self._complete(message, context_messages, model)

    async def _complete(
        self,
        message: str,
        context_messages: List[GigaChatMessage],
        model: str,
    ) -> tuple[str, int]:
        """
        Shared completion path: prepend the persona system prompt, append the
        current user turn, call GigaChat, and extract (reply, tokens used).

        Mutates ``context_messages`` in place (callers pass freshly built lists).
        """
        # Ensure the persona system prompt leads the context exactly once —
        # skip insertion when the caller already supplied a system message.
        if not context_messages or context_messages[0].role != "system":
            context_messages.insert(
                0, GigaChatMessage(role="system", content=EARTH_PERSONA)
            )

        # Append the current user message as the final turn.
        context_messages.append(GigaChatMessage(role="user", content=message))

        response = await self.gigachat.chat_with_response(
            message=message,
            context=context_messages,
            model=model,
            temperature=self._TEMPERATURE,
            max_tokens=self._MAX_TOKENS,
        )

        assistant_message = response.choices[0].message.content
        tokens_used = response.usage.total_tokens
        return assistant_message, tokens_used