Update environment variables, Docker configuration, and dependencies; refactor token management and chat agent logic. Added FastAPI server setup and improved message handling in GigaChat client.

This commit is contained in:
12 changed files with 348 additions and 50 deletions

143
app.py Normal file
View File

@@ -0,0 +1,143 @@
"""FastAPI сервер для AI-агентов."""
import os
from typing import List, Optional, Dict, Any
from uuid import UUID, uuid4
from fastapi import FastAPI, HTTPException, APIRouter
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel, Field
from agents.gigachat_client import GigaChatClient
from agents.chat_agent import ChatAgent
from services.token_manager import TokenManager
from services.cache_service import CacheService
from models.gigachat_types import GigaChatMessage
# FastAPI application instance for the AI-agents service.
app = FastAPI(title="New Planet AI Agents API", version="1.0.0")

# CORS middleware so the backend (served from another origin) can call this API.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # In production, restrict to specific domains
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Router for the versioned API endpoints.
api_router = APIRouter(prefix="/api/v1", tags=["ai"])

# Service initialization: module-level singletons shared by all requests.
token_manager = TokenManager()
gigachat_client = GigaChatClient(token_manager)
cache_service = CacheService()  # Uses REDIS_URL from the environment
chat_agent = ChatAgent(gigachat_client, cache_service)
# Request/response models
class ChatRequest(BaseModel):
    """Request payload for sending a message to the chat agent."""

    # User message; pydantic validates length (1..2000 chars).
    message: str = Field(..., min_length=1, max_length=2000)
    # Existing conversation to continue; used when no explicit context is given.
    conversation_id: Optional[str] = None
    # Explicit message history; when set, it takes precedence over conversation_id.
    context: Optional[List[Dict[str, Any]]] = None
    # GigaChat model name; the handler falls back to "GigaChat-2" when None.
    model: Optional[str] = "GigaChat-2"
    # Caller identity; the handler generates a random UUID when omitted.
    user_id: Optional[UUID] = None
class ChatResponse(BaseModel):
    """Response returned by the AI agent."""

    # Generated assistant reply text.
    response: str
    # Echo of the request's conversation id (may be None).
    conversation_id: Optional[str] = None
    # Token usage reported by the agent, if available.
    tokens_used: Optional[int] = None
    # Model name that produced the response.
    model: Optional[str] = None
class GenerateTextRequest(BaseModel):
    """Request payload for one-shot text generation."""

    # Prompt to generate from; must be non-empty.
    prompt: str = Field(..., min_length=1)
    # GigaChat model name; the handler falls back to "GigaChat-2-Pro" when None.
    model: Optional[str] = "GigaChat-2-Pro"
class GenerateTextResponse(BaseModel):
    """Response carrying the generated text."""

    # Text returned by GigaChat.
    text: str
@app.get("/health")
async def health_check():
    """Liveness probe: report that the ai-agents service is up."""
    payload = {"status": "ok", "service": "ai-agents"}
    return payload
@api_router.post("/chat", response_model=ChatResponse)
async def chat(request: ChatRequest):
    """Send a message to the AI chat agent.

    Two modes are supported:
    1. With an explicit ``context`` — delegates to ``chat_with_context``.
    2. Otherwise — delegates to ``ChatAgent.chat``, which maintains the
       conversation context (in Redis) keyed by ``conversation_id``.

    Raises:
        HTTPException: 500 with the underlying error message on any failure.
    """
    try:
        user_id = request.user_id or uuid4()
        # Hoisted: the same model fallback was previously repeated three times.
        model = request.model or "GigaChat-2"
        if request.context:
            # Explicit history supplied by the caller takes precedence.
            response_text, tokens_used = await chat_agent.chat_with_context(
                user_id=user_id,
                message=request.message,
                context=request.context,
                model=model,
            )
        else:
            # Let the agent resolve the history via conversation_id.
            response_text, tokens_used = await chat_agent.chat(
                user_id=user_id,
                message=request.message,
                conversation_id=request.conversation_id,
                model=model,
            )
        return ChatResponse(
            response=response_text,
            conversation_id=request.conversation_id,
            tokens_used=tokens_used,
            model=model,
        )
    except Exception as e:
        # API boundary: surface any failure as a 500; chain the cause so
        # tracebacks/logs keep the original exception.
        raise HTTPException(status_code=500, detail=f"Chat error: {str(e)}") from e
@api_router.post("/generate_text", response_model=GenerateTextResponse)
async def generate_text(request: GenerateTextRequest):
    """Generate text from a prompt via GigaChat.

    Raises:
        HTTPException: 500 with the underlying error message on any failure.
    """
    try:
        response_text = await gigachat_client.chat(
            message=request.prompt,
            model=request.model or "GigaChat-2-Pro",
            temperature=0.7,  # fixed sampling settings for this endpoint
            max_tokens=2000,
        )
        return GenerateTextResponse(text=response_text)
    except Exception as e:
        # API boundary: wrap failures as HTTP 500; chain the cause so
        # tracebacks/logs keep the original exception.
        raise HTTPException(status_code=500, detail=f"Generate text error: {str(e)}") from e
# Mount the versioned API routes onto the application.
app.include_router(api_router)
# NOTE(review): @app.on_event is deprecated in newer FastAPI versions in
# favor of lifespan handlers — consider migrating when upgrading.
@app.on_event("shutdown")
async def shutdown():
    """Close outbound connections (GigaChat client, cache service) on shutdown."""
    await gigachat_client.close()
    await cache_service.close()
if __name__ == "__main__":
    # Local/dev entry point.
    import uvicorn

    # Host/port are overridable via environment variables; the defaults
    # preserve the previous hard-coded behavior (0.0.0.0:8000).
    uvicorn.run(
        app,
        host=os.getenv("HOST", "0.0.0.0"),
        port=int(os.getenv("PORT", "8000")),
    )