feat: Add LangGraph streaming with real-time UI updates

- Add streaming schemas and events
- Implement run_review_stream in ReviewerAgent
- Update task_worker to broadcast streaming events via WebSocket
- Create ReviewStream component for real-time progress visualization
- Integrate ReviewStream into ReviewDetail page
- Show agent steps, LLM messages, and progress in real-time

Author: Primakov Alexandr Alexandrovich
Date:   2025-10-13 01:00:49 +03:00
parent  2ad11142ad
commit  4ab6400a87
6 changed files with 383 additions and 3 deletions
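The ReviewStream React component mentioned in the message is not among the hunks shown below. As a rough stand-in, a client consuming the broadcast events could look like the following sketch (a Python `websockets` client; the endpoint URL and event shapes are assumptions based on the worker code in this commit, not part of it):

import asyncio
import json

import websockets  # third-party client library, assumed to be available


async def follow_review(url: str = "ws://localhost:8000/ws") -> None:
    # The endpoint path is a guess; the worker broadcasts every event to all clients
    async with websockets.connect(url) as ws:
        async for raw in ws:
            event = json.loads(raw)
            if event.get("type") == "agent_step":
                print("step:", event.get("data", {}).get("step"))
            elif event.get("type") == "llm_message":
                print("llm:", event.get("data", {}).get("message", "")[:80])


if __name__ == "__main__":
    asyncio.run(follow_review())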

@@ -485,4 +485,56 @@ class ReviewerAgent:
         final_state = await self.graph.ainvoke(initial_state)
         return final_state
+
+    async def run_review_stream(
+        self,
+        review_id: int,
+        pr_number: int,
+        repository_id: int,
+        on_event: callable = None
+    ) -> Dict[str, Any]:
+        """Run the review workflow with streaming events"""
+        initial_state: ReviewState = {
+            "review_id": review_id,
+            "pr_number": pr_number,
+            "repository_id": repository_id,
+            "status": "pending",
+            "files": [],
+            "analyzed_files": [],
+            "comments": [],
+            "error": None,
+            "git_service": None
+        }
+
+        final_state = None
+
+        # Stream through the graph. With a list of stream modes,
+        # astream yields (mode, chunk) tuples.
+        async for mode, chunk in self.graph.astream(
+            initial_state,
+            stream_mode=["updates", "messages"]
+        ):
+            if mode == "updates":
+                # Node updates arrive as {node_name: node_output}
+                for node_name, node_data in chunk.items():
+                    if on_event:
+                        await on_event({
+                            "type": "agent_step",
+                            "step": node_name,
+                            "data": node_data
+                        })
+                    # Keep the latest node output as the final state
+                    if isinstance(node_data, dict):
+                        final_state = node_data
+            elif mode == "messages":
+                # LLM token events arrive as (message_chunk, metadata)
+                message_chunk, _metadata = chunk
+                if on_event:
+                    await on_event({
+                        "type": "llm_message",
+                        "message": str(message_chunk.content)
+                    })
+
+        return final_state or initial_state
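For orientation, a minimal usage sketch of the new entry point (not part of the commit): the IDs and the db session are placeholders, the import path is assumed, and print_event is a hypothetical stand-in for the WebSocket broadcaster that the task worker wires in below.

from app.agents.reviewer import ReviewerAgent  # import path assumed


async def print_event(event: dict) -> None:
    # Hypothetical on_event handler: just print each streamed event type
    print(event.get("type"), event.get("step", ""))


async def demo(db) -> None:
    # db is assumed to be the same async session ReviewerAgent already takes
    agent = ReviewerAgent(db)
    final_state = await agent.run_review_stream(
        review_id=1,        # illustrative IDs only
        pr_number=42,
        repository_id=7,
        on_event=print_event,
    )
    print("final status:", final_state.get("status"))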

@@ -16,6 +16,13 @@ from app.schemas.webhook import (
     GitHubWebhook,
     BitbucketWebhook
 )
+from app.schemas.streaming import (
+    StreamEvent,
+    AgentStepEvent,
+    LLMStreamEvent,
+    ReviewProgressEvent,
+    StreamEventType
+)
 
 __all__ = [
     "RepositoryCreate",
@@ -28,5 +35,10 @@ __all__ = [
"GiteaWebhook",
"GitHubWebhook",
"BitbucketWebhook",
"StreamEvent",
"AgentStepEvent",
"LLMStreamEvent",
"ReviewProgressEvent",
"StreamEventType",
]

@@ -0,0 +1,55 @@
"""Streaming events schemas"""
from typing import Optional, Any, Dict, Literal
from pydantic import BaseModel
class StreamEventType:
"""Stream event types"""
AGENT_START = "agent_start"
AGENT_UPDATE = "agent_update"
AGENT_STEP = "agent_step"
LLM_START = "llm_start"
LLM_STREAM = "llm_stream"
LLM_END = "llm_end"
AGENT_ERROR = "agent_error"
AGENT_COMPLETE = "agent_complete"
class StreamEvent(BaseModel):
"""Base streaming event"""
type: str
review_id: int
timestamp: str
data: Dict[str, Any]
class AgentStepEvent(BaseModel):
"""Agent step event"""
type: Literal["agent_step"] = "agent_step"
review_id: int
step: str # fetch_pr_info, fetch_files, analyze_files, post_comments
status: str # started, completed, failed
message: str
data: Optional[Dict[str, Any]] = None
class LLMStreamEvent(BaseModel):
"""LLM streaming event"""
type: Literal["llm_stream"] = "llm_stream"
review_id: int
file_path: Optional[str] = None
chunk: str # Часть ответа от LLM
is_complete: bool = False
class ReviewProgressEvent(BaseModel):
"""Review progress event"""
type: Literal["review_progress"] = "review_progress"
review_id: int
total_files: int
analyzed_files: int
total_comments: int
current_step: str
message: str

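A short sketch of how one of these models might be populated and turned into a broadcast payload (illustrative values; the serialization call depends on the Pydantic major version in use):

from app.schemas.streaming import ReviewProgressEvent

progress = ReviewProgressEvent(
    review_id=1,               # illustrative values only
    total_files=12,
    analyzed_files=3,
    total_comments=5,
    current_step="analyze_files",
    message="Analyzed 3 of 12 files",
)

payload = progress.model_dump()  # use progress.dict() on Pydantic v1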
@@ -159,14 +159,45 @@ class ReviewTaskWorker:
         await db.commit()
         await db.refresh(review)
 
-        # Run review agent
+        # Run review agent with streaming
         logger.info(f" 🤖 Starting AI review for PR #{pull_request.pr_number}")
+
+        # Import broadcast function
+        from app.main import manager
+        from datetime import datetime as dt
+
+        # Create event handler
+        async def on_review_event(event: dict):
+            """Handle review events and broadcast to clients"""
+            try:
+                # Prepare event data
+                event_data = {
+                    "type": event.get("type", "agent_update"),
+                    "review_id": review.id,
+                    "pr_number": pull_request.pr_number,
+                    "timestamp": dt.utcnow().isoformat(),
+                    "data": event
+                }
+
+                # Broadcast to all connected clients
+                await manager.broadcast(event_data)
+
+                # Log the event
+                if event.get("type") == "agent_step":
+                    step = event.get("step", "unknown")
+                    logger.info(f" 📍 Step: {step}")
+                elif event.get("type") == "llm_message":
+                    message = event.get("message", "")[:100]
+                    logger.debug(f" 💬 LLM: {message}...")
+            except Exception as e:
+                logger.error(f" ❌ Error broadcasting event: {e}")
+
         agent = ReviewerAgent(db)
-        await agent.run_review(
+        await agent.run_review_stream(
             review_id=review.id,
             pr_number=pull_request.pr_number,
-            repository_id=repository.id
+            repository_id=repository.id,
+            on_event=on_review_event
         )
         logger.info(f" ✅ Review completed for PR #{pull_request.pr_number}")