feat: Add LangGraph streaming with real-time UI updates
- Add streaming schemas and events
- Implement run_review_stream in ReviewerAgent (see the sketch below)
- Update task_worker to broadcast streaming events via WebSocket
- Create ReviewStream component for real-time progress visualization
- Integrate ReviewStream into ReviewDetail page
- Show agent steps, LLM messages, and progress in real-time
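A minimal sketch of what the streaming entry point could look like. The actual ReviewerAgent.run_review_stream added by this commit is not part of this excerpt; the signature and messages below are illustrative only, showing how the event schemas from the diff below can be yielded as the review advances.

from typing import AsyncIterator, List, Union

from app.schemas.streaming import AgentStepEvent, ReviewProgressEvent


async def run_review_stream(
    review_id: int, file_paths: List[str]
) -> AsyncIterator[Union[AgentStepEvent, ReviewProgressEvent]]:
    """Yield typed events as the review advances through its steps."""
    yield AgentStepEvent(
        review_id=review_id,
        step="fetch_pr_info",
        status="started",
        message="Fetching pull request metadata",
    )
    for i, path in enumerate(file_paths, start=1):
        # ... per-file LLM analysis would happen here ...
        yield ReviewProgressEvent(
            review_id=review_id,
            total_files=len(file_paths),
            analyzed_files=i,
            total_comments=0,
            current_step="analyze_files",
            message=f"Analyzed {path}",
        )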
@@ -16,6 +16,13 @@ from app.schemas.webhook import (
     GitHubWebhook,
     BitbucketWebhook
 )
+from app.schemas.streaming import (
+    StreamEvent,
+    AgentStepEvent,
+    LLMStreamEvent,
+    ReviewProgressEvent,
+    StreamEventType
+)
 
 __all__ = [
     "RepositoryCreate",
@@ -28,5 +35,10 @@ __all__ = [
     "GiteaWebhook",
     "GitHubWebhook",
     "BitbucketWebhook",
+    "StreamEvent",
+    "AgentStepEvent",
+    "LLMStreamEvent",
+    "ReviewProgressEvent",
+    "StreamEventType",
 ]
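Usage note (not part of the diff): assuming this hunk edits the schemas package __init__ — the file path is not shown in this excerpt — the re-export lets callers build and serialize events from app.schemas directly. A small hedged example, assuming Pydantic v2 (model_dump_json; on v1 the equivalent is .json()):

from app.schemas import AgentStepEvent, StreamEventType

event = AgentStepEvent(
    review_id=42,                      # illustrative id
    step="fetch_files",
    status="completed",
    message="Fetched changed files",
)
payload = event.model_dump_json()      # JSON string, ready to push over a WebSocket
assert event.type == StreamEventType.AGENT_STEP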
backend/app/schemas/streaming.py (new file, +55 lines)
@@ -0,0 +1,55 @@
+"""Streaming events schemas"""
+
+from typing import Optional, Any, Dict, Literal
+from pydantic import BaseModel
+
+
+class StreamEventType:
+    """Stream event types"""
+    AGENT_START = "agent_start"
+    AGENT_UPDATE = "agent_update"
+    AGENT_STEP = "agent_step"
+    LLM_START = "llm_start"
+    LLM_STREAM = "llm_stream"
+    LLM_END = "llm_end"
+    AGENT_ERROR = "agent_error"
+    AGENT_COMPLETE = "agent_complete"
+
+
+class StreamEvent(BaseModel):
+    """Base streaming event"""
+    type: str
+    review_id: int
+    timestamp: str
+    data: Dict[str, Any]
+
+
+class AgentStepEvent(BaseModel):
+    """Agent step event"""
+    type: Literal["agent_step"] = "agent_step"
+    review_id: int
+    step: str  # fetch_pr_info, fetch_files, analyze_files, post_comments
+    status: str  # started, completed, failed
+    message: str
+    data: Optional[Dict[str, Any]] = None
+
+
+class LLMStreamEvent(BaseModel):
+    """LLM streaming event"""
+    type: Literal["llm_stream"] = "llm_stream"
+    review_id: int
+    file_path: Optional[str] = None
+    chunk: str  # Partial chunk of the LLM response
+    is_complete: bool = False
+
+
+class ReviewProgressEvent(BaseModel):
+    """Review progress event"""
+    type: Literal["review_progress"] = "review_progress"
+    review_id: int
+    total_files: int
+    analyzed_files: int
+    total_comments: int
+    current_step: str
+    message: str
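For context, a hedged sketch of the broadcast side described in the commit message. The actual task_worker and WebSocket plumbing from this commit are not included in this excerpt; ConnectionManager and its method names below are hypothetical, not the project's API, and serialization again assumes Pydantic v2.

from typing import Dict, Set

from fastapi import WebSocket

from app.schemas.streaming import StreamEvent


class ConnectionManager:
    """Tracks WebSocket connections per review and fans events out to them."""

    def __init__(self) -> None:
        self._connections: Dict[int, Set[WebSocket]] = {}

    async def connect(self, review_id: int, ws: WebSocket) -> None:
        await ws.accept()
        self._connections.setdefault(review_id, set()).add(ws)

    def disconnect(self, review_id: int, ws: WebSocket) -> None:
        self._connections.get(review_id, set()).discard(ws)

    async def broadcast(self, event: StreamEvent) -> None:
        # Serialize once, then push the same JSON payload to every client
        # currently subscribed to this review.
        payload = event.model_dump_json()
        for ws in self._connections.get(event.review_id, set()):
            await ws.send_text(payload)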