feat: Add LangGraph streaming with real-time UI updates

- Add streaming schemas and events
- Implement run_review_stream in ReviewerAgent
- Update task_worker to broadcast streaming events via WebSocket
- Create ReviewStream component for real-time progress visualization
- Integrate ReviewStream into ReviewDetail page
- Show agent steps, LLM messages, and progress in real time
This commit is contained in:
Primakov Alexandr Alexandrovich
2025-10-13 01:00:49 +03:00
parent 2ad11142ad
commit 4ab6400a87
6 changed files with 383 additions and 3 deletions

View File

@@ -485,4 +485,56 @@ class ReviewerAgent:
final_state = await self.graph.ainvoke(initial_state)
return final_state
async def run_review_stream(
    self,
    review_id: int,
    pr_number: int,
    repository_id: int,
    on_event: "Callable[[Dict[str, Any]], Awaitable[None]] | None" = None
) -> Dict[str, Any]:
    """Run the review workflow, emitting streaming progress events.

    Args:
        review_id: Database id of the review being executed.
        pr_number: Pull request number under review.
        repository_id: Database id of the repository the PR belongs to.
        on_event: Optional async callback awaited once per streamed event
            with a dict payload: {"type": "agent_step", "step": <node>,
            "data": <node output>} for graph node updates, or
            {"type": "llm_message", "message": <str>} for LLM chunks.

    Returns:
        The last node update produced by the graph, or the initial state
        if the graph produced no updates.
    """
    initial_state: ReviewState = {
        "review_id": review_id,
        "pr_number": pr_number,
        "repository_id": repository_id,
        "status": "pending",
        "files": [],
        "analyzed_files": [],
        "comments": [],
        "error": None,
        "git_service": None
    }

    final_state = None

    # NOTE: when stream_mode is a *list*, LangGraph yields (mode, payload)
    # 2-tuples rather than bare payloads. The previous implementation
    # checked `isinstance(event, dict)` directly, so the "updates" branch
    # never fired and no agent_step events were ever emitted.
    async for item in self.graph.astream(
        initial_state,
        stream_mode=["updates", "messages"]
    ):
        if isinstance(item, tuple) and len(item) == 2:
            mode, payload = item
        else:
            # Defensive fallback: single-mode streams yield bare updates.
            mode, payload = "updates", item

        if mode == "updates" and isinstance(payload, dict):
            # One entry per graph node that just finished: {node: output}.
            for node_name, node_data in payload.items():
                if on_event:
                    await on_event({
                        "type": "agent_step",
                        "step": node_name,
                        "data": node_data
                    })
                # Track the most recent node output as the final state.
                if isinstance(node_data, dict):
                    final_state = node_data
        elif mode == "messages":
            # LLM message chunks (typically a (chunk, metadata) pair).
            if on_event:
                await on_event({
                    "type": "llm_message",
                    "message": str(payload)
                })

    return final_state or initial_state