fix: Fix /api/version endpoint path and save all review events to DB
This commit is contained in:
parent
47bbb4ebc4
commit
8d231b49db
@ -118,7 +118,7 @@ async def health_check():
|
||||
return {"status": "healthy"}
|
||||
|
||||
|
||||
@app.get("/version")
|
||||
@app.get("/api/version")
|
||||
async def get_version():
|
||||
"""Get backend version"""
|
||||
try:
|
||||
@ -128,7 +128,8 @@ async def get_version():
|
||||
else:
|
||||
version = "unknown"
|
||||
return {"version": version}
|
||||
except Exception:
|
||||
except Exception as e:
|
||||
print(f"Error reading version: {e}")
|
||||
return {"version": "unknown"}
|
||||
|
||||
|
||||
|
||||
@ -169,6 +169,22 @@ class ReviewTaskWorker:
|
||||
# Send initial "review started" message
|
||||
logger.info(f" 📢 Отправка начального сообщения о старте review...")
|
||||
try:
|
||||
# Save initial event to database
|
||||
from app.models.review_event import ReviewEvent
|
||||
initial_db_event = ReviewEvent(
|
||||
review_id=review.id,
|
||||
event_type="review_started",
|
||||
message=f"Начало review для PR #{pull_request.pr_number}",
|
||||
data={
|
||||
"repository_id": repository.id,
|
||||
"repository_name": f"{repository.repo_owner}/{repository.repo_name}"
|
||||
}
|
||||
)
|
||||
db.add(initial_db_event)
|
||||
await db.commit()
|
||||
logger.info(f" 💾 Начальное событие сохранено в БД: {initial_db_event.id}")
|
||||
|
||||
# Broadcast initial message
|
||||
initial_message = {
|
||||
"type": "review_started",
|
||||
"review_id": review.id,
|
||||
@ -184,6 +200,8 @@ class ReviewTaskWorker:
|
||||
logger.info(f" ✅ Начальное сообщение отправлено: {len(manager.active_connections)} подключений")
|
||||
except Exception as e:
|
||||
logger.error(f" ❌ Ошибка отправки начального сообщения: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
|
||||
# Create event handler
|
||||
async def on_review_event(event: dict):
|
||||
@ -240,6 +258,19 @@ class ReviewTaskWorker:
|
||||
|
||||
# Send completion message
|
||||
try:
|
||||
# Save completion event to database
|
||||
from app.models.review_event import ReviewEvent
|
||||
completion_db_event = ReviewEvent(
|
||||
review_id=review.id,
|
||||
event_type="review_completed",
|
||||
message=f"Review завершен для PR #{pull_request.pr_number}",
|
||||
data={}
|
||||
)
|
||||
db.add(completion_db_event)
|
||||
await db.commit()
|
||||
logger.info(f" 💾 Событие завершения сохранено в БД: {completion_db_event.id}")
|
||||
|
||||
# Broadcast completion message
|
||||
completion_message = {
|
||||
"type": "review_completed",
|
||||
"review_id": review.id,
|
||||
@ -250,8 +281,11 @@ class ReviewTaskWorker:
|
||||
}
|
||||
}
|
||||
await manager.broadcast(completion_message)
|
||||
logger.info(f" 📢 Сообщение о завершении отправлено: {len(manager.active_connections)} подключений")
|
||||
except Exception as e:
|
||||
logger.error(f" ❌ Ошибка отправки сообщения о завершении: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
|
||||
|
||||
# Global worker instance
|
||||
|
||||
@ -0,0 +1,71 @@
|
||||
#!/bin/bash

# Automatic version bump script for the backend.
# Invoked from a pre-commit hook or manually.
# Usage: bump_version.sh [major|minor|patch]
# Without an argument the bump type is inferred from the last commit message.

VERSION_FILE="backend/VERSION"

# Bootstrap: create the version file on first run and stop.
if [ ! -f "$VERSION_FILE" ]; then
    echo "0.1.0" > "$VERSION_FILE"
    echo "✅ Создан файл версии: 0.1.0"
    exit 0
fi

# Read the current version string.
CURRENT_VERSION=$(cat "$VERSION_FILE")

# Split into MAJOR.MINOR.PATCH; default missing parts to 0 so a malformed
# file (e.g. "1.2") does not feed empty strings into the arithmetic below.
IFS='.' read -ra VERSION_PARTS <<< "$CURRENT_VERSION"
MAJOR="${VERSION_PARTS[0]:-0}"
MINOR="${VERSION_PARTS[1]:-0}"
PATCH="${VERSION_PARTS[2]:-0}"

# Determine the bump type: an explicit CLI argument wins, otherwise infer
# it from the conventional-commit prefix of the last commit message.
if [ $# -eq 1 ]; then
    VERSION_TYPE="$1"
else
    LAST_COMMIT=$(git log -1 --pretty=%B 2>/dev/null || echo "")

    if echo "$LAST_COMMIT" | grep -qiE "^(feat|feature):"; then
        VERSION_TYPE="minor"
    elif echo "$LAST_COMMIT" | grep -qiE "^(fix|bugfix):"; then
        VERSION_TYPE="patch"
    elif echo "$LAST_COMMIT" | grep -qiE "^(BREAKING|major):"; then
        VERSION_TYPE="major"
    else
        VERSION_TYPE="patch"
    fi
fi

# Apply the bump; unknown types fall through to a patch bump.
case "$VERSION_TYPE" in
    major)
        MAJOR=$((MAJOR + 1))
        MINOR=0
        PATCH=0
        ;;
    minor)
        MINOR=$((MINOR + 1))
        PATCH=0
        ;;
    patch|*)
        PATCH=$((PATCH + 1))
        ;;
esac

NEW_VERSION="$MAJOR.$MINOR.$PATCH"

# Persist the new version.
echo "$NEW_VERSION" > "$VERSION_FILE"

echo "📦 Версия обновлена: $CURRENT_VERSION → $NEW_VERSION"

# Stage the file when running inside a git hook (GIT_INDEX_FILE is set).
if [ -n "$GIT_INDEX_FILE" ]; then
    git add "$VERSION_FILE"
fi

exit 0
|
||||
|
||||
@ -1,137 +0,0 @@
|
||||
"""
|
||||
Тест стриминга LLM messages от LangGraph
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
from langgraph.graph import StateGraph, END
|
||||
from typing import TypedDict, Annotated
|
||||
import operator
|
||||
from langchain_ollama import OllamaLLM
|
||||
|
||||
|
||||
# Graph state for the streaming test: `messages` accumulates across node
# runs (merged via operator.add); `result` holds the final LLM answer text.
TestState = TypedDict(
    "TestState",
    {
        "messages": Annotated[list, operator.add],
        "result": str,
    },
)
|
||||
|
||||
|
||||
async def llm_node(state: TestState) -> TestState:
    """Graph node: make a single LLM call and record the answer in state."""
    print(" [LLM NODE] Вызов LLM...")

    model = OllamaLLM(
        model="qwen2.5-coder:3b",
        base_url="http://localhost:11434",
        temperature=0.7,
    )

    # Deliberately short prompt so the model answers quickly in the test.
    question = "Напиши короткую проверку кода на Python (не более 100 символов)"
    answer = await model.ainvoke(question)

    print(f" [LLM NODE] Ответ получен: {answer[:50]}...")

    updated: TestState = {
        "messages": [{"role": "ai", "content": answer}],
        "result": answer,
    }
    return updated
|
||||
|
||||
|
||||
def create_test_graph():
    """Build and compile a one-node test graph wrapping the LLM call."""
    graph_builder = StateGraph(TestState)
    graph_builder.add_node("llm_call", llm_node)
    graph_builder.set_entry_point("llm_call")
    graph_builder.add_edge("llm_call", END)
    compiled = graph_builder.compile()
    return compiled
|
||||
|
||||
|
||||
async def test_with_llm():
    """Stream a one-node LangGraph run and log 'updates' and 'messages' events.

    Exercises graph.astream with stream_mode=["updates", "messages"] and
    prints a human-readable trace of every event received.
    """
    print("\n" + "="*80)
    print("ТЕСТ СТРИМИНГА LLM MESSAGES")
    print("="*80)

    graph = create_test_graph()

    # Empty starting state; the LLM node fills both fields.
    initial_state: TestState = {
        "messages": [],
        "result": ""
    }

    # Test: combined 'updates' + 'messages' stream modes.
    print(f"\n🔍 Тест: stream_mode=['updates', 'messages']")
    print("-" * 80)

    event_count = 0
    messages_count = 0

    # With multiple stream modes, each event arrives as a (mode, payload) tuple.
    async for event in graph.astream(initial_state, stream_mode=["updates", "messages"]):
        event_count += 1

        if isinstance(event, tuple) and len(event) >= 2:
            event_type, event_data = event[0], event[1]

            print(f"\n📨 Event #{event_count}")
            print(f" Type: {event_type}")
            print(f" Data type: {type(event_data)}")

            if event_type == 'updates':
                # Node-level state update: payload maps node name -> state delta.
                print(f" ✅ Node update")
                if isinstance(event_data, dict):
                    for node_name in event_data.keys():
                        print(f" Node: {node_name}")

            elif event_type == 'messages':
                # LLM message events emitted while the node's call runs.
                messages_count += 1
                print(f" 💬 LLM Messages (#{messages_count})")

                if isinstance(event_data, (list, tuple)):
                    for i, msg in enumerate(event_data):
                        print(f" Message {i+1}:")

                        # Extract the content, handling message objects,
                        # plain dicts, and anything else defensively.
                        if hasattr(msg, 'content'):
                            content = msg.content
                            print(f" Content: {content[:100]}...")
                        elif isinstance(msg, dict):
                            print(f" Dict: {msg}")
                        else:
                            print(f" Type: {type(msg)}")
                            print(f" Str: {str(msg)[:100]}...")

    # Summary of how many events of each kind were observed.
    print(f"\n" + "="*80)
    print(f"✅ Всего событий: {event_count}")
    print(f"✅ Messages событий: {messages_count}")
    print("="*80)
||||
|
||||
|
||||
async def main():
    """Entry point: verify Ollama is reachable, then run the streaming test."""
    banner = "=" * 80
    print("\n" + banner)
    print("ТЕСТИРОВАНИЕ LLM STREAMING В LANGGRAPH")
    print(banner)
    print("\nПроверка Ollama...")

    try:
        # Probe the Ollama server with a trivial call before the real test.
        from langchain_ollama import OllamaLLM
        probe = OllamaLLM(model="qwen2.5-coder:3b", base_url="http://localhost:11434")
        result = await probe.ainvoke("test")
        print("✅ Ollama работает!")
    except Exception as e:
        # Bail out early with actionable hints when Ollama is unreachable.
        print(f"❌ Ошибка подключения к Ollama: {e}")
        print("\n⚠️ Убедитесь что Ollama запущен: ollama serve")
        print("⚠️ И модель загружена: ollama pull qwen2.5-coder:3b\n")
        return

    await test_with_llm()

    print("\n✅ Тестирование завершено\n")
|
||||
|
||||
|
||||
# Script entry point: run the async test driver.
if __name__ == "__main__":
    asyncio.run(main())
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user