feat: 自动提交 - 周五 2025/09/19 19:42:44.04
This commit is contained in:
@@ -11,9 +11,10 @@ from datetime import datetime, timedelta
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from ..core.database import db_manager
|
||||
from ..core.models import Conversation
|
||||
from ..core.models import Conversation, WorkOrder, WorkOrderSuggestion, KnowledgeEntry
|
||||
from ..core.redis_manager import redis_manager
|
||||
from ..config.config import Config
|
||||
from sqlalchemy import and_, or_, desc
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -375,3 +376,330 @@ class ConversationHistoryManager:
|
||||
except Exception as e:
|
||||
logger.error(f"清理旧对话记录失败: {e}")
|
||||
return 0
|
||||
|
||||
def get_workorder_complete_timeline(
    self,
    work_order_id: int,
    include_ai_suggestions: bool = True,
    include_feishu_sync: bool = True,
    limit: int = 20
) -> List[Dict[str, Any]]:
    """Return the merged timeline of a work order: conversations, AI
    suggestions and the Feishu sync event, newest first.

    Args:
        work_order_id: Primary key of the work order to inspect.
        include_ai_suggestions: Also include WorkOrderSuggestion records.
        include_feishu_sync: Also include a synthetic "feishu_sync" entry
            derived from the work order's feishu_record_id.
        limit: Per-source fetch cap AND the final size cap — each source is
            queried with this limit, then the merged list is truncated to
            it again.

    Returns:
        List of dicts, each tagged with a "type" key of "conversation",
        "ai_suggestion" or "feishu_sync", sorted by "timestamp" descending.
        Returns [] on any error (the error is logged).
    """
    try:
        timeline = []

        with db_manager.get_session() as session:
            # 1. Base conversation records for this work order.
            conversations = session.query(Conversation).filter(
                Conversation.work_order_id == work_order_id
            ).order_by(Conversation.timestamp.desc()).limit(limit).all()

            for conv in conversations:
                timeline.append({
                    "id": conv.id,
                    "type": "conversation",
                    "timestamp": conv.timestamp,
                    "user_message": conv.user_message,
                    "assistant_response": conv.assistant_response,
                    "confidence_score": conv.confidence_score,
                    "response_time": conv.response_time,
                    # knowledge_used is stored as a JSON string; decode it,
                    # defaulting to an empty list when absent.
                    "knowledge_used": json.loads(conv.knowledge_used) if conv.knowledge_used else []
                })

            # 2. AI suggestion records.
            if include_ai_suggestions:
                suggestions = session.query(WorkOrderSuggestion).filter(
                    WorkOrderSuggestion.work_order_id == work_order_id
                ).order_by(WorkOrderSuggestion.created_at.desc()).limit(limit).all()

                for suggestion in suggestions:
                    timeline.append({
                        # String id prefixed with "suggestion_" so it cannot
                        # collide with integer conversation ids.
                        "id": f"suggestion_{suggestion.id}",
                        "type": "ai_suggestion",
                        "timestamp": suggestion.created_at,
                        "ai_suggestion": suggestion.ai_suggestion,
                        "human_resolution": suggestion.human_resolution,
                        "ai_similarity": suggestion.ai_similarity,
                        "approved": suggestion.approved,
                        "use_human_resolution": suggestion.use_human_resolution,
                        "updated_at": suggestion.updated_at
                    })

            # 3. Feishu sync event, inferred from the work order's
            # feishu_record_id. The work order's creation time stands in
            # for the sync timestamp.
            if include_feishu_sync:
                work_order = session.query(WorkOrder).filter(
                    WorkOrder.id == work_order_id
                ).first()

                if work_order and work_order.feishu_record_id:
                    timeline.append({
                        "id": f"feishu_{work_order.feishu_record_id}",
                        "type": "feishu_sync",
                        "timestamp": work_order.created_at,
                        "feishu_record_id": work_order.feishu_record_id,
                        "order_id": work_order.order_id,
                        "title": work_order.title,
                        "description": work_order.description,
                        "category": work_order.category,
                        "priority": work_order.priority,
                        "status": work_order.status,
                        "source": work_order.source
                    })

        # Merge all sources newest-first, then apply the overall cap.
        timeline.sort(key=lambda x: x["timestamp"], reverse=True)

        return timeline[:limit]

    except Exception as e:
        logger.error(f"获取工单完整时间线失败: {e}")
        return []
|
||||
|
||||
def get_ai_suggestion_context(
    self,
    work_order_id: int,
    suggestion_id: Optional[int] = None
) -> Dict[str, Any]:
    """Collect the conversational context around a work order's AI suggestions.

    Args:
        work_order_id: Primary key of the work order.
        suggestion_id: Optional id of one specific suggestion. When given
            and found, its details are returned under "target_suggestion".
            (Previously this parameter was accepted but silently ignored.)

    Returns:
        Dict with keys "work_order_info", "conversation_history" (latest 10),
        "ai_suggestions" (latest 5), "knowledge_base" (up to 5 related
        entries), plus "target_suggestion" when requested and found.
        Returns {} on any error (the error is logged).
    """
    try:
        context = {
            "work_order_info": {},
            "conversation_history": [],
            "ai_suggestions": [],
            "knowledge_base": []
        }

        with db_manager.get_session() as session:
            # 1. Work order header info.
            work_order = session.query(WorkOrder).filter(
                WorkOrder.id == work_order_id
            ).first()

            if work_order:
                context["work_order_info"] = {
                    "id": work_order.id,
                    "order_id": work_order.order_id,
                    "title": work_order.title,
                    "description": work_order.description,
                    "category": work_order.category,
                    "priority": work_order.priority,
                    "status": work_order.status,
                    "created_at": work_order.created_at.isoformat(),
                    "feishu_record_id": work_order.feishu_record_id
                }

            # 2. Latest related conversations.
            conversations = session.query(Conversation).filter(
                Conversation.work_order_id == work_order_id
            ).order_by(Conversation.timestamp.desc()).limit(10).all()

            for conv in conversations:
                context["conversation_history"].append({
                    "id": conv.id,
                    "user_message": conv.user_message,
                    "assistant_response": conv.assistant_response,
                    "timestamp": conv.timestamp.isoformat(),
                    "confidence_score": conv.confidence_score
                })

            # 3. Latest AI suggestion history.
            suggestions = session.query(WorkOrderSuggestion).filter(
                WorkOrderSuggestion.work_order_id == work_order_id
            ).order_by(WorkOrderSuggestion.created_at.desc()).limit(5).all()

            for suggestion in suggestions:
                context["ai_suggestions"].append({
                    "id": suggestion.id,
                    "ai_suggestion": suggestion.ai_suggestion,
                    "human_resolution": suggestion.human_resolution,
                    "ai_similarity": suggestion.ai_similarity,
                    "approved": suggestion.approved,
                    "use_human_resolution": suggestion.use_human_resolution,
                    "created_at": suggestion.created_at.isoformat()
                })

            # 3b. Resolve the specific suggestion the caller asked about
            # (fix: suggestion_id used to be ignored). Scoped to the same
            # work order so a foreign id cannot leak另一 order's data.
            if suggestion_id is not None:
                target = session.query(WorkOrderSuggestion).filter(
                    and_(
                        WorkOrderSuggestion.id == suggestion_id,
                        WorkOrderSuggestion.work_order_id == work_order_id
                    )
                ).first()
                if target:
                    context["target_suggestion"] = {
                        "id": target.id,
                        "ai_suggestion": target.ai_suggestion,
                        "human_resolution": target.human_resolution,
                        "ai_similarity": target.ai_similarity,
                        "approved": target.approved,
                        "use_human_resolution": target.use_human_resolution,
                        "created_at": target.created_at.isoformat()
                    }

            # 4. Related knowledge-base entries: same category, or a
            # question containing the first 20 chars of the title.
            if work_order:
                match_filters = [KnowledgeEntry.category == work_order.category]
                # Guard: title may be None/empty; slicing None raised a
                # TypeError before, which the broad except turned into {}.
                if work_order.title:
                    match_filters.append(
                        KnowledgeEntry.question.contains(work_order.title[:20])
                    )

                knowledge_entries = session.query(KnowledgeEntry).filter(
                    and_(
                        KnowledgeEntry.is_active == True,
                        or_(*match_filters)
                    )
                ).limit(5).all()

                for entry in knowledge_entries:
                    context["knowledge_base"].append({
                        "id": entry.id,
                        "question": entry.question,
                        "answer": entry.answer,
                        "category": entry.category,
                        "confidence_score": entry.confidence_score,
                        "is_verified": entry.is_verified
                    })

        return context

    except Exception as e:
        logger.error(f"获取AI建议对话上下文失败: {e}")
        return {}
|
||||
|
||||
def search_conversations_by_content(
    self,
    search_query: str,
    work_order_id: Optional[int] = None,
    conversation_type: Optional[str] = None,
    limit: int = 20
) -> List[Dict[str, Any]]:
    """Full-text search over conversations and AI suggestions.

    Args:
        search_query: Substring matched against message / suggestion text.
        work_order_id: Optional filter restricting results to one work order.
        conversation_type: None, "" or "all" searches both sources;
            "conversation" only base conversations; "ai_suggestion" only
            suggestion records. (Fix: the conversation search used to run
            unconditionally, so type="ai_suggestion" still returned plain
            conversations, and "all" excluded suggestions.)
        limit: Per-source fetch cap AND final size cap of the merged list.

    Returns:
        List of result dicts tagged with "type", sorted by "timestamp"
        descending. Returns [] on any error (the error is logged).
    """
    try:
        results = []

        with db_manager.get_session() as session:
            # Search base conversations.
            if conversation_type in (None, "", "all", "conversation"):
                conv_query = session.query(Conversation)
                if work_order_id:
                    conv_query = conv_query.filter(Conversation.work_order_id == work_order_id)

                conversations = conv_query.filter(
                    or_(
                        Conversation.user_message.contains(search_query),
                        Conversation.assistant_response.contains(search_query)
                    )
                ).order_by(Conversation.timestamp.desc()).limit(limit).all()

                for conv in conversations:
                    results.append({
                        "id": conv.id,
                        "type": "conversation",
                        "timestamp": conv.timestamp,
                        "user_message": conv.user_message,
                        "assistant_response": conv.assistant_response,
                        "work_order_id": conv.work_order_id,
                        "confidence_score": conv.confidence_score
                    })

            # Search AI suggestions.
            if conversation_type in (None, "", "all", "ai_suggestion"):
                suggestion_query = session.query(WorkOrderSuggestion)
                if work_order_id:
                    suggestion_query = suggestion_query.filter(
                        WorkOrderSuggestion.work_order_id == work_order_id
                    )

                suggestions = suggestion_query.filter(
                    or_(
                        WorkOrderSuggestion.ai_suggestion.contains(search_query),
                        WorkOrderSuggestion.human_resolution.contains(search_query)
                    )
                ).order_by(WorkOrderSuggestion.created_at.desc()).limit(limit).all()

                for suggestion in suggestions:
                    results.append({
                        "id": f"suggestion_{suggestion.id}",
                        "type": "ai_suggestion",
                        "timestamp": suggestion.created_at,
                        "ai_suggestion": suggestion.ai_suggestion,
                        "human_resolution": suggestion.human_resolution,
                        "work_order_id": suggestion.work_order_id,
                        "ai_similarity": suggestion.ai_similarity,
                        "approved": suggestion.approved
                    })

        # Merge both sources newest-first, then apply the overall cap.
        results.sort(key=lambda x: x["timestamp"], reverse=True)

        return results[:limit]

    except Exception as e:
        logger.error(f"搜索对话记录失败: {e}")
        return []
|
||||
|
||||
def get_conversation_analytics(
    self,
    work_order_id: Optional[int] = None,
    days: int = 7
) -> Dict[str, Any]:
    """Aggregate conversation and AI-suggestion statistics over a window.

    Args:
        work_order_id: Optional filter restricting stats to one work order.
        days: Size of the look-back window ending now.

    Returns:
        Dict with "period_days", "conversations" (counts/averages),
        "ai_suggestions" (counts/averages) and "performance" (rates).
        Returns {} on any error (the error is logged).
    """
    try:
        # NOTE(review): naive local time — confirm DB timestamps use the
        # same zone, otherwise the window boundary drifts.
        cutoff_date = datetime.now() - timedelta(days=days)

        with db_manager.get_session() as session:
            analytics = {
                "period_days": days,
                "conversations": {},
                "ai_suggestions": {},
                "performance": {}
            }

            # Conversation statistics.
            conv_query = session.query(Conversation)
            if work_order_id:
                conv_query = conv_query.filter(Conversation.work_order_id == work_order_id)

            conversations = conv_query.filter(
                Conversation.timestamp >= cutoff_date
            ).all()

            analytics["conversations"] = {
                "total": len(conversations),
                "avg_confidence": 0,
                "avg_response_time": 0,
                "high_confidence_count": 0
            }

            if conversations:
                # Fix: filter on "is not None" so legitimate 0.0 scores and
                # zero response times are counted instead of being dropped
                # by truthiness, which inflated the averages.
                confidences = [c.confidence_score for c in conversations if c.confidence_score is not None]
                response_times = [c.response_time for c in conversations if c.response_time is not None]

                if confidences:
                    analytics["conversations"]["avg_confidence"] = sum(confidences) / len(confidences)
                    analytics["conversations"]["high_confidence_count"] = len([c for c in confidences if c >= 0.8])

                if response_times:
                    analytics["conversations"]["avg_response_time"] = sum(response_times) / len(response_times)

            # AI suggestion statistics.
            suggestion_query = session.query(WorkOrderSuggestion)
            if work_order_id:
                suggestion_query = suggestion_query.filter(
                    WorkOrderSuggestion.work_order_id == work_order_id
                )

            suggestions = suggestion_query.filter(
                WorkOrderSuggestion.created_at >= cutoff_date
            ).all()

            # Computed once; reused for the approval rate below.
            approved_count = len([s for s in suggestions if s.approved])

            analytics["ai_suggestions"] = {
                "total": len(suggestions),
                "approved_count": approved_count,
                "avg_similarity": 0,
                "human_resolution_count": len([s for s in suggestions if s.use_human_resolution])
            }

            if suggestions:
                similarities = [s.ai_similarity for s in suggestions if s.ai_similarity is not None]
                if similarities:
                    analytics["ai_suggestions"]["avg_similarity"] = sum(similarities) / len(similarities)

            # Performance metrics.
            analytics["performance"] = {
                "conversation_success_rate": 0,
                "ai_suggestion_approval_rate": 0,
                # TODO: knowledge_base_usage_rate is never computed yet.
                "knowledge_base_usage_rate": 0
            }

            if conversations:
                successful_convs = len([
                    c for c in conversations
                    if c.confidence_score is not None and c.confidence_score >= 0.5
                ])
                analytics["performance"]["conversation_success_rate"] = successful_convs / len(conversations)

            if suggestions:
                analytics["performance"]["ai_suggestion_approval_rate"] = approved_count / len(suggestions)

            return analytics

    except Exception as e:
        logger.error(f"获取对话分析数据失败: {e}")
        return {}
|
||||
|
||||
@@ -1,17 +1,23 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
对话管理蓝图
|
||||
处理对话相关的API路由
|
||||
处理对话相关的API路由,整合飞书工单和AI建议
|
||||
"""
|
||||
|
||||
from flask import Blueprint, request, jsonify
|
||||
from src.core.database import db_manager
|
||||
from src.core.models import Conversation
|
||||
from src.core.models import Conversation, WorkOrder, WorkOrderSuggestion
|
||||
from src.core.query_optimizer import query_optimizer
|
||||
from src.dialogue.conversation_history import ConversationHistoryManager
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
# Module-level logger for this blueprint.
logger = logging.getLogger(__name__)

# Blueprint grouping all conversation-related API routes under /api/conversations.
conversations_bp = Blueprint('conversations', __name__, url_prefix='/api/conversations')

# Shared conversation-history manager used by all route handlers below.
history_manager = ConversationHistoryManager()
|
||||
|
||||
@conversations_bp.route('')
|
||||
def get_conversations():
|
||||
"""获取对话历史列表(分页)- 优化版"""
|
||||
@@ -197,3 +203,136 @@ def migrate_merge_conversations():
|
||||
})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
@conversations_bp.route('/workorder/<int:work_order_id>/timeline')
def get_workorder_timeline(work_order_id):
    """Return the merged timeline of a work order as JSON: conversations,
    AI suggestions and Feishu sync events, newest first.

    Query params: include_ai_suggestions / include_feishu_sync (default
    'true'), limit (default 20).
    """
    try:
        args = request.args
        with_suggestions = args.get('include_ai_suggestions', 'true').lower() == 'true'
        with_feishu = args.get('include_feishu_sync', 'true').lower() == 'true'
        max_items = args.get('limit', 20, type=int)

        entries = history_manager.get_workorder_complete_timeline(
            work_order_id=work_order_id,
            include_ai_suggestions=with_suggestions,
            include_feishu_sync=with_feishu,
            limit=max_items,
        )

        payload = {
            'success': True,
            'work_order_id': work_order_id,
            'timeline': entries,
            'total_count': len(entries),
        }
        return jsonify(payload)

    except Exception as e:
        logger.error(f"获取工单时间线失败: {e}")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
@conversations_bp.route('/workorder/<int:work_order_id>/context')
def get_workorder_context(work_order_id):
    """Return the AI-suggestion conversation context of a work order as JSON.

    Query params: suggestion_id (optional int).
    """
    try:
        sid = request.args.get('suggestion_id', type=int)

        ctx = history_manager.get_ai_suggestion_context(
            work_order_id=work_order_id,
            suggestion_id=sid,
        )

        return jsonify({
            'success': True,
            'work_order_id': work_order_id,
            'context': ctx,
        })

    except Exception as e:
        logger.error(f"获取工单上下文失败: {e}")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
@conversations_bp.route('/workorder/<int:work_order_id>/summary')
def get_workorder_summary(work_order_id):
    """Return a per-type interaction summary for a work order as JSON.

    404s when the work order has no timeline entries at all.
    """
    try:
        # Pull a generous slice of the timeline to count from.
        timeline = history_manager.get_workorder_complete_timeline(
            work_order_id=work_order_id,
            include_ai_suggestions=True,
            include_feishu_sync=True,
            limit=50,
        )

        if not timeline:
            return jsonify({"error": "没有找到对话记录"}), 404

        # Tally entries per type in a single pass.
        counts = {}
        for entry in timeline:
            counts[entry["type"]] = counts.get(entry["type"], 0) + 1

        summary = {
            "work_order_id": work_order_id,
            "total_interactions": len(timeline),
            "conversations": counts.get("conversation", 0),
            "ai_suggestions": counts.get("ai_suggestion", 0),
            "feishu_syncs": counts.get("feishu_sync", 0),
            # Timeline is newest-first, so entry 0 carries the latest timestamp.
            "generated_at": timeline[0]["timestamp"].isoformat() if timeline else None,
        }

        return jsonify({
            'success': True,
            'work_order_id': work_order_id,
            'summary': summary,
        })

    except Exception as e:
        logger.error(f"获取工单摘要失败: {e}")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
@conversations_bp.route('/search')
def search_conversations():
    """Full-text search over conversations and AI suggestions, as JSON.

    Query params: q (required), work_order_id (optional int),
    type (conversation | ai_suggestion | all), limit (default 20).
    """
    try:
        args = request.args
        search_query = args.get('q', '')

        if not search_query:
            return jsonify({"error": "搜索查询不能为空"}), 400

        matches = history_manager.search_conversations_by_content(
            search_query=search_query,
            work_order_id=args.get('work_order_id', type=int),
            conversation_type=args.get('type'),  # conversation, ai_suggestion, all
            limit=args.get('limit', 20, type=int),
        )

        return jsonify({
            'success': True,
            'query': search_query,
            'results': matches,
            'total_count': len(matches),
        })

    except Exception as e:
        logger.error(f"搜索对话记录失败: {e}")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
@conversations_bp.route('/analytics')
def get_conversation_analytics():
    """Return aggregate conversation/AI-suggestion analytics as JSON.

    Query params: work_order_id (optional int), days (default 7).
    """
    try:
        stats = history_manager.get_conversation_analytics(
            work_order_id=request.args.get('work_order_id', type=int),
            days=request.args.get('days', 7, type=int),
        )

        return jsonify({
            'success': True,
            'analytics': stats,
        })

    except Exception as e:
        logger.error(f"获取对话分析数据失败: {e}")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
Reference in New Issue
Block a user