修复AI建议逻辑和字段映射问题
- 修复AI建议基于问题描述而不是处理过程生成 - 修复工单详情页面显示逻辑 - 修复飞书时间字段处理(毫秒时间戳转换) - 优化字段映射和转换逻辑 - 添加飞书集成功能 - 改进对话历史合并功能 - 优化系统优化反馈机制
This commit is contained in:
@@ -8,6 +8,7 @@ from flask import Blueprint, request, jsonify
|
||||
from src.core.database import db_manager
|
||||
from src.core.models import Conversation
|
||||
from src.core.query_optimizer import query_optimizer
|
||||
from datetime import timedelta
|
||||
|
||||
conversations_bp = Blueprint('conversations', __name__, url_prefix='/api/conversations')
|
||||
|
||||
@@ -27,6 +28,10 @@ def get_conversations():
|
||||
user_id=user_id, date_filter=date_filter
|
||||
)
|
||||
|
||||
# 规范化:移除不存在的user_id字段,避免前端误用
|
||||
for conv in result.get('conversations', []):
|
||||
if 'user_id' in conv and conv['user_id'] is None:
|
||||
conv.pop('user_id', None)
|
||||
return jsonify(result)
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
@@ -40,10 +45,11 @@ def get_conversation_detail(conversation_id):
|
||||
if not conv:
|
||||
return jsonify({"error": "对话不存在"}), 404
|
||||
|
||||
# Conversation模型没有user_id字段,这里用占位或由外层推断
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'id': conv.id,
|
||||
'user_id': conv.user_id,
|
||||
'user_id': None,
|
||||
'user_message': conv.user_message,
|
||||
'assistant_response': conv.assistant_response,
|
||||
'timestamp': conv.timestamp.isoformat() if conv.timestamp else None,
|
||||
@@ -88,3 +94,106 @@ def clear_all_conversations():
|
||||
return jsonify({"success": True, "message": "对话历史已清空"})
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
@conversations_bp.route('/migrate-merge', methods=['POST'])
def migrate_merge_conversations():
    """One-off migration: merge historically split user/assistant rows into one.

    Rules:
    - Only rows with exactly one side filled are considered (user_only or
      assistant_only).
    - A user_only row is merged with the nearest following assistant_only row
      (same work order, within 5 minutes).
    - An assistant_only row may also be merged backwards into the preceding
      user_only row.
    - The absorbed row is deleted after the merge.
    - Safe to re-run (idempotent): merged rows no longer have an empty side.
    """
    try:
        merged_pairs = 0
        deleted_rows = 0
        time_threshold_seconds = 300
        # Set instead of list: membership is tested inside the scan loops,
        # so lookups are O(1) rather than the list's O(n) (O(n^2) overall).
        to_delete_ids = set()
        with db_manager.get_session() as session:
            conversations = session.query(Conversation).order_by(
                Conversation.timestamp.asc(), Conversation.id.asc()
            ).all()
            total = len(conversations)
            i = 0

            def is_empty(text: str) -> bool:
                # Treat None and whitespace-only strings as empty.
                return (text is None) or (str(text).strip() == '')

            while i < total:
                c = conversations[i]
                user_only = (not is_empty(c.user_message)) and is_empty(c.assistant_response)
                assistant_only = (not is_empty(c.assistant_response)) and is_empty(c.user_message)

                if user_only:
                    # Scan forward for a matching assistant_only row.
                    j = i + 1
                    while j < total:
                        n = conversations[j]
                        # Skip rows already absorbed by an earlier merge.
                        if n.id in to_delete_ids:
                            j += 1
                            continue
                        # Past the time window: stop trying.
                        if c.timestamp and n.timestamp and (n.timestamp - c.timestamp).total_seconds() > time_threshold_seconds:
                            break
                        # Same work order (both None also counts as a match).
                        same_wo = (c.work_order_id == n.work_order_id) or (c.work_order_id is None and n.work_order_id is None)
                        if same_wo and (not is_empty(n.assistant_response)) and is_empty(n.user_message):
                            # Merge the assistant half into the user row.
                            c.assistant_response = n.assistant_response
                            if c.response_time is None and c.timestamp and n.timestamp:
                                try:
                                    c.response_time = max(0.0, (n.timestamp - c.timestamp).total_seconds() * 1000.0)
                                except Exception:
                                    pass
                            # Inherit auxiliary metadata when missing.
                            if (not c.confidence_score) and n.confidence_score is not None:
                                c.confidence_score = n.confidence_score
                            if (not c.knowledge_used) and n.knowledge_used:
                                c.knowledge_used = n.knowledge_used
                            session.add(c)
                            to_delete_ids.add(n.id)
                            merged_pairs += 1
                            break
                        j += 1

                elif assistant_only:
                    # Scan backward for the nearest un-absorbed user_only row.
                    j = i - 1
                    while j >= 0:
                        p = conversations[j]
                        if p.id in to_delete_ids:
                            j -= 1
                            continue
                        if p.timestamp and c.timestamp and (c.timestamp - p.timestamp).total_seconds() > time_threshold_seconds:
                            break
                        same_wo = (c.work_order_id == p.work_order_id) or (c.work_order_id is None and p.work_order_id is None)
                        if same_wo and (not is_empty(p.user_message)) and is_empty(p.assistant_response):
                            p.assistant_response = c.assistant_response
                            if p.response_time is None and p.timestamp and c.timestamp:
                                try:
                                    p.response_time = max(0.0, (c.timestamp - p.timestamp).total_seconds() * 1000.0)
                                except Exception:
                                    pass
                            if (not p.confidence_score) and c.confidence_score is not None:
                                p.confidence_score = c.confidence_score
                            if (not p.knowledge_used) and c.knowledge_used:
                                p.knowledge_used = c.knowledge_used
                            session.add(p)
                            to_delete_ids.add(c.id)
                            merged_pairs += 1
                            break
                        j -= 1

                i += 1

            if to_delete_ids:
                deleted_rows = session.query(Conversation).filter(
                    Conversation.id.in_(to_delete_ids)
                ).delete(synchronize_session=False)
                session.commit()

        return jsonify({
            'success': True,
            'merged_pairs': merged_pairs,
            'deleted_rows': deleted_rows
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
304
src/web/blueprints/feishu_sync.py
Normal file
304
src/web/blueprints/feishu_sync.py
Normal file
@@ -0,0 +1,304 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
飞书同步蓝图
|
||||
处理飞书多维表格与工单系统的同步
|
||||
"""
|
||||
|
||||
from flask import Blueprint, request, jsonify
|
||||
from src.integrations.feishu_client import FeishuClient
|
||||
from src.integrations.workorder_sync import WorkOrderSyncService
|
||||
from src.integrations.config_manager import config_manager
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
feishu_sync_bp = Blueprint('feishu_sync', __name__, url_prefix='/api/feishu-sync')
|
||||
|
||||
# 全局同步服务实例
|
||||
sync_service = None
|
||||
|
||||
def get_sync_service():
    """Return the lazily-initialized WorkOrderSyncService singleton.

    Reads the Feishu credentials from the config manager on first use and
    builds the client/service pair.

    Raises:
        RuntimeError: when any of the four required settings is missing.
    """
    global sync_service
    if sync_service is None:
        # Pull the Feishu settings from the central config manager.
        feishu_config = config_manager.get_feishu_config()

        required = ("app_id", "app_secret", "app_token", "table_id")
        if not all(feishu_config.get(key) for key in required):
            # RuntimeError (a subclass of Exception, so existing broad
            # handlers still catch it) instead of a bare, uncatchable-by-type
            # Exception.
            raise RuntimeError("飞书配置不完整,请先配置飞书应用信息")

        feishu_client = FeishuClient(feishu_config["app_id"], feishu_config["app_secret"])
        sync_service = WorkOrderSyncService(feishu_client, feishu_config["app_token"], feishu_config["table_id"])

    return sync_service
|
||||
|
||||
@feishu_sync_bp.route('/config', methods=['GET', 'POST'])
def manage_config():
    """Read (GET) or update (POST) the Feishu sync configuration."""
    if request.method == 'GET':
        # Return a summary of the current configuration.
        try:
            config_summary = config_manager.get_config_summary()
            return jsonify({
                "success": True,
                "config": config_summary
            })
        except Exception as e:
            logger.error(f"获取配置失败: {e}")
            return jsonify({"error": str(e)}), 500

    # POST: update the configuration.
    try:
        # get_json() yields None (or raises) for a missing/invalid body;
        # fall back to {} so the validation below answers 400 instead of
        # crashing into the generic 500 handler.
        data = request.get_json(silent=True) or {}
        app_id = data.get('app_id')
        app_secret = data.get('app_secret')
        app_token = data.get('app_token')
        table_id = data.get('table_id')

        if not all([app_id, app_secret, app_token, table_id]):
            return jsonify({"error": "缺少必要配置参数"}), 400

        # Persist via the config manager.
        success = config_manager.update_feishu_config(
            app_id=app_id,
            app_secret=app_secret,
            app_token=app_token,
            table_id=table_id
        )

        if success:
            # Drop the cached service so the next request rebuilds it
            # with the new credentials.
            global sync_service
            sync_service = None

            return jsonify({
                "success": True,
                "message": "配置更新成功"
            })
        return jsonify({"error": "配置更新失败"}), 500

    except Exception as e:
        logger.error(f"更新飞书配置失败: {e}")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
@feishu_sync_bp.route('/sync-from-feishu', methods=['POST'])
def sync_from_feishu():
    """Pull work-order records from Feishu into the local system."""
    try:
        payload = request.get_json() or {}
        generate_ai = payload.get('generate_ai_suggestions', True)
        limit = payload.get('limit', 10)

        service = get_sync_service()
        result = service.sync_from_feishu(generate_ai_suggestions=generate_ai, limit=limit)

        # Guard clause: surface the service-level error as a 500.
        if not result.get("success"):
            return jsonify({"error": result.get("error")}), 500

        message = f"同步完成:创建 {result['created_count']} 条,更新 {result['updated_count']} 条"
        if result.get('ai_suggestions_generated'):
            message += ",AI建议已生成并更新到飞书表格"

        return jsonify({
            "success": True,
            "message": message,
            "details": result
        })

    except Exception as e:
        logger.error(f"从飞书同步失败: {e}")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
@feishu_sync_bp.route('/sync-to-feishu/<int:workorder_id>', methods=['POST'])
def sync_to_feishu(workorder_id):
    """Push a single local work order up to the Feishu table."""
    try:
        outcome = get_sync_service().sync_to_feishu(workorder_id)
        if outcome.get("success"):
            return jsonify({"success": True, "message": "同步到飞书成功"})
        return jsonify({"error": outcome.get("error")}), 500
    except Exception as e:
        logger.error(f"同步到飞书失败: {e}")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
@feishu_sync_bp.route('/status')
def get_sync_status():
    """Report the current synchronization status."""
    try:
        service = get_sync_service()
        return jsonify({"success": True, "status": service.get_sync_status()})
    except Exception as e:
        logger.error(f"获取同步状态失败: {e}")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
@feishu_sync_bp.route('/test-connection')
def test_connection():
    """Verify Feishu connectivity and, on success, attach table field info."""
    try:
        # First pass: connectivity check via the config manager.
        result = config_manager.test_feishu_connection()

        if result.get("success"):
            # Best-effort enrichment: fetch the table's field metadata.
            try:
                service = get_sync_service()

                # Second pass: the client's own connection test.
                check = service.feishu_client.test_connection()
                if not check.get("success"):
                    return jsonify({
                        "success": False,
                        "message": f"飞书连接测试失败: {check.get('message')}"
                    }), 400

                fields_info = service.feishu_client.get_table_fields(
                    service.app_token, service.table_id
                )
                if fields_info.get("code") == 0:
                    result["fields"] = fields_info.get("data", {}).get("items", [])
            except Exception as e:
                # Field info is optional; log and return the base result.
                logger.warning(f"获取表格字段信息失败: {e}")

        return jsonify(result)

    except Exception as e:
        logger.error(f"测试飞书连接失败: {e}")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
@feishu_sync_bp.route('/create-workorder', methods=['POST'])
def create_workorder_from_feishu():
    """Create a local work order from a single Feishu record."""
    try:
        # Tolerate a missing/invalid JSON body so the record_id check below
        # responds 400 rather than crashing into the 500 handler
        # (request.get_json() returns None for an absent body).
        data = request.get_json(silent=True) or {}
        record_id = data.get('record_id')

        if not record_id:
            return jsonify({"success": False, "message": "缺少记录ID"}), 400

        sync_service = get_sync_service()
        result = sync_service.create_workorder_from_feishu_record(record_id)

        if result.get("success"):
            return jsonify(result)
        return jsonify(result), 400

    except Exception as e:
        logger.error(f"创建工单失败: {e}")
        return jsonify({"success": False, "message": str(e)}), 500
|
||||
|
||||
@feishu_sync_bp.route('/preview-feishu-data')
def preview_feishu_data():
    """Return the first 10 Feishu records, parsed, for a UI preview."""
    try:
        service = get_sync_service()

        # Fetch the first 10 records for preview.
        records = service.feishu_client.get_table_records(
            service.app_token, service.table_id, page_size=10
        )

        # Guard clause: non-zero code means the API call failed.
        if records.get("code") != 0:
            return jsonify({
                "success": False,
                "error": records.get("msg", "获取数据失败")
            }), 500

        items = records.get("data", {}).get("items", [])
        preview_data = [
            {
                "record_id": record.get("record_id"),
                "fields": service.feishu_client.parse_record_fields(record),
            }
            for record in items
        ]

        return jsonify({
            "success": True,
            "preview_data": preview_data,
            "total_count": len(preview_data)
        })

    except Exception as e:
        logger.error(f"预览飞书数据失败: {e}")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
@feishu_sync_bp.route('/config/export', methods=['GET'])
def export_config():
    """Serialize the current configuration for download/backup."""
    try:
        exported = config_manager.export_config()
        return jsonify({"success": True, "config": exported})
    except Exception as e:
        logger.error(f"导出配置失败: {e}")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
@feishu_sync_bp.route('/config/import', methods=['POST'])
def import_config():
    """Restore configuration from a previously exported payload."""
    try:
        # get_json() yields None for a missing/invalid body; default to {} so
        # the validation below answers 400 instead of raising into the 500 path.
        data = request.get_json(silent=True) or {}
        config_json = data.get('config')

        if not config_json:
            return jsonify({"error": "缺少配置数据"}), 400

        success = config_manager.import_config(config_json)

        if success:
            # Force the sync service to be rebuilt with the imported settings.
            global sync_service
            sync_service = None

            return jsonify({
                "success": True,
                "message": "配置导入成功"
            })
        return jsonify({"error": "配置导入失败"}), 500

    except Exception as e:
        logger.error(f"导入配置失败: {e}")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
@feishu_sync_bp.route('/config/reset', methods=['POST'])
def reset_config():
    """Reset the Feishu configuration to its defaults."""
    try:
        if not config_manager.reset_config():
            return jsonify({"error": "配置重置失败"}), 500

        # Invalidate the cached sync service so it is rebuilt lazily.
        global sync_service
        sync_service = None

        return jsonify({
            "success": True,
            "message": "配置重置成功"
        })
    except Exception as e:
        logger.error(f"重置配置失败: {e}")
        return jsonify({"error": str(e)}), 500
|
||||
@@ -306,6 +306,108 @@ def optimize_disk():
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
@system_bp.route('/system-optimizer/clear-cache', methods=['POST'])
def clear_cache():
    """Best-effort application cache flush (in-memory and/or Redis)."""
    try:
        cleared = False
        try:
            from src.core.cache_manager import cache_manager
            cache_manager.clear()
            cleared = True
        except Exception:
            # Cache module may be absent or disabled; stay best-effort.
            pass

        message = '缓存已清理' if cleared else '缓存清理已尝试(可能未启用缓存模块)'
        return jsonify({'success': True, 'message': message, 'progress': 100})
    except Exception as e:
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
def _optimize_gc() -> str:
    """Run one garbage-collection pass; return a human-readable action label."""
    import gc
    try:
        return f"垃圾回收:{gc.collect()}"
    except Exception:
        return "垃圾回收:跳过"


def _optimize_cache() -> str:
    """Best-effort application cache flush; return an action label."""
    try:
        from src.core.cache_manager import cache_manager
        cache_manager.clear()
        return "缓存清理:完成"
    except Exception:
        return "缓存清理:跳过"


def _clean_temp_files() -> int:
    """Delete tsp_/tmp_ files from the system temp dir; return count removed."""
    cleaned = 0
    try:
        import os, tempfile
        temp_dir = tempfile.gettempdir()
        for filename in os.listdir(temp_dir):
            if filename.startswith(('tsp_', 'tmp_')):
                file_path = os.path.join(temp_dir, filename)
                try:
                    if os.path.isfile(file_path):
                        os.remove(file_path)
                        cleaned += 1
                except Exception:
                    # Individual files may be locked/gone; keep going.
                    pass
    except Exception:
        pass
    return cleaned


def _clean_old_logs(max_age_days: int = 7) -> int:
    """Delete *.log files under ./logs older than max_age_days; return count."""
    cleaned = 0
    try:
        import os, glob
        from datetime import datetime, timedelta
        log_dir = 'logs'
        if os.path.exists(log_dir):
            cutoff_date = datetime.now() - timedelta(days=max_age_days)
            for log_file in glob.glob(os.path.join(log_dir, '*.log')):
                try:
                    file_time = datetime.fromtimestamp(os.path.getmtime(log_file))
                    if file_time < cutoff_date:
                        os.remove(log_file)
                        cleaned += 1
                except Exception:
                    pass
    except Exception:
        pass
    return cleaned


def _maintain_database() -> str:
    """Light DB maintenance: VACUUM when the backing store is SQLite."""
    try:
        engine = db_manager.engine
        if str(engine.url).startswith('sqlite'):
            with engine.begin() as conn:
                conn.exec_driver_sql('VACUUM')
            return "SQLite VACUUM:完成"
        return "DB维护:跳过(非SQLite)"
    except Exception:
        return "DB维护:失败"


@system_bp.route('/system-optimizer/optimize-all', methods=['POST'])
def optimize_all():
    """One-click optimization: GC + cache flush + temp/log cleanup + DB maintenance.

    Each step is best-effort and reported in the `actions` list; the endpoint
    does not fail just because a single step was skipped.
    """
    try:
        import time
        start_time = time.time()

        actions = [_optimize_gc(), _optimize_cache()]
        actions.append(f"临时文件:{_clean_temp_files()}")
        actions.append(f"日志清理:{_clean_old_logs()}")
        actions.append(_maintain_database())

        optimization_time = round((time.time() - start_time) * 1000, 1)
        return jsonify({
            'success': True,
            'message': '一键优化完成: ' + ','.join(actions) + f',耗时{optimization_time}ms',
            'progress': 100,
            'actions': actions,
            'optimization_time': optimization_time
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
@system_bp.route('/system-optimizer/security-settings', methods=['GET', 'POST'])
|
||||
def security_settings():
|
||||
"""安全设置"""
|
||||
|
||||
@@ -31,15 +31,23 @@ def _ensure_workorder_template_file() -> str:
|
||||
# 确保目录存在
|
||||
os.makedirs('uploads', exist_ok=True)
|
||||
if not os.path.exists(template_path):
|
||||
# 如果运行目录不存在模板,尝试从项目根相对路径拷贝一份
|
||||
repo_template = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), 'uploads', 'workorder_template.xlsx')
|
||||
repo_template = os.path.abspath(repo_template)
|
||||
# 优先从项目根目录的 uploads 拷贝(仓库自带模板)
|
||||
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
|
||||
repo_template = os.path.join(project_root, 'uploads', 'workorder_template.xlsx')
|
||||
try:
|
||||
if os.path.exists(repo_template):
|
||||
import shutil
|
||||
shutil.copyfile(repo_template, template_path)
|
||||
else:
|
||||
raise FileNotFoundError('模板文件缺失:uploads/workorder_template.xlsx')
|
||||
# 仓库模板不存在时,自动生成一个最小可用模板
|
||||
try:
|
||||
import pandas as pd
|
||||
from pandas import DataFrame
|
||||
columns = ['标题', '描述', '分类', '优先级', '状态', '解决方案', '满意度']
|
||||
df: DataFrame = pd.DataFrame(columns=columns)
|
||||
df.to_excel(template_path, index=False)
|
||||
except Exception as gen_err:
|
||||
raise FileNotFoundError('模板文件缺失且自动生成失败,请检查依赖:openpyxl/pandas') from gen_err
|
||||
except Exception as copy_err:
|
||||
raise copy_err
|
||||
return template_path
|
||||
@@ -199,14 +207,15 @@ def generate_workorder_ai_suggestion(workorder_id):
|
||||
if not w:
|
||||
return jsonify({"error": "工单不存在"}), 404
|
||||
# 调用知识库搜索与LLM生成
|
||||
query = f"{w.title} {w.description}"
|
||||
# 使用问题描述(title)而不是处理过程(description)作为主要查询依据
|
||||
query = f"{w.title}"
|
||||
kb_results = get_assistant().search_knowledge(query, top_k=3)
|
||||
kb_list = kb_results.get('results', []) if isinstance(kb_results, dict) else []
|
||||
# 组装提示词
|
||||
context = "\n".join([f"Q: {k.get('question','')}\nA: {k.get('answer','')}" for k in kb_list])
|
||||
from src.core.llm_client import QwenClient
|
||||
llm = QwenClient()
|
||||
prompt = f"请基于以下工单描述与知识库片段,给出简洁、可执行的处理建议。\n工单描述:\n{w.description}\n\n知识库片段:\n{context}\n\n请直接输出建议文本:"
|
||||
prompt = f"请基于以下工单问题描述与知识库片段,给出简洁、可执行的处理建议。\n\n问题描述:\n{w.title}\n\n处理过程(仅供参考):\n{w.description}\n\n知识库片段:\n{context}\n\n请直接输出建议文本:"
|
||||
llm_resp = llm.chat_completion(messages=[{"role":"user","content":prompt}], temperature=0.3, max_tokens=800)
|
||||
suggestion = ""
|
||||
if llm_resp and 'choices' in llm_resp:
|
||||
@@ -404,6 +413,11 @@ def download_import_template_file():
|
||||
"""直接返回工单导入模板文件(下载)"""
|
||||
try:
|
||||
template_path = _ensure_workorder_template_file()
|
||||
return send_file(template_path, as_attachment=True, download_name='工单导入模板.xlsx')
|
||||
try:
|
||||
# Flask>=2 使用 download_name
|
||||
return send_file(template_path, as_attachment=True, download_name='工单导入模板.xlsx', mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
|
||||
except TypeError:
|
||||
# 兼容 Flask<2 的 attachment_filename
|
||||
return send_file(template_path, as_attachment=True, attachment_filename='工单导入模板.xlsx', mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
|
||||
except Exception as e:
|
||||
return jsonify({"error": str(e)}), 500
|
||||
|
||||
Reference in New Issue
Block a user