2026-04-23 17:10:38 +08:00

1202 lines
48 KiB
Python
Raw Permalink Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

"""
标伙伴 - AI 标书助手(单机版)
启动命令python app.py
访问地址http://localhost:5000
"""
import os
import sys
def _bootstrap_env_file():
"""在 import config 之前加载项目根目录 .env便于注入 API Key不覆盖已存在的环境变量。"""
if getattr(sys, 'frozen', False):
base = os.path.dirname(sys.executable)
else:
base = os.path.dirname(os.path.abspath(__file__))
path = os.path.join(base, '.env')
if not os.path.isfile(path):
return
try:
with open(path, encoding='utf-8') as f:
for raw in f:
line = raw.strip()
if not line or line.startswith('#') or '=' not in line:
continue
key, _, val = line.partition('=')
key, val = key.strip(), val.strip().strip('"').strip("'")
if key and key not in os.environ:
os.environ[key] = val
except OSError:
pass
_bootstrap_env_file()
import json
import sqlite3
import threading
import logging
from datetime import datetime
from flask import Flask, request, jsonify, render_template, send_from_directory, abort
import config
from utils import settings as _settings
# ── Logging setup ───────────────────────────────────────────────────────────
_log_handlers = [logging.StreamHandler()]
if getattr(sys, 'frozen', False):
    # Frozen (PyInstaller) build: additionally log to a file next to the executable.
    _log_file = os.path.join(os.path.dirname(sys.executable), 'bid_partner.log')
    try:
        _log_handlers.append(logging.FileHandler(_log_file, encoding='utf-8'))
    except Exception:
        # Best effort: fall back to console-only logging if the file can't be opened.
        pass
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s [%(levelname)s] %(name)s: %(message)s',
    datefmt='%H:%M:%S',
    handlers=_log_handlers,
)
logger = logging.getLogger(__name__)
def _safe_json_load(raw):
if not raw or not isinstance(raw, str):
return None
try:
return json.loads(raw)
except Exception:
return None
# ── Flask application ────────────────────────────────────────────────────────
# When bundled by PyInstaller, templates/static are unpacked under sys._MEIPASS.
_bundle = getattr(sys, '_MEIPASS', os.path.dirname(os.path.abspath(__file__)))
app = Flask(__name__,
            template_folder=os.path.join(_bundle, 'templates'),
            static_folder=os.path.join(_bundle, 'static'))
app.secret_key = config.SECRET_KEY
# Uploads above this limit are rejected by Flask with HTTP 413.
app.config['MAX_CONTENT_LENGTH'] = config.MAX_FILE_SIZE_MB * 1024 * 1024
# ═══════════════════════════════════════════════════════════════════════════
# 数据库初始化
# ═══════════════════════════════════════════════════════════════════════════
def init_db():
    """Create all required directories and database tables (idempotent).

    Also restores persisted settings (API keys etc.) into the ``config``
    module, and applies additive schema migrations so databases created
    by older versions of the app keep working.
    """
    for d in [config.DATA_DIR, config.UPLOAD_DIR, config.EXPORT_DIR,
              config.KNOWLEDGE_DIR, config.CHROMA_DIR]:
        os.makedirs(d, exist_ok=True)
    # Restore settings saved by a previous run (API keys etc.) before first use.
    settings_path = os.path.join(config.DATA_DIR, 'settings.json')
    _settings.init(settings_path)
    _settings.load(config)
    logger.info(f'当前模型: {config.MODEL_PROVIDER}')
    conn = sqlite3.connect(config.DB_PATH)
    cur = conn.cursor()
    # WAL mode: concurrent reader/writer threads don't block each other.
    cur.execute('PRAGMA journal_mode=WAL')
    cur.execute('PRAGMA synchronous=NORMAL')  # safe to relax under WAL; faster commits
    cur.executescript('''
        CREATE TABLE IF NOT EXISTS projects (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL,
            outline_status TEXT DEFAULT 'none',
            outline_error TEXT DEFAULT '',
            anon_requirements TEXT DEFAULT '',
            enable_figure INTEGER DEFAULT 0,
            enable_table INTEGER DEFAULT 0,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        );
        CREATE TABLE IF NOT EXISTS tender_data (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            project_id INTEGER NOT NULL UNIQUE,
            file_name TEXT,
            raw_text TEXT,
            summary TEXT,
            rating_requirements TEXT,
            rating_json TEXT,
            outline TEXT,
            boq_file_name TEXT DEFAULT '',
            boq_text TEXT DEFAULT '',
            boq_summary TEXT DEFAULT '',
            boq_analysis_json TEXT DEFAULT '',
            boq_status TEXT DEFAULT 'none',
            boq_error TEXT DEFAULT '',
            tender_kind TEXT DEFAULT 'engineering',
            status TEXT DEFAULT 'pending',
            error_message TEXT DEFAULT '',
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE
        );
        CREATE TABLE IF NOT EXISTS bid_sections (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            project_id INTEGER NOT NULL,
            section_number TEXT,
            section_title TEXT NOT NULL,
            level INTEGER DEFAULT 1,
            is_leaf INTEGER DEFAULT 1,
            content TEXT DEFAULT '',
            intro_content TEXT DEFAULT '',
            order_index INTEGER DEFAULT 0,
            status TEXT DEFAULT 'pending',
            error_message TEXT DEFAULT '',
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE
        );
        CREATE TABLE IF NOT EXISTS knowledge_files (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            file_name TEXT NOT NULL UNIQUE,
            file_path TEXT,
            chunk_count INTEGER DEFAULT 0,
            added_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        );
    ''')
    conn.commit()
    # Additive migrations for databases created before these columns existed.
    migrations = [
        ("ALTER TABLE projects ADD COLUMN anon_requirements TEXT DEFAULT ''",
         'projects.anon_requirements'),
        ("ALTER TABLE projects ADD COLUMN enable_figure INTEGER DEFAULT 0",
         'projects.enable_figure'),
        ("ALTER TABLE projects ADD COLUMN enable_table INTEGER DEFAULT 0",
         'projects.enable_table'),
        ("ALTER TABLE tender_data ADD COLUMN boq_file_name TEXT DEFAULT ''",
         'tender_data.boq_file_name'),
        ("ALTER TABLE tender_data ADD COLUMN boq_text TEXT DEFAULT ''",
         'tender_data.boq_text'),
        ("ALTER TABLE tender_data ADD COLUMN boq_summary TEXT DEFAULT ''",
         'tender_data.boq_summary'),
        ("ALTER TABLE tender_data ADD COLUMN boq_status TEXT DEFAULT 'none'",
         'tender_data.boq_status'),
        ("ALTER TABLE tender_data ADD COLUMN boq_error TEXT DEFAULT ''",
         'tender_data.boq_error'),
        ("ALTER TABLE tender_data ADD COLUMN boq_analysis_json TEXT DEFAULT ''",
         'tender_data.boq_analysis_json'),
        ("ALTER TABLE tender_data ADD COLUMN tender_kind TEXT DEFAULT 'engineering'",
         'tender_data.tender_kind'),
        ("ALTER TABLE tender_data ADD COLUMN target_pages INTEGER DEFAULT 0",
         'tender_data.target_pages'),
    ]
    for sql, col in migrations:
        try:
            conn.execute(sql)
            conn.commit()
            logger.info(f'数据库迁移:新增 {col}')
        except sqlite3.OperationalError as e:
            # Only "duplicate column name" (migration already applied) is
            # expected; surface anything else instead of silently swallowing
            # it as the previous bare `except Exception: pass` did.
            if 'duplicate column' not in str(e).lower():
                logger.warning(f'数据库迁移失败 {col}: {e}')
    conn.close()
    logger.info('数据库初始化完成')
def get_db():
    """Open a fresh SQLite connection to the application database.

    Callers open one connection per request/thread and are responsible for
    closing it.
    NOTE(review): ``PRAGMA foreign_keys`` is not enabled here, so the
    ``ON DELETE CASCADE`` clauses declared in the schema do not fire on
    these connections — confirm child rows are cleaned up elsewhere.
    """
    return sqlite3.connect(config.DB_PATH)
# ═══════════════════════════════════════════════════════════════════════════
# 页面路由
# ═══════════════════════════════════════════════════════════════════════════
@app.route('/')
def index():
    """Render the project-list landing page."""
    return render_template('index.html')
@app.route('/project/<int:project_id>')
def project_page(project_id):
    """Render the workspace page for one project; 404 if it does not exist."""
    conn = get_db()
    row = conn.execute(
        "SELECT id, name, created_at FROM projects WHERE id=?", (project_id,)
    ).fetchone()
    conn.close()
    if row is None:
        abort(404)
    return render_template(
        'project.html',
        project={'id': row[0], 'name': row[1], 'created_at': row[2]},
    )
# ═══════════════════════════════════════════════════════════════════════════
# API项目管理
# ═══════════════════════════════════════════════════════════════════════════
@app.route('/api/projects', methods=['GET'])
def api_list_projects():
    """List all projects (newest first) with parse/outline/section progress."""
    conn = get_db()
    rows = conn.execute('''
        SELECT p.id, p.name, p.created_at, p.outline_status,
               td.status as parse_status, td.file_name,
               (SELECT COUNT(*) FROM bid_sections WHERE project_id=p.id) as section_count,
               (SELECT COUNT(*) FROM bid_sections WHERE project_id=p.id AND status='done') as done_count
        FROM projects p
        LEFT JOIN tender_data td ON td.project_id = p.id
        ORDER BY p.created_at DESC
    ''').fetchall()
    conn.close()
    projects = [
        {
            'id': pid, 'name': name, 'created_at': created,
            'outline_status': ostatus, 'parse_status': pstatus or 'none',
            'file_name': fname, 'section_count': total, 'done_count': done,
        }
        for pid, name, created, ostatus, pstatus, fname, total, done in rows
    ]
    return jsonify({'projects': projects})
@app.route('/api/projects', methods=['POST'])
def api_create_project():
    """Create a new project.  Body: ``{"name": "..."}``; returns 201 with its id.

    Returns 400 with a JSON error when the name is empty or missing.
    """
    # silent=True matches the other JSON endpoints in this file: a missing or
    # malformed body falls through to our own 400 below instead of Flask
    # raising its generic HTML error response.
    data = request.get_json(silent=True) or {}
    name = data.get('name', '').strip()
    if not name:
        return jsonify({'error': '项目名称不能为空'}), 400
    conn = get_db()
    cur = conn.cursor()
    cur.execute("INSERT INTO projects (name) VALUES (?)", (name,))
    project_id = cur.lastrowid
    conn.commit()
    conn.close()
    return jsonify({'id': project_id, 'name': name}), 201
@app.route('/api/projects/<int:project_id>', methods=['DELETE'])
def api_delete_project(project_id):
    """Delete a project together with its tender data and sections.

    The schema declares ON DELETE CASCADE, but sqlite3 connections do not
    enable ``PRAGMA foreign_keys`` (off by default per connection), so the
    cascade never fires — delete child rows explicitly to avoid orphans.
    """
    conn = get_db()
    cur = conn.cursor()
    cur.execute("DELETE FROM bid_sections WHERE project_id=?", (project_id,))
    cur.execute("DELETE FROM tender_data WHERE project_id=?", (project_id,))
    cur.execute("DELETE FROM projects WHERE id=?", (project_id,))
    conn.commit()
    conn.close()
    return jsonify({'success': True})
@app.route('/api/projects/<int:project_id>', methods=['GET'])
def api_get_project(project_id):
    """Return full project detail, including tender parse results and BOQ state."""
    conn = get_db()
    row = conn.execute('''
        SELECT p.id, p.name, p.created_at, p.outline_status, p.outline_error,
               td.file_name, td.status as parse_status, td.error_message,
               td.summary, td.rating_requirements, td.rating_json, td.outline,
               p.anon_requirements, p.enable_figure, p.enable_table,
               td.boq_file_name, td.boq_summary, td.boq_status, td.boq_error,
               td.boq_analysis_json, td.tender_kind, COALESCE(td.target_pages, 0)
        FROM projects p
        LEFT JOIN tender_data td ON td.project_id = p.id
        WHERE p.id=?
    ''', (project_id,)).fetchone()
    conn.close()
    if row is None:
        return jsonify({'error': '项目不存在'}), 404
    (pid, name, created, ostatus, oerror, fname, pstatus, perror,
     summary, rating_req, rating_json, outline, anon_req, en_fig, en_tbl,
     boq_fname, boq_summary, boq_status, boq_error, boq_json, kind, tpages) = row
    return jsonify({
        'id': pid, 'name': name, 'created_at': created,
        'outline_status': ostatus, 'outline_error': oerror,
        'file_name': fname, 'parse_status': pstatus or 'none',
        'parse_error': perror, 'summary': summary,
        'rating_requirements': rating_req, 'rating_json': rating_json,
        'outline': outline, 'anon_requirements': anon_req or '',
        'enable_figure': bool(en_fig), 'enable_table': bool(en_tbl),
        'boq_file_name': boq_fname or '', 'boq_summary': boq_summary or '',
        'boq_status': boq_status or 'none', 'boq_error': boq_error or '',
        'boq_analysis': _safe_json_load(boq_json),
        'tender_kind': kind or 'engineering',
        'target_pages': int(tpages or 0),
    })
# ═══════════════════════════════════════════════════════════════════════════
# API文件上传与解析
# ═══════════════════════════════════════════════════════════════════════════
@app.route('/api/projects/<int:project_id>/upload', methods=['POST'])
def api_upload(project_id):
    """Upload the tender document for a project and mark it 'uploaded'.

    Validates extension, saves the file under UPLOAD_DIR, and upserts the
    project's tender_data row.
    """
    from utils.file_utils import allowed_file, safe_filename
    if 'file' not in request.files:
        return jsonify({'error': '未选择文件'}), 400
    f = request.files['file']
    if not f.filename:
        return jsonify({'error': '文件名为空'}), 400
    if not allowed_file(f.filename):
        return jsonify({'error': '仅支持 PDF / DOC / DOCX 格式'}), 400
    filename = safe_filename(f.filename)
    # Fix: embed the sanitized filename in the saved path (the previous code
    # contained a corrupted literal placeholder).  The parse endpoint looks
    # the file up under this exact '<project_id>_<filename>' pattern.
    save_path = os.path.join(config.UPLOAD_DIR, f'{project_id}_{filename}')
    f.save(save_path)
    # Create or reset the tender_data record for this project.
    conn = get_db()
    cur = conn.cursor()
    cur.execute('''
        INSERT INTO tender_data (project_id, file_name, status)
        VALUES (?, ?, 'uploaded')
        ON CONFLICT(project_id) DO UPDATE SET file_name=?, status='uploaded', error_message='', updated_at=?
    ''', (project_id, filename, filename, datetime.now()))
    conn.commit()
    conn.close()
    return jsonify({'success': True, 'file_name': filename, 'path': save_path})
@app.route('/api/projects/<int:project_id>/tender-data', methods=['PUT'])
def api_update_tender_data(project_id):
    """Persist user edits to parse results (summary / rating requirements / kind / pages)."""
    data = request.get_json() or {}
    updates = {}
    if 'summary' in data:
        updates['summary'] = data['summary']
    if 'rating_requirements' in data:
        updates['rating_requirements'] = data['rating_requirements']
    if 'tender_kind' in data:
        kind = (data.get('tender_kind') or 'engineering').strip().lower()
        if kind not in ('engineering', 'service', 'goods'):
            return jsonify({'error': 'tender_kind 须为 engineering / service / goods'}), 400
        updates['tender_kind'] = kind
    if 'target_pages' in data:
        try:
            updates['target_pages'] = max(0, int(data['target_pages']))
        except (TypeError, ValueError):
            return jsonify({'error': 'target_pages 须为非负整数'}), 400
    if not updates:
        return jsonify({'error': '无可更新字段'}), 400
    # Column names come only from the whitelist above, never from user input,
    # so interpolating them into the SET clause is safe.
    assignments = ', '.join(f'{col}=?' for col in updates)
    params = [*updates.values(), datetime.now(), project_id]
    conn = get_db()
    conn.execute(
        f'UPDATE tender_data SET {assignments}, updated_at=? WHERE project_id=?',
        params,
    )
    conn.commit()
    conn.close()
    return jsonify({'success': True})
@app.route('/api/projects/<int:project_id>/upload-boq', methods=['POST'])
def api_upload_boq(project_id):
    """Upload a bill-of-quantities (BOQ) file, stored separately from the tender doc."""
    from utils.file_utils import safe_filename
    if 'file' not in request.files:
        return jsonify({'error': '未选择文件'}), 400
    f = request.files['file']
    if not f.filename:
        return jsonify({'error': '文件名为空'}), 400
    ext = os.path.splitext(f.filename)[1].lower()
    allowed_exts = {'.xlsx', '.xls', '.csv', '.pdf', '.docx', '.doc'}
    if ext not in allowed_exts:
        return jsonify({'error': f'不支持的格式 {ext},请使用 xlsx/xls/csv/pdf/docx/doc'}), 400
    filename = safe_filename(f.filename)
    # Fix: embed the sanitized filename in the saved path (the previous code
    # contained a corrupted literal placeholder).  parse-boq looks the file
    # up under this exact '<project_id>_boq_<filename>' pattern.
    save_path = os.path.join(config.UPLOAD_DIR, f'{project_id}_boq_{filename}')
    f.save(save_path)
    # Ensure the tender_data row exists; upsert the BOQ bookkeeping fields.
    conn = get_db()
    cur = conn.cursor()
    cur.execute('''
        INSERT INTO tender_data (project_id, boq_file_name, boq_status)
        VALUES (?, ?, 'uploaded')
        ON CONFLICT(project_id) DO UPDATE
        SET boq_file_name=?, boq_status='uploaded', boq_error='', updated_at=?
    ''', (project_id, filename, filename, datetime.now()))
    conn.commit()
    conn.close()
    return jsonify({'success': True, 'file_name': filename, 'path': save_path})
@app.route('/api/projects/<int:project_id>/parse-boq', methods=['POST'])
def api_parse_boq(project_id):
    """Kick off background parsing of the BOQ file into an AI summary."""
    from modules.parser import parse_boq_file
    conn = get_db()
    row = conn.execute(
        "SELECT boq_file_name FROM tender_data WHERE project_id=?", (project_id,)
    ).fetchone()
    conn.close()
    if not row or not row[0]:
        return jsonify({'error': '请先上传工程量清单文件'}), 400
    file_name = row[0]
    # Try the 'boq_'-prefixed path first, then the plain one (legacy uploads).
    prefixed = os.path.join(config.UPLOAD_DIR, f'{project_id}_boq_{file_name}')
    plain = os.path.join(config.UPLOAD_DIR, f'{project_id}_{file_name}')
    file_path = prefixed if os.path.exists(prefixed) else plain
    if not os.path.exists(file_path):
        return jsonify({'error': '清单文件不存在,请重新上传'}), 404
    threading.Thread(
        target=parse_boq_file,
        args=(config.DB_PATH, project_id, file_path, file_name),
        daemon=True,
    ).start()
    return jsonify({'success': True})
@app.route('/api/projects/<int:project_id>/boq', methods=['PUT'])
def api_update_boq(project_id):
    """Persist a user-edited BOQ summary."""
    payload = request.get_json() or {}
    summary_text = payload.get('boq_summary', '')
    conn = get_db()
    conn.execute(
        "UPDATE tender_data SET boq_summary=?, updated_at=? WHERE project_id=?",
        (summary_text, datetime.now(), project_id)
    )
    conn.commit()
    conn.close()
    return jsonify({'success': True})
@app.route('/api/projects/<int:project_id>/parse', methods=['POST'])
def api_parse(project_id):
    """Start background parsing of the uploaded tender document."""
    from modules.parser import parse_tender_file
    conn = get_db()
    row = conn.execute(
        "SELECT file_name FROM tender_data WHERE project_id=?", (project_id,)
    ).fetchone()
    conn.close()
    if not row or not row[0]:
        return jsonify({'error': '请先上传招标文件'}), 400
    file_name = row[0]
    file_path = os.path.join(config.UPLOAD_DIR, f'{project_id}_{file_name}')
    if not os.path.exists(file_path):
        return jsonify({'error': f'文件不存在: {file_name}'}), 404
    threading.Thread(
        target=parse_tender_file,
        args=(config.DB_PATH, project_id, file_path, file_name),
        daemon=True,
    ).start()
    return jsonify({'success': True, 'message': '解析任务已启动'})
@app.route('/api/projects/<int:project_id>/parse-status', methods=['GET'])
def api_parse_status(project_id):
    """Poll parsing progress, plus the project-level settings used by the parse tab."""
    conn = get_db()
    row = conn.execute('''
        SELECT td.status, td.error_message, td.summary, td.rating_requirements, td.rating_json, td.tender_kind,
               p.anon_requirements, p.enable_figure, p.enable_table
        FROM tender_data td
        LEFT JOIN projects p ON p.id = td.project_id
        WHERE td.project_id=?
    ''', (project_id,)).fetchone()
    conn.close()
    if row is None:
        return jsonify({'status': 'none'})
    status, message, summary, rating, rating_json, kind, anon, fig, tbl = row
    return jsonify({
        'status': status or 'none',
        'message': message or '',
        'has_summary': bool(summary),
        'has_rating': bool(rating),
        'has_rating_json': bool(rating_json),
        'tender_kind': kind or 'engineering',
        'anon_requirements': anon or '',
        'enable_figure': bool(fig),
        'enable_table': bool(tbl),
    })
# ═══════════════════════════════════════════════════════════════════════════
# API大纲生成
# ═══════════════════════════════════════════════════════════════════════════
@app.route('/api/projects/<int:project_id>/outline', methods=['PUT'])
def api_update_outline(project_id):
    """Re-parse a manually edited outline and rebuild the section tree.

    Note: previously generated section content is cleared and must be
    regenerated after this call.
    """
    from modules.generator import _parse_outline, _save_sections, _save_outline_text
    payload = request.get_json() or {}
    outline_text = (payload.get('outline') or '').strip()
    if not outline_text:
        return jsonify({'error': '大纲内容不能为空'}), 400
    try:
        # Parse and renumber chapters, producing a normalized outline text.
        _, sections, normalized_text = _parse_outline(outline_text)
        if not sections:
            return jsonify({'error': '大纲解析失败,未识别到任何章节,请检查格式'}), 400
        conn = get_db()
        cur = conn.cursor()
        # Persist the renumbered, normalized text rather than the raw input.
        _save_outline_text(conn, project_id, normalized_text)
        _save_sections(conn, project_id, sections)
        cur.execute(
            "UPDATE projects SET outline_status='outline_done', outline_error='', updated_at=? WHERE id=?",
            (datetime.now(), project_id)
        )
        cur.execute("SELECT length(outline) FROM tender_data WHERE project_id=?", (project_id,))
        persisted_len = (cur.fetchone() or [0])[0] or 0
        conn.commit()
        conn.close()
        # Echo the normalized text so the front-end can refresh its editor.
        return jsonify({
            'success': True,
            'section_count': len(sections),
            'normalized_outline': normalized_text,
            'persisted_outline_len': persisted_len,
        })
    except Exception as e:
        logger.exception('手动保存大纲失败')
        return jsonify({'error': str(e)}), 500
@app.route('/api/projects/<int:project_id>/generate-outline', methods=['POST'])
def api_generate_outline(project_id):
    """Start background outline generation.

    Returns 409 if an outline already exists, unless ``force=true`` is sent.
    """
    from modules.generator import generate_outline
    payload = request.get_json(silent=True) or {}
    force = bool(payload.get('force', False))
    conn = get_db()
    cur = conn.cursor()
    cur.execute(
        '''
        SELECT p.outline_status, COALESCE(td.outline, '')
        FROM projects p
        LEFT JOIN tender_data td ON td.project_id = p.id
        WHERE p.id = ?
        ''',
        (project_id,),
    )
    row = cur.fetchone()
    if row is None:
        conn.close()
        return jsonify({'success': False, 'error': '项目不存在'}), 404
    ostatus = row[0] or 'none'
    outline_text = row[1] or ''
    if ostatus == 'outline_generating':
        conn.close()
        return jsonify({'success': True, 'message': '大纲正在生成中,请稍候'}), 200
    if outline_text.strip() and not force:
        conn.close()
        return jsonify({
            'success': False,
            'error': '当前项目已有大纲,重新生成会覆盖现有大纲。请确认后以 force=true 再次请求。'
        }), 409
    # Persist the 'generating' status immediately so repeated clicks, polling
    # and page refreshes don't race the background thread's first SQL statement.
    cur.execute(
        "UPDATE projects SET outline_status=?, outline_error=?, updated_at=? WHERE id=?",
        ('outline_generating', '', datetime.now(), project_id),
    )
    conn.commit()
    conn.close()
    threading.Thread(
        target=generate_outline,
        args=(config.DB_PATH, project_id),
        daemon=True,
    ).start()
    return jsonify({'success': True, 'message': '大纲生成任务已启动'})
@app.route('/api/projects/<int:project_id>/expand-outline', methods=['POST'])
def api_expand_outline(project_id):
    """Auto-expand the edited outline with sub-chapters and rebuild the section tree."""
    from modules.generator import (
        expand_outline,
        _parse_outline,
        _save_outline_text,
        _save_sections,
    )
    from utils import volume_chapters as vol_ch
    payload = request.get_json() or {}
    outline = payload.get('outline', '')
    if not outline.strip():
        return jsonify({'success': False, 'error': '大纲内容不能为空'}), 400
    no_subchapter_limit = payload.get('no_subchapter_limit') in (True, 1, '1', 'true', 'True')
    # target_pages from the request is optional; invalid values mean "not given".
    request_pages = None
    if 'target_pages' in payload:
        try:
            request_pages = max(0, int(payload['target_pages']))
        except (TypeError, ValueError):
            request_pages = None
    conn = get_db()
    row = conn.execute(
        "SELECT summary, rating_requirements, COALESCE(target_pages, 0) FROM tender_data "
        "WHERE project_id=?",
        (project_id,),
    ).fetchone()
    conn.close()
    summary = row[0] if row else ''
    rating_requirements = row[1] if row else ''
    db_tp = int(row[2] or 0) if row else 0
    cfg_tp = int(getattr(config, 'TARGET_PAGES', 0) or 0)
    # Resolution order (request vs DB vs config) is owned by volume_chapters.
    effective_target_pages = vol_ch.resolve_expand_target_pages(
        request_pages, no_subchapter_limit, db_tp, cfg_tp
    )
    try:
        expanded_outline = expand_outline(
            outline, summary, rating_requirements, project_id, effective_target_pages
        )
        _, sections, normalized_text = _parse_outline(expanded_outline)
        if not sections:
            return jsonify({'success': False, 'error': '扩充后大纲解析失败,请检查章节格式'}), 400
        conn = get_db()
        cur = conn.cursor()
        _save_outline_text(conn, project_id, normalized_text)
        _save_sections(conn, project_id, sections)
        cur.execute(
            "UPDATE projects SET outline_status='outline_done', outline_error='', updated_at=? WHERE id=?",
            (datetime.now(), project_id),
        )
        conn.commit()
        cur.execute("SELECT length(outline) FROM tender_data WHERE project_id=?", (project_id,))
        persisted_len = (cur.fetchone() or [0])[0] or 0
        conn.close()
        return jsonify({
            'success': True,
            'expanded_outline': expanded_outline,
            'normalized_outline': normalized_text,
            'section_count': len(sections),
            'persisted_outline_len': persisted_len,
            'used_target_pages': effective_target_pages,
            'no_subchapter_limit': no_subchapter_limit,
        })
    except Exception as e:
        logger.exception(f'expand_outline failed for project {project_id}')
        return jsonify({'success': False, 'error': str(e)}), 500
@app.route('/api/projects/<int:project_id>/outline-status', methods=['GET'])
def api_outline_status(project_id):
    """Poll the outline-generation status of a project."""
    conn = get_db()
    row = conn.execute(
        "SELECT outline_status, outline_error FROM projects WHERE id=?", (project_id,)
    ).fetchone()
    conn.close()
    if row is None:
        return jsonify({'status': 'none'})
    status, error = row
    return jsonify({'status': status, 'error': error})
# ═══════════════════════════════════════════════════════════════════════════
# API章节管理与内容生成
# ═══════════════════════════════════════════════════════════════════════════
@app.route('/api/projects/<int:project_id>/sections', methods=['GET'])
def api_list_sections(project_id):
    """Return the project's section tree (ordered), with per-section status."""
    conn = get_db()
    rows = conn.execute('''
        SELECT id, section_number, section_title, level, is_leaf,
               status, error_message, length(content) as content_len
        FROM bid_sections
        WHERE project_id=?
        ORDER BY order_index
    ''', (project_id,)).fetchall()
    conn.close()
    sections = [
        {
            'id': sid, 'number': num, 'title': title, 'level': level,
            'is_leaf': bool(leaf), 'status': status, 'error': error,
            'has_content': (content_len or 0) > 0,
        }
        for sid, num, title, level, leaf, status, error, content_len in rows
    ]
    return jsonify({'sections': sections})
@app.route('/api/projects/<int:project_id>/sections/<int:section_id>', methods=['GET'])
def api_get_section(project_id, section_id):
    """Fetch one section's full content for the editor."""
    conn = get_db()
    row = conn.execute(
        "SELECT id, section_number, section_title, level, is_leaf, content, intro_content, status FROM bid_sections WHERE id=? AND project_id=?",
        (section_id, project_id)
    ).fetchone()
    conn.close()
    if row is None:
        return jsonify({'error': '章节不存在'}), 404
    sid, number, title, level, is_leaf, content, intro, status = row
    return jsonify({
        'id': sid, 'number': number, 'title': title, 'level': level,
        'is_leaf': bool(is_leaf), 'content': content, 'intro_content': intro, 'status': status,
    })
@app.route('/api/projects/<int:project_id>/sections/<int:section_id>', methods=['PUT'])
def api_update_section(project_id, section_id):
    """Save user-edited section content and mark the section as done."""
    payload = request.get_json() or {}
    new_content = payload.get('content', '')
    conn = get_db()
    conn.execute(
        "UPDATE bid_sections SET content=?, status='done', updated_at=? WHERE id=? AND project_id=?",
        (new_content, datetime.now(), section_id, project_id)
    )
    conn.commit()
    conn.close()
    return jsonify({'success': True})
@app.route('/api/projects/<int:project_id>/sections/<int:section_id>/chat', methods=['POST'])
def api_section_chat(project_id, section_id):
    """Conversational section drafting.

    Accepts a multi-turn chat history, combines it with the section's
    context (project summary, outline, tender kind) and calls the AI for
    the next assistant reply.
    Request body: { "messages": [{"role": "user"|"assistant", "content": "..."}] }
    """
    from utils import ai_client
    data = request.get_json() or {}
    messages = data.get('messages', [])
    if not messages:
        return jsonify({'error': '消息列表不能为空'}), 400
    conn = get_db()
    cur = conn.cursor()
    cur.execute(
        "SELECT section_title FROM bid_sections WHERE id=? AND project_id=?",
        (section_id, project_id)
    )
    row = cur.fetchone()
    if not row:
        conn.close()
        return jsonify({'error': '章节不存在'}), 404
    section_title = row[0]
    cur.execute(
        "SELECT summary, outline, tender_kind FROM tender_data WHERE project_id=?",
        (project_id,),
    )
    td = cur.fetchone()
    conn.close()
    # Truncate context so the prompt stays within the model's input budget.
    summary = ((td[0] or '')[:3000]) if td else ''
    outline = ((td[1] or '')[:2000]) if td else ''
    tk = (td[2] or 'engineering').strip().lower() if td else 'engineering'
    if tk not in ('engineering', 'service', 'goods'):
        tk = 'engineering'
    from utils.tender_kind_sections import CHAT_KIND_INSTRUCTION
    kind_hint = CHAT_KIND_INSTRUCTION.get(tk, CHAT_KIND_INSTRUCTION['engineering'])
    # System prompt is runtime text sent to the model — kept verbatim (Chinese).
    system = f"""你是一位资深的投标文件撰写专家,正在协助用户以对话方式撰写技术标书中「{section_title}」章节的正文内容。
【项目背景摘要】
{summary or '(未提供)'}
【标书目录结构】
{outline or '(未提供)'}
{kind_hint}
【撰写规范(必须遵守)】
- 投标方自称统一用"我方",禁用"我们""我公司"
- 禁止套话:综上所述、高度重视、全力以赴、不断优化、稳步推进等
- 每项措施须有可检验的实质内容(做法、节点、标准编号,或招标文件/清单已给出的量化依据);
未载明的型号、数量、吨位、时限等不得编造,用概括性定性表述写清含义;禁止使用方括号待填项(如[型号][数量])
- 列举用(1)(2)(3)编号,禁止"首先其次最后"连接
- 纯文本输出,段落间用空行分隔,不使用 Markdown 符号
- 直接输出正文,不含章节标题、解释说明或"以下是..."引导语"""
    # Drop messages with unexpected roles before forwarding to the model.
    valid_messages = [m for m in messages if m.get('role') in ('user', 'assistant')]
    try:
        content = ai_client.chat_with_history(system, valid_messages,
                                              temperature=0.7, max_tokens=4096)
        return jsonify({'success': True, 'content': content})
    except Exception as e:
        logger.exception(f'对话式章节生成失败 section_id={section_id}')
        return jsonify({'error': str(e)}), 500
@app.route('/api/projects/<int:project_id>/generate-section', methods=['POST'])
def api_generate_section(project_id):
    """Start background generation of a single section's content."""
    from modules.generator import generate_section
    payload = request.get_json() or {}
    section_id = payload.get('section_id')
    if not section_id:
        return jsonify({'error': '缺少 section_id'}), 400
    conn = get_db()
    row = conn.execute(
        "SELECT anon_requirements, enable_figure, enable_table FROM projects WHERE id=?",
        (project_id,)
    ).fetchone()
    conn.close()
    anon_req, enable_fig, enable_tbl = '', False, False
    if row:
        anon_req = row[0] or ''
        enable_fig = bool(row[1])
        enable_tbl = bool(row[2])
    threading.Thread(
        target=generate_section,
        args=(config.DB_PATH, project_id, section_id, anon_req, enable_fig, enable_tbl),
        daemon=True,
    ).start()
    return jsonify({'success': True})
@app.route('/api/projects/<int:project_id>/diagram', methods=['PUT'])
def api_update_diagram(project_id):
    """Persist the figure/table generation toggles for a project."""
    payload = request.get_json() or {}
    fig_flag = 1 if payload.get('enable_figure') else 0
    tbl_flag = 1 if payload.get('enable_table') else 0
    conn = get_db()
    conn.execute(
        "UPDATE projects SET enable_figure=?, enable_table=?, updated_at=? WHERE id=?",
        (fig_flag, tbl_flag, datetime.now(), project_id)
    )
    conn.commit()
    conn.close()
    return jsonify({'success': True})
@app.route('/api/projects/<int:project_id>/anon', methods=['PUT'])
def api_update_anon(project_id):
    """Persist the anonymous-bid ("dark bid") formatting requirements."""
    payload = request.get_json() or {}
    requirements = payload.get('anon_requirements', '')
    conn = get_db()
    conn.execute(
        "UPDATE projects SET anon_requirements=?, updated_at=? WHERE id=?",
        (requirements, datetime.now(), project_id)
    )
    conn.commit()
    conn.close()
    return jsonify({'success': True})
@app.route('/api/projects/<int:project_id>/generate-all-sections', methods=['POST'])
def api_generate_all_sections(project_id):
    """Start background generation for every section of the project."""
    from modules.generator import generate_all_sections
    conn = get_db()
    row = conn.execute(
        "SELECT anon_requirements, enable_figure, enable_table FROM projects WHERE id=?",
        (project_id,)
    ).fetchone()
    conn.close()
    anon_req, enable_fig, enable_tbl = '', False, False
    if row:
        anon_req = row[0] or ''
        enable_fig = bool(row[1])
        enable_tbl = bool(row[2])
    threading.Thread(
        target=generate_all_sections,
        args=(config.DB_PATH, project_id, anon_req, enable_fig, enable_tbl),
        daemon=True,
    ).start()
    return jsonify({'success': True, 'message': '全量生成任务已启动'})
@app.route('/api/projects/<int:project_id>/section-progress', methods=['GET'])
def api_section_progress(project_id):
    """Aggregate generation progress over all sections of a project."""
    conn = get_db()
    row = conn.execute('''
        SELECT
            COUNT(*) as total,
            SUM(CASE WHEN status='done' THEN 1 ELSE 0 END) as done,
            SUM(CASE WHEN status='generating' THEN 1 ELSE 0 END) as running,
            SUM(CASE WHEN status='error' THEN 1 ELSE 0 END) as error_count
        FROM bid_sections WHERE project_id=?
    ''', (project_id,)).fetchone()
    conn.close()
    # SUM over zero rows yields NULL; normalize everything to 0.
    total, done, running, errors = (v or 0 for v in row)
    return jsonify({
        'total': total, 'done': done,
        'running': running, 'errors': errors,
        'percent': round(done / max(total, 1) * 100),
    })
# ═══════════════════════════════════════════════════════════════════════════
# API合规检查
# ═══════════════════════════════════════════════════════════════════════════
@app.route('/api/projects/<int:project_id>/check', methods=['POST'])
def api_check(project_id):
    """Run the synchronous compliance check over the project's generated bid."""
    from modules.checker import check_compliance
    result = check_compliance(config.DB_PATH, project_id)
    return jsonify(result)
@app.route('/api/projects/<int:project_id>/check-dark-bid-format', methods=['POST'])
def api_check_dark_bid_format(project_id):
    """Format-lint the anonymous ("dark") technical bid HTML — no AI involved.

    Request body (JSON): ``{"html": "..."}``.
    """
    conn = get_db()
    exists = conn.execute(
        "SELECT id FROM projects WHERE id=?", (project_id,)
    ).fetchone()
    conn.close()
    if not exists:
        return jsonify({'error': '项目不存在'}), 404
    payload = request.get_json(silent=True) or {}
    html = (payload.get('html') or '').strip()
    if not html:
        return jsonify({'error': '请提供 html 字段(技术暗标完整 HTML 内容)'}), 400
    from modules.dark_bid_format_check import check_technical_bid
    try:
        return jsonify(check_technical_bid(html))
    except Exception as e:
        logger.exception('技术暗标格式清标失败')
        return jsonify({'error': str(e)}), 500
# ═══════════════════════════════════════════════════════════════════════════
# API导出
# ═══════════════════════════════════════════════════════════════════════════
@app.route('/api/projects/<int:project_id>/export', methods=['POST'])
def api_export(project_id):
    """Export the project's bid document to Word and return its download URL.

    Optional body: ``{"style_preset": "standard"}``.
    """
    try:
        from modules.exporter import export_to_word
        data = request.get_json(silent=True) or {}
        style_preset = data.get('style_preset', 'standard')
        filename = export_to_word(config.DB_PATH, project_id, style_preset)
        # Fix: the URL must embed the exported filename (it previously held a
        # corrupted literal placeholder), matching the /api/download/<filename> route.
        return jsonify({'success': True, 'filename': filename, 'url': f'/api/download/{filename}'})
    except Exception as e:
        logger.exception('导出失败')
        return jsonify({'error': str(e)}), 500
@app.route('/api/download/<filename>')
def api_download(filename):
    """Serve an exported file as an attachment.

    send_from_directory rejects paths that escape EXPORT_DIR, guarding
    against path traversal in *filename*.
    """
    return send_from_directory(config.EXPORT_DIR, filename, as_attachment=True)
# ═══════════════════════════════════════════════════════════════════════════
# API知识库管理
# ═══════════════════════════════════════════════════════════════════════════
@app.route('/api/knowledge/status', methods=['GET'])
def api_knowledge_status():
    """Report knowledge-base availability plus the number of indexed files."""
    from modules.knowledge import is_available, list_files
    info = is_available()
    info['file_count'] = len(list_files(config.DB_PATH))
    return jsonify(info)
@app.route('/api/knowledge/files', methods=['GET'])
def api_knowledge_list():
    """List all documents currently in the knowledge base."""
    from modules.knowledge import list_files
    return jsonify({'files': list_files(config.DB_PATH)})
@app.route('/api/knowledge/upload', methods=['POST'])
def api_knowledge_upload():
    """Upload a knowledge-base document and ingest it in a background thread."""
    from modules.knowledge import add_file
    from utils.file_utils import allowed_file, safe_filename
    if 'file' not in request.files:
        return jsonify({'error': '未选择文件'}), 400
    f = request.files['file']
    if not f.filename or not allowed_file(f.filename):
        return jsonify({'error': '仅支持 PDF / DOC / DOCX'}), 400
    filename = safe_filename(f.filename)
    save_path = os.path.join(config.KNOWLEDGE_DIR, filename)
    f.save(save_path)
    # Ingest in the background: text extraction + embedding can exceed the
    # request timeout for large documents.
    def _ingest():
        result = add_file(save_path, config.DB_PATH)
        if not result.get('success'):
            # Fix: log the actual filename (the previous line contained a
            # corrupted literal placeholder instead of the interpolation).
            logger.error(f'知识库入库失败 {filename}: {result.get("error")}')
    # Uses the module-level `threading` import; the redundant local import
    # was removed.
    threading.Thread(target=_ingest, daemon=True).start()
    return jsonify({'success': True, 'queued': True, 'filename': filename})
@app.route('/api/knowledge/delete', methods=['POST'])
def api_knowledge_delete():
    """Remove one document (by file name) from the knowledge base."""
    from modules.knowledge import delete_file
    payload = request.get_json() or {}
    target = payload.get('file_name', '')
    if not target:
        return jsonify({'error': '缺少 file_name'}), 400
    return jsonify(delete_file(target, config.DB_PATH))
# ═══════════════════════════════════════════════════════════════════════════
# APIAI 配置
# ═══════════════════════════════════════════════════════════════════════════
@app.route('/api/config', methods=['GET'])
def api_get_config():
    """Report the current AI configuration.

    API keys are never echoed back; only a ``has_*_key`` boolean per
    provider is exposed (a key is "configured" unless it is empty or still
    the placeholder value starting with 'sk-your').
    """
    def _key_configured(value):
        return bool(value and not value.startswith('sk-your'))

    payload = {'model_provider': config.MODEL_PROVIDER}
    # Providers with a model + base-URL pair, emitted in the original order.
    for prefix in ('qwen', 'openai', 'deepseek'):
        payload[f'{prefix}_model'] = getattr(config, f'{prefix.upper()}_MODEL')
        payload[f'{prefix}_base_url'] = getattr(config, f'{prefix.upper()}_BASE_URL')
    # Ollama lists base_url before model and has no API key.
    payload['ollama_base_url'] = config.OLLAMA_BASE_URL
    payload['ollama_model'] = config.OLLAMA_MODEL
    for prefix in ('doubao', 'kimi'):
        payload[f'{prefix}_model'] = getattr(config, f'{prefix.upper()}_MODEL')
        payload[f'{prefix}_base_url'] = getattr(config, f'{prefix.upper()}_BASE_URL')
    for prefix in ('qwen', 'openai', 'deepseek', 'doubao', 'kimi'):
        payload[f'has_{prefix}_key'] = _key_configured(getattr(config, f'{prefix.upper()}_API_KEY'))
    payload['max_concurrent'] = config.MAX_CONCURRENT_SECTIONS
    payload['llm_concurrency_limit'] = getattr(config, 'LLM_CONCURRENCY_LIMIT', 20)
    payload['content_volume'] = config.CONTENT_VOLUME
    payload['target_pages'] = getattr(config, 'TARGET_PAGES', 0)
    payload['page_char_estimate'] = getattr(config, 'PAGE_CHAR_ESTIMATE', 700)
    # Concurrency state itself is surfaced via progress.running and the logs
    # (UI renders "X路并发"), so it is not duplicated here.
    return jsonify(payload)
@app.route('/api/config', methods=['POST'])
def api_save_config():
    """Apply a partial AI-configuration update from a JSON body and persist it.

    Only keys present in the payload (and non-empty, for string fields) are
    applied, so a partial update never clears existing settings. Numeric
    fields are clamped to safe ranges, and invalid numeric values are
    silently ignored — including ``max_concurrent``, which previously
    raised an unhandled ValueError (HTTP 500) on non-numeric input,
    inconsistently with the other numeric fields.

    Returns:
        ``{'success': True}`` always.
    """
    data = request.get_json() or {}

    if 'model_provider' in data:
        config.MODEL_PROVIDER = data['model_provider']

    # Provider string settings (API key / model name / base URL) all follow
    # the same "apply only when present and truthy" rule; one mapping
    # replaces 17 copy-pasted branches.
    str_fields = {
        'qwen_api_key': 'QWEN_API_KEY',
        'qwen_model': 'QWEN_MODEL',
        'qwen_base_url': 'QWEN_BASE_URL',
        'openai_api_key': 'OPENAI_API_KEY',
        'openai_model': 'OPENAI_MODEL',
        'openai_base_url': 'OPENAI_BASE_URL',
        'deepseek_api_key': 'DEEPSEEK_API_KEY',
        'deepseek_model': 'DEEPSEEK_MODEL',
        'deepseek_base_url': 'DEEPSEEK_BASE_URL',
        'ollama_base_url': 'OLLAMA_BASE_URL',
        'ollama_model': 'OLLAMA_MODEL',
        'doubao_api_key': 'DOUBAO_API_KEY',
        'doubao_model': 'DOUBAO_MODEL',
        'doubao_base_url': 'DOUBAO_BASE_URL',
        'kimi_api_key': 'KIMI_API_KEY',
        'kimi_model': 'KIMI_MODEL',
        'kimi_base_url': 'KIMI_BASE_URL',
    }
    for key, attr in str_fields.items():
        if data.get(key):
            setattr(config, attr, data[key])

    if 'max_concurrent' in data:
        try:
            v = int(data['max_concurrent'])
            config.MAX_CONCURRENT_SECTIONS = max(1, min(v, 20))
        except (ValueError, TypeError):
            pass  # previously raised → 500; ignore like the other numerics
    if 'llm_concurrency_limit' in data:
        try:
            v = int(data['llm_concurrency_limit'])
            config.LLM_CONCURRENCY_LIMIT = max(1, min(v, 30))
            # Note: semaphore recreated on next import/restart for simplicity
        except (ValueError, TypeError):
            pass
    if 'content_volume' in data and data['content_volume'] in ('concise', 'standard', 'detailed', 'full'):
        config.CONTENT_VOLUME = data['content_volume']
    if 'target_pages' in data:
        try:
            config.TARGET_PAGES = max(0, int(data['target_pages']))
        except (ValueError, TypeError):
            pass
    if 'page_char_estimate' in data:
        try:
            config.PAGE_CHAR_ESTIMATE = max(300, min(3000, int(data['page_char_estimate'])))
        except (ValueError, TypeError):
            pass
    if 'style_preset' in data:
        # Save selected style preset for the project (future extension)
        pass
    _settings.save(config)
    return jsonify({'success': True})
# /api/styles endpoints: style presets for the homepage settings panel
@app.route('/api/styles', methods=['GET'])
def api_get_styles():
    """Expose the built-in style presets for the homepage settings panel."""
    from utils.style_manager import DEFAULT_PRESETS
    payload = {'presets': DEFAULT_PRESETS, 'success': True}
    return jsonify(payload)
@app.route('/api/styles', methods=['POST'])
def api_save_style_preset():
    """Persist a named style preset posted from the settings UI."""
    from utils.style_manager import save_preset
    payload = request.get_json() or {}
    preset_name = payload.get('name', 'custom')
    if save_preset(preset_name, payload.get('config', {})):
        return jsonify({'success': True, 'name': preset_name})
    return jsonify({'error': '保存失败'}), 500
# ═══════════════════════════════════════════════════════════════════════════
# 启动
# ═══════════════════════════════════════════════════════════════════════════
if __name__ == '__main__':
    # Ensure the SQLite schema exists before accepting any requests.
    init_db()
    print('\n' + '=' * 60)
    print(' BidPartner - AI Bid Writing Assistant')
    print('=' * 60)
    print(' URL: http://localhost:5000')
    print(' Press Ctrl+C to quit\n')
    # NOTE(review): host='0.0.0.0' binds every network interface, so the app
    # is reachable from the LAN even though the banner advertises localhost
    # only — confirm this exposure is intended for a single-machine tool
    # (otherwise bind '127.0.0.1'). threaded=True lets the dev server handle
    # concurrent requests (needed while background ingestion runs).
    app.run(host='0.0.0.0', port=5000, debug=False, threaded=True)