# ============ Eventlet/Gevent monkey patching (must run before all other imports!) ============
# Enables Gunicorn + eventlet/gevent async modes so blocking calls (e.g. requests)
# become non-blocking under a patched worker.
import os
import sys


def _detect_async_env():
    """Detect the current async environment.

    Returns one of:
      'eventlet_patched', 'eventlet_available',
      'gevent_patched', 'gevent_available', 'none'.
    """
    # Check eventlet first
    try:
        import eventlet
        if hasattr(eventlet, 'is_monkey_patched') and eventlet.is_monkey_patched('socket'):
            return 'eventlet_patched'
        return 'eventlet_available'
    except ImportError:
        pass
    # Then gevent
    try:
        from gevent import monkey
        if monkey.is_module_patched('socket'):
            return 'gevent_patched'
        return 'gevent_available'
    except ImportError:
        pass
    return 'none'


_async_env = _detect_async_env()

# Gunicorn eventlet/gevent workers patch automatically; we only report status here.
# NOTE: two of the original status strings had their closing quotes destroyed by
# encoding corruption; they are rewritten in English (the syntax fix).
if _async_env == 'eventlet_patched':
    print("鉁?Eventlet monkey patching 宸茬敱 Worker 鍚敤")
elif _async_env == 'gevent_patched':
    print("鉁?Gevent monkey patching 宸茬敱 Worker 鍚敤")
elif _async_env == 'eventlet_available':
    print("Eventlet available, waiting for Gunicorn worker initialization")
elif _async_env == 'gevent_available':
    print("Gevent available, waiting for Gunicorn worker initialization")
else:
    print("鈿狅笍 鏈娴嬪埌 eventlet 鎴?gevent锛屽皢浣跨敤 threading 妯″紡")

# ============ End of monkey patching detection ============

# --- Standard library ---
import base64
import csv
import io
import json
import random
import re
import string
import threading
import time
import urllib
import uuid
from collections import defaultdict
from datetime import datetime, timedelta, time as dt_time, date
from decimal import Decimal
from functools import lru_cache, wraps
from pathlib import Path

# --- Third party ---
import jieba
import jieba.analyse
import pandas as pd
import pytz
import qrcode
import redis
import requests
from apscheduler.schedulers.background import BackgroundScheduler
from celery import Celery
from clickhouse_driver import Client as Cclient
from elasticsearch import Elasticsearch
from flask import Flask, render_template, request, jsonify, redirect, url_for, flash, session, \
    render_template_string, current_app, make_response
from flask_compress import Compress
from flask_cors import CORS, cross_origin
from flask_login import LoginManager, UserMixin, login_user, logout_user, login_required, current_user
from flask_mail import Mail, Message
from flask_session import Session
from flask_socketio import SocketIO, emit, join_room, leave_room
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import Column, Integer, String, Boolean, DateTime, create_engine, text, func, or_, \
    desc, and_
from tencentcloud.common import credential
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.sms.v20210111 import sms_client, models
from werkzeug.middleware.proxy_fix import ProxyFix
from werkzeug.security import generate_password_hash, check_password_hash

# Trading-day calendar cache (populated below)
trading_days = []          # sorted list of datetime.date trading days
trading_days_set = set()   # same dates as a set for O(1) membership tests


def load_trading_days():
    """Load the trading-day calendar from tdays.csv into the module-level cache.

    The CSV must have a 'DateTime' column with dates like 2010/1/4.
    Failures are logged and leave the cache unchanged (best-effort).
    """
    global trading_days, trading_days_set
    try:
        with open('tdays.csv', 'r') as f:
            for row in csv.DictReader(f):
                # NOTE: the original bound this to the name `date`, shadowing
                # the datetime.date import; renamed to avoid the shadow.
                day = datetime.strptime(row['DateTime'], '%Y/%m/%d').date()
                trading_days.append(day)
                trading_days_set.add(day)
        trading_days.sort()
        print(f"鎴愬姛鍔犺浇 {len(trading_days)} 涓氦鏄撴棩鏁版嵁")
    except Exception as e:
        print(f"鍔犺浇浜ゆ槗鏃ユ暟鎹け璐? {e}")


def row_to_dict(row):
    """Convert a SQLAlchemy Row to a plain dict (SQLAlchemy 1.4+).

    Returns None when row is None.
    """
    if row is None:
        return None
    # ._mapping gives column-name keyed access on 1.4+ Row objects
    return dict(row._mapping)


def get_trading_day_near_date(target_date):
    """Return the trading day nearest target_date, rounding forward.

    If target_date is itself a trading day it is returned; otherwise the next
    trading day is returned, falling back to the last known trading day when
    target_date is past the end of the calendar. Returns None if no calendar
    could be loaded.
    """
    if not trading_days:
        load_trading_days()
        if not trading_days:
            return None
    if isinstance(target_date, datetime):
        target_date = target_date.date()
    if target_date in trading_days_set:
        return target_date
    # First trading day on/after the target
    for trading_day in trading_days:
        if trading_day >= target_date:
            return trading_day
    # Past the end of the calendar: return the last trading day
    return trading_days[-1] if trading_days else None


def get_target_and_prev_trading_day(event_datetime):
    """Determine the target trading day and the prior trading day for an event.

    Used to pick which intraday chart to show and which close the change
    percentage is based on. Handles the weekend span (Friday after 15:00
    through Monday before 15:00 shows Monday, change based on Friday's close).

    Rules:
    - event during a trading day (before/through 15:00): show that day,
      change based on the previous trading day;
    - event on a trading day after 15:00: show the next trading day,
      change based on the event day itself;
    - event on a non-trading day (weekend/holiday): show the next trading
      day, change based on the last trading day before the event.

    Returns (target_date, prev_close_date); (None, None) when no calendar.
    """
    if not trading_days:
        load_trading_days()
        if not trading_days:
            return None, None
    # Accept either a datetime (date + time) or a bare date
    if isinstance(event_datetime, datetime):
        event_date = event_datetime.date()
        event_time = event_datetime.time()
    else:
        event_date = event_datetime
        event_time = dt_time(12, 0)  # default noon: treat as during trading hours
    is_trading_day = event_date in trading_days_set
    market_close_time = dt_time(15, 0)
    is_after_market = event_time > market_close_time
    if is_trading_day:
        if is_after_market:
            # After the close: show the next trading day; base change on today
            target_date = get_trading_day_near_date(event_date + timedelta(days=1))
            prev_close_date = event_date
        else:
            # During/before the session: show today; base change on the prior trading day
            target_date = event_date
            target_idx = trading_days.index(event_date) if event_date in trading_days else -1
            prev_close_date = trading_days[target_idx - 1] if target_idx > 0 else None
    else:
        # Weekend/holiday: show the next trading day; base change on the prior one
        target_date = get_trading_day_near_date(event_date)
        prev_close_date = None
        for td in reversed(trading_days):
            if td < event_date:
                prev_close_date = td
                break
    return target_date, prev_close_date


# Load the trading-day calendar at startup
load_trading_days()

# Raw SQLAlchemy engine for hand-written SQL.
# SECURITY: the DB URL (including password) was hard-coded; it can now be
# overridden via DATABASE_URL (default preserved for backward compatibility —
# rotate the credential and move fully to the environment).
engine = create_engine(
    os.environ.get('DATABASE_URL',
                   "mysql+pymysql://root:Zzl33818!@127.0.0.1:3306/stock?charset=utf8mb4"),
    echo=False,
    pool_size=10,
    pool_recycle=3600,
    pool_pre_ping=True,
    pool_timeout=30,
    max_overflow=20
)

# Elasticsearch client
es_client = Elasticsearch(
    hosts=["http://222.128.1.157:19200"],
    request_timeout=30,
    max_retries=3,
    retry_on_timeout=True
)

app = Flask(__name__)

# ProxyFix (applied just below) makes Flask trust X-Forwarded-* headers from the
# reverse proxy; without it SESSION_COOKIE_SECURE=True drops cookies behind Nginx.
# Trust one layer of proxy for X-Forwarded-For/Proto/Host/Prefix
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1, x_host=1, x_prefix=1)
print("鉁?ProxyFix 宸查厤缃紝Flask 灏嗕俊浠诲弽鍚戜唬鐞嗗ご锛圶-Forwarded-Proto 绛夛級")

# ============ Redis connection (overridable via environment) ============
_REDIS_HOST = os.environ.get('REDIS_HOST', 'localhost')
_REDIS_PORT = int(os.environ.get('REDIS_PORT', 6379))
_REDIS_PASSWORD = os.environ.get('REDIS_PASSWORD', 'VF_Redis_2024')  # Redis password (hardening)

redis_client = redis.Redis(host=_REDIS_HOST, port=_REDIS_PORT, db=0,
                           password=_REDIS_PASSWORD, decode_responses=True)
print(f"馃摝 Redis 閰嶇疆: {_REDIS_HOST}:{_REDIS_PORT}/db=0 (宸插惎鐢ㄥ瘑鐮佽璇?")

# ============ Verification codes in Redis (multi-process / multi-worker safe) ============
VERIFICATION_CODE_PREFIX = "vf_code:"
VERIFICATION_CODE_EXPIRE = 300  # code lifetime in seconds (5 minutes)


def set_verification_code(key, code, expires_in=VERIFICATION_CODE_EXPIRE):
    """Store a verification code in Redis under the vf_code: prefix.

    Returns True on success, False on any Redis failure (best-effort).
    The 'expires' timestamp is stored alongside the code even though SETEX
    already enforces the TTL — callers may compare against it.
    """
    try:
        data = {
            'code': code,
            'expires': time.time() + expires_in
        }
        redis_client.setex(
            f"{VERIFICATION_CODE_PREFIX}{key}",
            expires_in,
            json.dumps(data)
        )
        return True
    except Exception as e:
        print(f"鉂?Redis 瀛樺偍楠岃瘉鐮佸け璐? {e}")
        return False


def get_verification_code(key):
    """Fetch a verification code dict from Redis; None when absent or on error."""
    try:
        data = redis_client.get(f"{VERIFICATION_CODE_PREFIX}{key}")
        if data:
            return json.loads(data)
        return None
    except Exception as e:
        print(f"鉂?Redis 鑾峰彇楠岃瘉鐮佸け璐? {e}")
        return None


def delete_verification_code(key):
    """Delete a verification code from Redis (errors are logged, not raised)."""
    try:
        redis_client.delete(f"{VERIFICATION_CODE_PREFIX}{key}")
    except Exception as e:
        print(f"鉂?Redis 鍒犻櫎楠岃瘉鐮佸け璐? {e}")


# Original message string was encoding-corrupted (missing closing quote); rewritten.
print(f"Verification codes stored in Redis, TTL: {VERIFICATION_CODE_EXPIRE}s")

# ============ WeChat-login session management (Redis, multi-process safe) ============
WECHAT_SESSION_EXPIRE = 300  # session lifetime in seconds (5 minutes)
WECHAT_SESSION_PREFIX = "wechat_session:"


def set_wechat_session(state, data):
    """Store a WeChat-login session blob in Redis keyed by OAuth state."""
    try:
        redis_client.setex(
            f"{WECHAT_SESSION_PREFIX}{state}",
            WECHAT_SESSION_EXPIRE,
            json.dumps(data)
        )
        return True
    except Exception as e:
        print(f"鉂?Redis 瀛樺偍 wechat session 澶辫触: {e}")
        return False


def get_wechat_session(state):
    """Fetch a WeChat-login session from Redis; None when absent or on error."""
    try:
        data = redis_client.get(f"{WECHAT_SESSION_PREFIX}{state}")
        if data:
            return json.loads(data)
        return None
    except Exception as e:
        print(f"鉂?Redis 鑾峰彇 wechat session 澶辫触: {e}")
        return None


def update_wechat_session(state, updates):
    """Merge `updates` into an existing WeChat-login session.

    Preserves the remaining TTL when possible; falls back to the default
    lifetime if the TTL cannot be read. Returns False when the session no
    longer exists or Redis fails.
    """
    try:
        data = get_wechat_session(state)
        if data:
            data.update(updates)
            # Keep the original expiry by reusing the remaining TTL
            ttl = redis_client.ttl(f"{WECHAT_SESSION_PREFIX}{state}")
            if ttl > 0:
                redis_client.setex(
                    f"{WECHAT_SESSION_PREFIX}{state}",
                    ttl,
                    json.dumps(data)
                )
            else:
                # TTL unreadable: rewrite with the default lifetime
                set_wechat_session(state, data)
            return True
        return False
    except Exception as e:
        print(f"鉂?Redis 鏇存柊 wechat session 澶辫触: {e}")
        return False


def delete_wechat_session(state):
    """Delete a WeChat-login session; returns False on Redis failure."""
    try:
        redis_client.delete(f"{WECHAT_SESSION_PREFIX}{state}")
        return True
    except Exception as e:
        print(f"鉂?Redis 鍒犻櫎 wechat session 澶辫触: {e}")
        return False


def wechat_session_exists(state):
    """Return True when a WeChat-login session exists for this state."""
    try:
        return redis_client.exists(f"{WECHAT_SESSION_PREFIX}{state}") > 0
    except Exception as e:
        print(f"鉂?Redis 妫€鏌?wechat session 澶辫触: {e}")
        return False


# ============ End of WeChat-login session management ============

# ============ Stock data lookups (direct MySQL queries) ============
def get_stock_names(base_codes):
    """Batch-fetch stock names from MySQL.

    :param base_codes: list of bare stock codes (no suffix), e.g. ['600000', '000001']
    :return: dict {code: name}; empty dict on empty input or DB failure
    """
    if not base_codes:
        return {}
    result = {}
    try:
        with engine.connect() as conn:
            # Parameterized IN clause (one bind per code) — avoids SQL injection
            placeholders = ','.join([f':code{i}' for i in range(len(base_codes))])
            params = {f'code{i}': code for i, code in enumerate(base_codes)}
            db_result = conn.execute(text(
                f"SELECT SECCODE, SECNAME FROM ea_stocklist WHERE SECCODE IN ({placeholders})"
            ), params).fetchall()
            for row in db_result:
                code, name = row[0], row[1]
                result[code] = name
    except Exception as e:
        print(f"鉂?鏁版嵁搴撴煡璇㈣偂绁ㄥ悕绉板け璐? {e}")
    return result


def get_prev_close(base_codes, trade_date_str):
    """Batch-fetch previous close prices from MySQL.

    :param base_codes: list of bare stock codes (no suffix)
    :param trade_date_str: trading date string, format YYYYMMDD
    :return: dict {code: close_price}; codes with no positive close are omitted
    """
    if not base_codes or not trade_date_str:
        return {}
    result = {}
    try:
        with engine.connect() as conn:
            placeholders = ','.join([f':code{i}' for i in range(len(base_codes))])
            params = {f'code{i}': code for i, code in enumerate(base_codes)}
            params['trade_date'] = trade_date_str
            db_result = conn.execute(text(f"""
                SELECT SECCODE, F007N as close_price
                FROM ea_trade
                WHERE SECCODE IN ({placeholders})
                  AND TRADEDATE = :trade_date
                  AND F007N > 0
            """), params).fetchall()
            for row in db_result:
                close_price = float(row[1]) if row[1] else None
                if close_price:
                    result[row[0]] = close_price
    except Exception as e:
        print(f"鉂?鏁版嵁搴撴煡璇㈠墠鏀剁洏浠峰け璐? {e}")
    return result


# ============ End of stock data lookups ============

# Tencent Cloud SMS configuration.
# SECURITY: these credentials were hard-coded; they can now be overridden via
# environment variables (defaults preserved for backward compatibility —
# rotate them and move fully to the environment).
SMS_SECRET_ID = os.environ.get('SMS_SECRET_ID', 'AKID2we9TacdTAhCjCSYTErHVimeJo9Yr00s')
SMS_SECRET_KEY = os.environ.get('SMS_SECRET_KEY', 'pMlBWijlkgT9fz5ziEXdWEnAPTJzRfkf')
SMS_SDK_APP_ID = "1400972398"
SMS_SIGN_NAME = "浠峰€煎墠娌跨鎶€"
SMS_TEMPLATE_REGISTER = "2386557"  # registration template
SMS_TEMPLATE_LOGIN = "2386540"     # login template

# WeChat Open Platform (PC QR-code login)
WECHAT_OPEN_APPID = 'wxa8d74c47041b5f87'
WECHAT_OPEN_APPSECRET = os.environ.get('WECHAT_OPEN_APPSECRET', 'eedef95b11787fd7ca7f1acc6c9061bc')

# WeChat Official Account (H5 web authorization)
WECHAT_MP_APPID = 'wx8afd36f7c7b21ba0'
WECHAT_MP_APPSECRET = os.environ.get('WECHAT_MP_APPSECRET', 'c3ec5a227ddb26ad8a1d4c55efa1cf86')

# WeChat Mini Program (H5 -> mini-program jump)
WECHAT_MINIPROGRAM_APPID = 'wx0edeaab76d4fa414'
WECHAT_MINIPROGRAM_APPSECRET = os.environ.get('WECHAT_MINIPROGRAM_APPSECRET',
                                              '0d0c70084f05a8c1411f6b89da7e815d')
WECHAT_MINIPROGRAM_ORIGINAL_ID = 'gh_fd2fd8dd2fb5'

# Redis cache key prefixes for WeChat tokens
WECHAT_ACCESS_TOKEN_PREFIX = "wechat:access_token:"
WECHAT_JSAPI_TICKET_PREFIX = "wechat:jsapi_ticket:"

# WeChat OAuth callback URL
WECHAT_REDIRECT_URI = 'https://api.valuefrontier.cn/api/auth/wechat/callback'

# Front-end origin (redirect target after successful login)
FRONTEND_URL = 'https://valuefrontier.cn'

# Mail service configuration (QQ enterprise mail).
# SECURITY: the mail password was hard-coded; env override added, default kept
# for backward compatibility — rotate it.
MAIL_SERVER = 'smtp.exmail.qq.com'
MAIL_PORT = 465
MAIL_USE_SSL = True
MAIL_USE_TLS = False
MAIL_USERNAME = 'admin@valuefrontier.cn'
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD', 'QYncRu6WUdASvTg4')
MAIL_DEFAULT_SENDER = 'admin@valuefrontier.cn'

# Session / security configuration.
# A fixed SECRET_KEY keeps login state across restarts; production should set
# FLASK_SECRET_KEY in the environment instead of relying on the default.
import os  # kept from the original (redundant: os is imported at the top of the file)

app.config['SECRET_KEY'] = os.environ.get('FLASK_SECRET_KEY',
                                          'vf_production_secret_key_2024_valuefrontier_cn')

# ============ Redis session store (multi-process / multi-worker) ============
# Sessions live in Redis so all Gunicorn workers share them.
# USE_REDIS_SESSION=false falls back to cookie sessions for troubleshooting.
USE_REDIS_SESSION = os.environ.get('USE_REDIS_SESSION', 'true').lower() == 'true'

if USE_REDIS_SESSION:
    app.config['SESSION_TYPE'] = 'redis'
    # db=1 is dedicated to sessions (db=0 holds app data)
    app.config['SESSION_REDIS'] = redis.Redis(host=_REDIS_HOST, port=_REDIS_PORT, db=1,
                                              password=_REDIS_PASSWORD)
    app.config['SESSION_PERMANENT'] = True
    app.config['SESSION_USE_SIGNER'] = True          # sign the session cookie
    app.config['SESSION_KEY_PREFIX'] = 'vf_session:'
    app.config['SESSION_REFRESH_EACH_REQUEST'] = True  # refresh session TTL every request
    # Flask-Session uses PERMANENT_SESSION_LIFETIME (configured below: 7 days) as the Redis TTL.
    # Original message string was encoding-corrupted; rewritten in English.
    print(f"Flask session store: Redis {_REDIS_HOST}:{_REDIS_PORT}/db=1, lifetime: 7 days")
else:
    # Default cookie sessions (only safe with a single worker)
    app.config['SESSION_TYPE'] = 'null'  # disable server-side sessions, use cookies
    print("Flask session store: cookie mode (single worker)")
# ============ End of Redis session configuration ============

# Cookie configuration — IMPORTANT: HTTPS deployments must keep SECURE=True,
# otherwise browsers drop the cookie behind the Nginx TLS terminator.
app.config['SESSION_COOKIE_SECURE'] = True      # production runs over HTTPS
app.config['SESSION_COOKIE_HTTPONLY'] = True    # mitigate XSS cookie theft
# SameSite='None' lets WeChat's embedded browser send the cookie after the
# OAuth redirect; it only works together with Secure=True (set above).
app.config['SESSION_COOKIE_SAMESITE'] = 'None'
app.config['SESSION_COOKIE_DOMAIN'] = None      # no domain restriction
app.config['SESSION_COOKIE_PATH'] = '/'
app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(days=7)   # session lifetime
app.config['REMEMBER_COOKIE_DURATION'] = timedelta(days=30)    # "remember me" lifetime
app.config['REMEMBER_COOKIE_SECURE'] = True
app.config['REMEMBER_COOKIE_HTTPONLY'] = True
app.config['REMEMBER_COOKIE_SAMESITE'] = 'None'  # WeChat browser compatibility

# Initialize Flask-Session (only when Redis sessions are enabled)
if USE_REDIS_SESSION:
    Session(app)
    print("鉁?Flask-Session (Redis) 宸插垵濮嬪寲锛屾敮鎸佸 Worker 鍏变韩 session")

    # Keep sessions permanent and refresh their Redis TTL each request
    # (works around a Flask-Session 0.8.0 TTL-refresh issue).
    @app.before_request
    def refresh_session_ttl():
        """Before each request:

        1. mark the session permanent so PERMANENT_SESSION_LIFETIME is used
           as the Redis TTL;
        2. mark it modified so save_session rewrites it, refreshing the TTL.

        This must run in before_request: Flask-Session's save_session executes
        before after_request hooks, so setting `modified` later has no effect.
        """
        from flask import session
        session.permanent = True
        # Only refresh the TTL when the session carries a logged-in user —
        # avoids creating Redis entries for anonymous visitors.
        if session.get('user_id') or session.get('_user_id'):
            session.modified = True

# Mail configuration
app.config['MAIL_SERVER'] = MAIL_SERVER
app.config['MAIL_PORT'] = MAIL_PORT
app.config['MAIL_USE_SSL'] = MAIL_USE_SSL
app.config['MAIL_USE_TLS'] = MAIL_USE_TLS
app.config['MAIL_USERNAME'] = MAIL_USERNAME
app.config['MAIL_PASSWORD'] = MAIL_PASSWORD
app.config['MAIL_DEFAULT_SENDER'] = MAIL_DEFAULT_SENDER

# CORS for the front-end. NOTE: the original wrapped this call in
# try/except ImportError, which could never fire (flask_cors is imported at
# the top of the file, so a missing package fails there); the dead handler
# is removed — behavior is unchanged.
CORS(app,
     origins=["http://localhost:3000", "http://127.0.0.1:3000", "http://localhost:5173",
              "https://valuefrontier.cn", "http://valuefrontier.cn",
              "https://www.valuefrontier.cn", "http://www.valuefrontier.cn"],  # explicit allow-list
     methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
     allow_headers=["Content-Type", "Authorization", "X-Requested-With", "Cache-Control"],
     supports_credentials=True,  # allow cookies/credentials
     expose_headers=["Content-Type", "Authorization"])

# Flask-Login initialization
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
# Original message string was encoding-corrupted (missing closing quote); rewritten.
login_manager.login_message = 'Please log in to access this page'
login_manager.remember_cookie_duration = timedelta(days=30)  # "remember me" duration

Compress(app)

MAX_CONTENT_LENGTH = 16 * 1024 * 1024  # 16MB max file size
# NOTE(review): MAX_CONTENT_LENGTH is defined but not assigned into app.config
# here — confirm whether uploads are limited elsewhere.

# Flask-Compress configuration
app.config['COMPRESS_ALGORITHM'] = ['gzip', 'br']
app.config['COMPRESS_MIMETYPES'] = [
    'text/html',
    'text/css',
    'text/xml',
    'application/json',
    'application/javascript',
    'application/x-javascript'
]

# Flask-SQLAlchemy — same database as the raw engine above; DATABASE_URL overrides.
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get(
    'DATABASE_URL', 'mysql+pymysql://root:Zzl33818!@127.0.0.1:3306/stock?charset=utf8mb4')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SQLALCHEMY_ENGINE_OPTIONS'] = {
    'pool_size': 10,
    'pool_recycle': 3600,
    'pool_pre_ping': True,
    'pool_timeout': 30,
    'max_overflow': 20
}

# Cache directory setup
CACHE_DIR = Path('cache')
CACHE_DIR.mkdir(exist_ok=True)


def beijing_now():
    """Return the current Beijing time as a naive datetime (for DB storage)."""
    beijing_tz = pytz.timezone('Asia/Shanghai')
    return datetime.now(beijing_tz).replace(tzinfo=None)


# Session-based login check decorator.
# NOTE: this deliberately shadows flask_login.login_required imported at the
# top of the file — routes defined after this point use this check instead.
def login_required(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if 'user_id' not in session:
            # Original message string was encoding-corrupted; rewritten.
            return jsonify({'success': False, 'error': 'Not logged in'}), 401
        return f(*args, **kwargs)
    return decorated_function


# Memory management constants
MAX_MEMORY_PERCENT = 75
MEMORY_CHECK_INTERVAL = 300
MAX_CACHE_ITEMS = 50

db = SQLAlchemy(app)

# Mail service
mail = Mail(app)


# Flask-SocketIO (real-time event push).
# The async mode can be forced via SOCKETIO_ASYNC_MODE=gevent|threading|eventlet.
def _detect_async_mode():
    """Pick the Flask-SocketIO async mode for this process."""
    forced_mode = os.environ.get('SOCKETIO_ASYNC_MODE', '').lower()
    if forced_mode in ('gevent', 'threading', 'eventlet'):
        return forced_mode
    # Gunicorn -k gevent patches sockets automatically; detect that
    try:
        from gevent import monkey
        if monkey.is_module_patched('socket'):
            return 'gevent'
    except ImportError:
        pass
    # threading is the most stable default (pairs with simple-websocket)
    return 'threading'


_async_mode = _detect_async_mode()
print(f"馃摗 Flask-SocketIO async_mode: {_async_mode}")

# Redis message-queue URL (synchronizes SocketIO messages across workers).
# 127.0.0.1 rather than localhost avoids eventlet DNS issues.
# Format: redis://:password@host:port/db
SOCKETIO_MESSAGE_QUEUE = os.environ.get(
    'SOCKETIO_REDIS_URL',
    f'redis://:{_REDIS_PASSWORD}@{_REDIS_HOST}:{_REDIS_PORT}/2')

# Queue enabled by default (required for multi-worker; harmless single-worker)
_use_message_queue = os.environ.get('SOCKETIO_USE_QUEUE', 'true').lower() == 'true'

socketio = SocketIO(
    app,
    cors_allowed_origins=["http://localhost:3000", "http://127.0.0.1:3000",
                          "http://localhost:5173", "https://valuefrontier.cn",
                          "http://valuefrontier.cn"],
    async_mode=_async_mode,
    message_queue=SOCKETIO_MESSAGE_QUEUE if _use_message_queue else None,
    manage_session=False,  # let Flask-Session own the session; avoids conflicts
    logger=True,
    engineio_logger=False,
    ping_timeout=120,  # seconds without a client response before disconnect
    ping_interval=25   # seconds between heartbeat pings
)
if _use_message_queue:
    print(f"鉁?Flask-SocketIO 宸查厤缃?Redis 娑堟伅闃熷垪: {SOCKETIO_MESSAGE_QUEUE}")
else:
    # Original message string was encoding-corrupted; rewritten.
    print("Flask-SocketIO single-worker mode (no message queue)")
@login_manager.user_loader def load_user(user_id): """Flask-Login 鐢ㄦ埛鍔犺浇鍥炶皟""" try: return User.query.get(int(user_id)) except Exception as e: app.logger.error(f"鐢ㄦ埛鍔犺浇閿欒: {e}") return None # 鍏ㄥ眬閿欒澶勭悊鍣?- 纭繚API鎺ュ彛濮嬬粓杩斿洖JSON @app.errorhandler(404) def not_found_error(error): """404閿欒澶勭悊""" if request.path.startswith('/api/'): return jsonify({'success': False, 'error': '鎺ュ彛涓嶅瓨鍦?}), 404 return error @app.errorhandler(500) def internal_error(error): """500閿欒澶勭悊""" db.session.rollback() if request.path.startswith('/api/'): return jsonify({'success': False, 'error': '鏈嶅姟鍣ㄥ唴閮ㄩ敊璇?}), 500 return error @app.errorhandler(405) def method_not_allowed_error(error): """405閿欒澶勭悊""" if request.path.startswith('/api/'): return jsonify({'success': False, 'error': '璇锋眰鏂规硶涓嶈鍏佽'}), 405 return error class Post(db.Model): """甯栧瓙妯″瀷""" id = db.Column(db.Integer, primary_key=True) event_id = db.Column(db.Integer, db.ForeignKey('event.id'), nullable=False) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) # 鍐呭 title = db.Column(db.String(200)) # 鏍囬(鍙€? content = db.Column(db.Text, nullable=False) # 鍐呭 content_type = db.Column(db.String(20), default='text') # 鍐呭绫诲瀷:text/rich_text/link # 鏃堕棿 created_at = db.Column(db.DateTime, default=beijing_now) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) # 缁熻 likes_count = db.Column(db.Integer, default=0) comments_count = db.Column(db.Integer, default=0) view_count = db.Column(db.Integer, default=0) # 鐘舵€? 
status = db.Column(db.String(20), default='active') # active/hidden/deleted is_top = db.Column(db.Boolean, default=False) # 鏄惁缃《 # 鍏崇郴 user = db.relationship('User', backref='posts') likes = db.relationship('PostLike', backref='post', lazy='dynamic') comments = db.relationship('Comment', backref='post', lazy='dynamic') class Comment(db.Model): """甯栧瓙璇勮妯″瀷""" id = db.Column(db.Integer, primary_key=True) post_id = db.Column(db.Integer, db.ForeignKey('post.id'), nullable=False) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) # 鍐呭 content = db.Column(db.Text, nullable=False) parent_id = db.Column(db.Integer, db.ForeignKey('comment.id')) # 鏃堕棿 created_at = db.Column(db.DateTime, default=beijing_now) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) # 缁熻 likes_count = db.Column(db.Integer, default=0) # 鐘舵€? status = db.Column(db.String(20), default='active') # active/hidden/deleted # 鍏崇郴 user = db.relationship('User', backref='comments') replies = db.relationship('Comment', backref=db.backref('parent', remote_side=[id])) class User(UserMixin, db.Model): """鐢ㄦ埛妯″瀷 - 瀹屽叏鍖归厤鐜版湁鏁版嵁搴撹〃缁撴瀯""" __tablename__ = 'user' # 涓婚敭 id = db.Column(db.Integer, primary_key=True, autoincrement=True) # 鍩虹璐﹀彿淇℃伅 username = db.Column(db.String(80), unique=True, nullable=False) email = db.Column(db.String(120), unique=True, nullable=True) password_hash = db.Column(db.String(255), nullable=True) email_confirmed = db.Column(db.Boolean, nullable=True, default=True) # 鏃堕棿瀛楁 created_at = db.Column(db.DateTime, nullable=True, default=beijing_now) last_seen = db.Column(db.DateTime, nullable=True, default=beijing_now) # 璐﹀彿鐘舵€? 
status = db.Column(db.String(20), nullable=True, default='active') # 涓汉璧勬枡淇℃伅 nickname = db.Column(db.String(30), nullable=True) avatar_url = db.Column(db.String(200), nullable=True) banner_url = db.Column(db.String(200), nullable=True) bio = db.Column(db.String(200), nullable=True) gender = db.Column(db.String(10), nullable=True) birth_date = db.Column(db.Date, nullable=True) location = db.Column(db.String(100), nullable=True) # 鑱旂郴鏂瑰紡 phone = db.Column(db.String(20), nullable=True) wechat_id = db.Column(db.String(80), nullable=True) # 寰俊鍙? # 瀹炲悕璁よ瘉 real_name = db.Column(db.String(30), nullable=True) id_number = db.Column(db.String(18), nullable=True) is_verified = db.Column(db.Boolean, nullable=True, default=False) verify_time = db.Column(db.DateTime, nullable=True) # 鎶曡祫鍋忓ソ trading_experience = db.Column(db.String(200), nullable=True) investment_style = db.Column(db.String(50), nullable=True) risk_preference = db.Column(db.String(20), nullable=True) investment_amount = db.Column(db.String(20), nullable=True) preferred_markets = db.Column(db.String(200), nullable=True) # 绀惧尯鏁版嵁 user_level = db.Column(db.Integer, nullable=True, default=1) reputation_score = db.Column(db.Integer, nullable=True, default=0) contribution_point = db.Column(db.Integer, nullable=True, default=0) post_count = db.Column(db.Integer, nullable=True, default=0) comment_count = db.Column(db.Integer, nullable=True, default=0) follower_count = db.Column(db.Integer, nullable=True, default=0) following_count = db.Column(db.Integer, nullable=True, default=0) # 鍒涗綔鑰呯浉鍏? 
is_creator = db.Column(db.Boolean, nullable=True, default=False) creator_type = db.Column(db.String(20), nullable=True) creator_tags = db.Column(db.String(200), nullable=True) # 閫氱煡璁剧疆 email_notifications = db.Column(db.Boolean, nullable=True, default=True) sms_notifications = db.Column(db.Boolean, nullable=True, default=False) wechat_notifications = db.Column(db.Boolean, nullable=True, default=False) notification_preferences = db.Column(db.String(500), nullable=True) # 闅愮鍜岀晫闈㈣缃? privacy_level = db.Column(db.String(20), nullable=True, default='public') theme_preference = db.Column(db.String(20), nullable=True, default='light') blocked_keywords = db.Column(db.String(500), nullable=True) # 鎵嬫満楠岃瘉鐩稿叧 phone_confirmed = db.Column(db.Boolean, nullable=True, default=False) # 娉ㄦ剰锛氬師琛ㄤ腑鏄痓lob锛岃繖閲屾敼涓築oolean鏇村悎鐞? phone_confirm_time = db.Column(db.DateTime, nullable=True) # 寰俊鐧诲綍鐩稿叧瀛楁 wechat_union_id = db.Column(db.String(100), nullable=True) # 寰俊UnionID wechat_open_id = db.Column(db.String(100), nullable=True) # 寰俊OpenID def __init__(self, username, email=None, password=None, phone=None): """鍒濆鍖栫敤鎴?"" self.username = username if email: self.email = email if phone: self.phone = phone if password: self.set_password(password) self.nickname = username # 榛樿鏄电О涓虹敤鎴峰悕 self.created_at = beijing_now() self.last_seen = beijing_now() def set_password(self, password): """璁剧疆瀵嗙爜""" if password: self.password_hash = generate_password_hash(password) def check_password(self, password): """楠岃瘉瀵嗙爜""" if not password or not self.password_hash: return False return check_password_hash(self.password_hash, password) def update_last_seen(self): """鏇存柊鏈€鍚庢椿璺冩椂闂?"" self.last_seen = beijing_now() db.session.commit() def confirm_email(self): """纭閭""" self.email_confirmed = True db.session.commit() def confirm_phone(self): """纭鎵嬫満鍙?"" self.phone_confirmed = True self.phone_confirm_time = beijing_now() db.session.commit() def bind_wechat(self, open_id, union_id=None, wechat_info=None): 
"""缁戝畾寰俊璐﹀彿""" self.wechat_open_id = open_id if union_id: self.wechat_union_id = union_id # 濡傛灉鎻愪緵浜嗗井淇$敤鎴蜂俊鎭紝鏇存柊澶村儚鍜屾樀绉? if wechat_info: if not self.avatar_url and wechat_info.get('headimgurl'): self.avatar_url = wechat_info['headimgurl'] if not self.nickname and wechat_info.get('nickname'): # 纭繚鏄电О缂栫爜姝g‘涓旈暱搴﹀悎鐞? nickname = self._sanitize_nickname(wechat_info['nickname']) self.nickname = nickname db.session.commit() def _sanitize_nickname(self, nickname): """娓呯悊鍜岄獙璇佹樀绉?"" if not nickname: return '寰俊鐢ㄦ埛' try: # 纭繚鏄纭殑UTF-8瀛楃涓? sanitized = str(nickname).strip() # 绉婚櫎鍙兘鐨勬帶鍒跺瓧绗? import re sanitized = re.sub(r'[\x00-\x1f\x7f-\x9f]', '', sanitized) # 闄愬埗闀垮害锛堥伩鍏嶈繃闀跨殑鏄电О锛? if len(sanitized) > 50: sanitized = sanitized[:47] + '...' # 濡傛灉娓呯悊鍚庝负绌猴紝浣跨敤榛樿鍊? if not sanitized: sanitized = '寰俊鐢ㄦ埛' return sanitized except Exception as e: return '寰俊鐢ㄦ埛' def unbind_wechat(self): """瑙g粦寰俊璐﹀彿""" self.wechat_open_id = None self.wechat_union_id = None db.session.commit() def increment_post_count(self): """澧炲姞鍙戝笘鏁?"" self.post_count = (self.post_count or 0) + 1 db.session.commit() def increment_comment_count(self): """澧炲姞璇勮鏁?"" self.comment_count = (self.comment_count or 0) + 1 db.session.commit() def add_reputation(self, points): """澧炲姞澹拌獕鍒嗘暟""" self.reputation_score = (self.reputation_score or 0) + points db.session.commit() def to_dict(self, include_sensitive=False): """杞崲涓哄瓧鍏?"" data = { 'id': self.id, 'username': self.username, 'nickname': self.nickname or self.username, 'avatar_url': self.avatar_url, 'banner_url': self.banner_url, 'bio': self.bio, 'gender': self.gender, 'location': self.location, 'user_level': self.user_level or 1, 'reputation_score': self.reputation_score or 0, 'contribution_point': self.contribution_point or 0, 'post_count': self.post_count or 0, 'comment_count': self.comment_count or 0, 'follower_count': self.follower_count or 0, 'following_count': self.following_count or 0, 'is_creator': self.is_creator or False, 'creator_type': self.creator_type, 
'creator_tags': self.creator_tags, 'is_verified': self.is_verified or False, 'created_at': self.created_at.isoformat() if self.created_at else None, 'last_seen': self.last_seen.isoformat() if self.last_seen else None, 'status': self.status, 'has_wechat': bool(self.wechat_open_id), 'is_authenticated': True } # 鑾峰彇鐢ㄦ埛璁㈤槄淇℃伅锛堜粠 user_subscriptions 琛級 subscription = UserSubscription.query.filter_by(user_id=self.id).first() if subscription: data.update({ 'subscription_type': subscription.subscription_type, 'subscription_status': subscription.subscription_status, 'billing_cycle': subscription.billing_cycle, 'start_date': subscription.start_date.isoformat() if subscription.start_date else None, 'end_date': subscription.end_date.isoformat() if subscription.end_date else None, 'auto_renewal': subscription.auto_renewal }) else: # 鏃犺闃呮椂浣跨敤榛樿鍊? data.update({ 'subscription_type': 'free', 'subscription_status': 'inactive', 'billing_cycle': None, 'start_date': None, 'end_date': None, 'auto_renewal': False }) # 鏁忔劅淇℃伅鍙湪闇€瑕佹椂鍖呭惈 if include_sensitive: data.update({ 'email': self.email, 'phone': self.phone, 'email_confirmed': self.email_confirmed, 'phone_confirmed': self.phone_confirmed, 'real_name': self.real_name, 'birth_date': self.birth_date.isoformat() if self.birth_date else None, 'trading_experience': self.trading_experience, 'investment_style': self.investment_style, 'risk_preference': self.risk_preference, 'investment_amount': self.investment_amount, 'preferred_markets': self.preferred_markets, 'email_notifications': self.email_notifications, 'sms_notifications': self.sms_notifications, 'wechat_notifications': self.wechat_notifications, 'privacy_level': self.privacy_level, 'theme_preference': self.theme_preference }) return data def to_public_dict(self): """鍏紑淇℃伅瀛楀吀锛堢敤浜庢樉绀虹粰鍏朵粬鐢ㄦ埛锛?"" return { 'id': self.id, 'username': self.username, 'nickname': self.nickname or self.username, 'avatar_url': self.avatar_url, 'bio': self.bio, 'user_level': self.user_level or 1, 
'reputation_score': self.reputation_score or 0, 'post_count': self.post_count or 0, 'follower_count': self.follower_count or 0, 'is_creator': self.is_creator or False, 'creator_type': self.creator_type, 'is_verified': self.is_verified or False, 'created_at': self.created_at.isoformat() if self.created_at else None } @staticmethod def find_by_login_info(login_info): """鏍规嵁鐧诲綍淇℃伅鏌ユ壘鐢ㄦ埛锛堟敮鎸佺敤鎴峰悕銆侀偖绠便€佹墜鏈哄彿锛?"" return User.query.filter( db.or_( User.username == login_info, User.email == login_info, User.phone == login_info ) ).first() @staticmethod def find_by_wechat_openid(open_id): """鏍规嵁寰俊OpenID鏌ユ壘鐢ㄦ埛""" return User.query.filter_by(wechat_open_id=open_id).first() @staticmethod def find_by_wechat_unionid(union_id): """鏍规嵁寰俊UnionID鏌ユ壘鐢ㄦ埛""" return User.query.filter_by(wechat_union_id=union_id).first() @staticmethod def is_username_taken(username): """妫€鏌ョ敤鎴峰悕鏄惁宸茶浣跨敤""" return User.query.filter_by(username=username).first() is not None @staticmethod def is_email_taken(email): """妫€鏌ラ偖绠辨槸鍚﹀凡琚娇鐢?"" return User.query.filter_by(email=email).first() is not None @staticmethod def is_phone_taken(phone): """妫€鏌ユ墜鏈哄彿鏄惁宸茶浣跨敤""" return User.query.filter_by(phone=phone).first() is not None def __repr__(self): return f'' # ============================================ # 璁㈤槄鍔熻兘妯″潡锛堝畨鍏ㄧ増鏈?- 鐙珛琛級 # ============================================ class UserSubscription(db.Model): """鐢ㄦ埛璁㈤槄琛?- 鐙珛浜庣幇鏈塙ser琛?"" __tablename__ = 'user_subscriptions' id = db.Column(db.Integer, primary_key=True, autoincrement=True) user_id = db.Column(db.Integer, nullable=False, unique=True, index=True) subscription_type = db.Column(db.String(10), nullable=False, default='free') subscription_status = db.Column(db.String(20), nullable=False, default='active') start_date = db.Column(db.DateTime, nullable=True) end_date = db.Column(db.DateTime, nullable=True) billing_cycle = db.Column(db.String(10), nullable=True) auto_renewal = db.Column(db.Boolean, nullable=False, default=False) created_at = 
db.Column(db.DateTime, default=beijing_now) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) def is_active(self): if self.subscription_status != 'active': return False if self.subscription_type == 'free': return True if self.end_date: try: now = beijing_now() if self.end_date < now: return False except Exception as e: return False return True def days_left(self): if self.subscription_type == 'free' or not self.end_date: return 999 try: now = beijing_now() delta = self.end_date - now return max(0, delta.days) except Exception as e: return 0 def to_dict(self): return { 'type': self.subscription_type, 'status': self.subscription_status, 'is_active': self.is_active(), 'days_left': self.days_left(), 'start_date': self.start_date.isoformat() if self.start_date else None, 'end_date': self.end_date.isoformat() if self.end_date else None, 'billing_cycle': self.billing_cycle, 'auto_renewal': self.auto_renewal } class SubscriptionPlan(db.Model): """璁㈤槄濂楅琛?"" __tablename__ = 'subscription_plans' id = db.Column(db.Integer, primary_key=True, autoincrement=True) name = db.Column(db.String(50), nullable=False, unique=True) display_name = db.Column(db.String(100), nullable=False) description = db.Column(db.Text, nullable=True) monthly_price = db.Column(db.Numeric(10, 2), nullable=False) yearly_price = db.Column(db.Numeric(10, 2), nullable=False) features = db.Column(db.Text, nullable=True) pricing_options = db.Column(db.Text, nullable=True) # JSON鏍煎紡锛歔{"months": 1, "price": 99}, {"months": 12, "price": 999}] is_active = db.Column(db.Boolean, default=True) sort_order = db.Column(db.Integer, default=0) created_at = db.Column(db.DateTime, default=beijing_now) def to_dict(self): # 瑙f瀽pricing_options锛堝鏋滃瓨鍦級 pricing_opts = None if self.pricing_options: try: pricing_opts = json.loads(self.pricing_options) except: pricing_opts = None # 濡傛灉娌℃湁pricing_options锛屽垯浠巑onthly_price鍜寉early_price鐢熸垚榛樿閫夐」 if not pricing_opts: pricing_opts = [ { 'months': 1, 'price': 
float(self.monthly_price) if self.monthly_price else 0, 'label': '鏈堜粯', 'cycle_key': 'monthly' }, { 'months': 12, 'price': float(self.yearly_price) if self.yearly_price else 0, 'label': '骞翠粯', 'cycle_key': 'yearly', 'discount_percent': 20 # 骞翠粯榛樿20%鎶樻墸 } ] return { 'id': self.id, 'name': self.name, 'display_name': self.display_name, 'description': self.description, 'monthly_price': float(self.monthly_price) if self.monthly_price else 0, 'yearly_price': float(self.yearly_price) if self.yearly_price else 0, 'pricing_options': pricing_opts, # 鏂板锛氱伒娲昏璐瑰懆鏈熼€夐」 'features': json.loads(self.features) if self.features else [], 'is_active': self.is_active, 'sort_order': self.sort_order } class PaymentOrder(db.Model): """鏀粯璁㈠崟琛?"" __tablename__ = 'payment_orders' id = db.Column(db.Integer, primary_key=True, autoincrement=True) order_no = db.Column(db.String(32), unique=True, nullable=False) user_id = db.Column(db.Integer, nullable=False) plan_name = db.Column(db.String(20), nullable=False) billing_cycle = db.Column(db.String(10), nullable=False) amount = db.Column(db.Numeric(10, 2), nullable=False) original_amount = db.Column(db.Numeric(10, 2), nullable=True) # 鍘熶环 discount_amount = db.Column(db.Numeric(10, 2), nullable=True, default=0) # 鎶樻墸閲戦 promo_code_id = db.Column(db.Integer, db.ForeignKey('promo_codes.id'), nullable=True) # 浼樻儬鐮両D payment_method = db.Column(db.String(20), default='wechat') # 鏀粯鏂瑰紡: wechat/alipay wechat_order_id = db.Column(db.String(64), nullable=True) # 寰俊浜ゆ槗鍙? alipay_trade_no = db.Column(db.String(64), nullable=True) # 鏀粯瀹濅氦鏄撳彿 prepay_id = db.Column(db.String(64), nullable=True) qr_code_url = db.Column(db.String(200), nullable=True) # 寰俊鏀粯浜岀淮鐮乁RL pay_url = db.Column(db.String(2000), nullable=True) # 鏀粯瀹濇敮浠橀摼鎺ワ紙杈冮暱锛? 
status = db.Column(db.String(20), default='pending') created_at = db.Column(db.DateTime, default=beijing_now) paid_at = db.Column(db.DateTime, nullable=True) expired_at = db.Column(db.DateTime, nullable=True) remark = db.Column(db.String(200), nullable=True) # 鍏宠仈浼樻儬鐮? promo_code = db.relationship('PromoCode', backref='orders', lazy=True, foreign_keys=[promo_code_id]) def __init__(self, user_id, plan_name, billing_cycle, amount, original_amount=None, discount_amount=0): self.user_id = user_id self.plan_name = plan_name self.billing_cycle = billing_cycle self.amount = amount self.original_amount = original_amount if original_amount is not None else amount self.discount_amount = discount_amount or 0 import random timestamp = int(beijing_now().timestamp() * 1000000) random_suffix = random.randint(1000, 9999) self.order_no = f"{timestamp}{user_id:04d}{random_suffix}" self.expired_at = beijing_now() + timedelta(minutes=30) def is_expired(self): if not self.expired_at: return False try: now = beijing_now() return now > self.expired_at except Exception as e: return False def mark_as_paid(self, transaction_id, payment_method=None): """ 鏍囪璁㈠崟涓哄凡鏀粯 Args: transaction_id: 浜ゆ槗鍙凤紙寰俊鎴栨敮浠樺疂锛? payment_method: 鏀粯鏂瑰紡锛堝彲閫夛紝濡傛灉宸茶缃垯涓嶈鐩栵級 """ self.status = 'paid' self.paid_at = beijing_now() # 鏍规嵁鏀粯鏂瑰紡瀛樺偍浜ゆ槗鍙? 
if payment_method: self.payment_method = payment_method if self.payment_method == 'alipay': self.alipay_trade_no = transaction_id else: self.wechat_order_id = transaction_id def to_dict(self): return { 'id': self.id, 'order_no': self.order_no, 'user_id': self.user_id, 'plan_name': self.plan_name, 'billing_cycle': self.billing_cycle, 'amount': float(self.amount) if self.amount else 0, 'original_amount': float(self.original_amount) if self.original_amount else None, 'discount_amount': float(self.discount_amount) if self.discount_amount else 0, 'promo_code': self.promo_code.code if self.promo_code else None, 'payment_method': self.payment_method or 'wechat', 'qr_code_url': self.qr_code_url, 'pay_url': self.pay_url, 'status': self.status, 'is_expired': self.is_expired(), 'created_at': self.created_at.isoformat() if self.created_at else None, 'paid_at': self.paid_at.isoformat() if self.paid_at else None, 'expired_at': self.expired_at.isoformat() if self.expired_at else None, 'remark': self.remark } class PromoCode(db.Model): """浼樻儬鐮佽〃""" __tablename__ = 'promo_codes' id = db.Column(db.Integer, primary_key=True, autoincrement=True) code = db.Column(db.String(50), unique=True, nullable=False, index=True) description = db.Column(db.String(200), nullable=True) # 鎶樻墸绫诲瀷鍜屽€? discount_type = db.Column(db.String(20), nullable=False) # 'percentage' 鎴?'fixed_amount' discount_value = db.Column(db.Numeric(10, 2), nullable=False) # 閫傜敤鑼冨洿 applicable_plans = db.Column(db.String(200), nullable=True) # JSON鏍煎紡 applicable_cycles = db.Column(db.String(50), nullable=True) # JSON鏍煎紡 min_amount = db.Column(db.Numeric(10, 2), nullable=True) # 浣跨敤闄愬埗 max_uses = db.Column(db.Integer, nullable=True) max_uses_per_user = db.Column(db.Integer, default=1) current_uses = db.Column(db.Integer, default=0) # 鏈夋晥鏈? valid_from = db.Column(db.DateTime, nullable=False) valid_until = db.Column(db.DateTime, nullable=False) # 鐘舵€? 
is_active = db.Column(db.Boolean, default=True) created_by = db.Column(db.Integer, nullable=True) created_at = db.Column(db.DateTime, default=beijing_now) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) def to_dict(self): return { 'id': self.id, 'code': self.code, 'description': self.description, 'discount_type': self.discount_type, 'discount_value': float(self.discount_value) if self.discount_value else 0, 'applicable_plans': json.loads(self.applicable_plans) if self.applicable_plans else None, 'applicable_cycles': json.loads(self.applicable_cycles) if self.applicable_cycles else None, 'min_amount': float(self.min_amount) if self.min_amount else None, 'max_uses': self.max_uses, 'max_uses_per_user': self.max_uses_per_user, 'current_uses': self.current_uses, 'valid_from': self.valid_from.isoformat() if self.valid_from else None, 'valid_until': self.valid_until.isoformat() if self.valid_until else None, 'is_active': self.is_active } class PromoCodeUsage(db.Model): """浼樻儬鐮佷娇鐢ㄨ褰曡〃""" __tablename__ = 'promo_code_usage' id = db.Column(db.Integer, primary_key=True, autoincrement=True) promo_code_id = db.Column(db.Integer, db.ForeignKey('promo_codes.id'), nullable=False) user_id = db.Column(db.Integer, nullable=False, index=True) order_id = db.Column(db.Integer, db.ForeignKey('payment_orders.id'), nullable=False) original_amount = db.Column(db.Numeric(10, 2), nullable=False) discount_amount = db.Column(db.Numeric(10, 2), nullable=False) final_amount = db.Column(db.Numeric(10, 2), nullable=False) used_at = db.Column(db.DateTime, default=beijing_now) # 鍏崇郴 promo_code = db.relationship('PromoCode', backref='usages') order = db.relationship('PaymentOrder', backref='promo_usage') class SubscriptionUpgrade(db.Model): """璁㈤槄鍗囩骇/闄嶇骇璁板綍琛?"" __tablename__ = 'subscription_upgrades' id = db.Column(db.Integer, primary_key=True, autoincrement=True) user_id = db.Column(db.Integer, nullable=False, index=True) order_id = db.Column(db.Integer, 
db.ForeignKey('payment_orders.id'), nullable=False) # 鍘熻闃呬俊鎭? from_plan = db.Column(db.String(20), nullable=False) from_cycle = db.Column(db.String(10), nullable=False) from_end_date = db.Column(db.DateTime, nullable=True) # 鏂拌闃呬俊鎭? to_plan = db.Column(db.String(20), nullable=False) to_cycle = db.Column(db.String(10), nullable=False) to_end_date = db.Column(db.DateTime, nullable=False) # 浠锋牸璁$畻 remaining_value = db.Column(db.Numeric(10, 2), nullable=False) upgrade_amount = db.Column(db.Numeric(10, 2), nullable=False) actual_amount = db.Column(db.Numeric(10, 2), nullable=False) upgrade_type = db.Column(db.String(20), nullable=False) # 'plan_upgrade', 'cycle_change', 'both' created_at = db.Column(db.DateTime, default=beijing_now) # 鍏崇郴 order = db.relationship('PaymentOrder', backref='upgrade_record') # ============================================ # 妯℃嫙鐩樼浉鍏虫ā鍨? # ============================================ class SimulationAccount(db.Model): """妯℃嫙璐︽埛""" __tablename__ = 'simulation_accounts' id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False, unique=True) account_name = db.Column(db.String(100), default='鎴戠殑妯℃嫙璐︽埛') initial_capital = db.Column(db.Numeric(15, 2), default=1000000.00) # 鍒濆璧勯噾 available_cash = db.Column(db.Numeric(15, 2), default=1000000.00) # 鍙敤璧勯噾 frozen_cash = db.Column(db.Numeric(15, 2), default=0.00) # 鍐荤粨璧勯噾 position_value = db.Column(db.Numeric(15, 2), default=0.00) # 鎸佷粨甯傚€? total_assets = db.Column(db.Numeric(15, 2), default=1000000.00) # 鎬昏祫浜? total_profit = db.Column(db.Numeric(15, 2), default=0.00) # 鎬荤泩浜? total_profit_rate = db.Column(db.Numeric(10, 4), default=0.00) # 鎬绘敹鐩婄巼 daily_profit = db.Column(db.Numeric(15, 2), default=0.00) # 鏃ョ泩浜? 
daily_profit_rate = db.Column(db.Numeric(10, 4), default=0.00) # 鏃ユ敹鐩婄巼 created_at = db.Column(db.DateTime, default=beijing_now) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) last_settlement_date = db.Column(db.Date) # 鏈€鍚庣粨绠楁棩鏈? # 鍏崇郴 user = db.relationship('User', backref='simulation_account') positions = db.relationship('SimulationPosition', backref='account', lazy='dynamic') orders = db.relationship('SimulationOrder', backref='account', lazy='dynamic') transactions = db.relationship('SimulationTransaction', backref='account', lazy='dynamic') def calculate_total_assets(self): """璁$畻鎬昏祫浜?"" self.total_assets = self.available_cash + self.frozen_cash + self.position_value self.total_profit = self.total_assets - self.initial_capital self.total_profit_rate = (self.total_profit / self.initial_capital) * 100 if self.initial_capital > 0 else 0 return self.total_assets class SimulationPosition(db.Model): """妯℃嫙鎸佷粨""" __tablename__ = 'simulation_positions' id = db.Column(db.Integer, primary_key=True) account_id = db.Column(db.Integer, db.ForeignKey('simulation_accounts.id'), nullable=False) stock_code = db.Column(db.String(20), nullable=False) stock_name = db.Column(db.String(100)) position_qty = db.Column(db.Integer, default=0) # 鎸佷粨鏁伴噺 available_qty = db.Column(db.Integer, default=0) # 鍙敤鏁伴噺(T+1) frozen_qty = db.Column(db.Integer, default=0) # 鍐荤粨鏁伴噺 avg_cost = db.Column(db.Numeric(10, 3), default=0.00) # 骞冲潎鎴愭湰 current_price = db.Column(db.Numeric(10, 3), default=0.00) # 褰撳墠浠锋牸 market_value = db.Column(db.Numeric(15, 2), default=0.00) # 甯傚€? 
profit = db.Column(db.Numeric(15, 2), default=0.00) # 鐩堜簭 profit_rate = db.Column(db.Numeric(10, 4), default=0.00) # 鐩堜簭姣斾緥 today_profit = db.Column(db.Numeric(15, 2), default=0.00) # 浠婃棩鐩堜簭 today_profit_rate = db.Column(db.Numeric(10, 4), default=0.00) # 浠婃棩鐩堜簭姣斾緥 created_at = db.Column(db.DateTime, default=beijing_now) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) __table_args__ = ( db.UniqueConstraint('account_id', 'stock_code', name='unique_account_stock'), ) def update_market_value(self, current_price): """鏇存柊甯傚€煎拰鐩堜簭""" self.current_price = current_price self.market_value = self.position_qty * current_price total_cost = self.position_qty * self.avg_cost self.profit = self.market_value - total_cost self.profit_rate = (self.profit / total_cost * 100) if total_cost > 0 else 0 return self.market_value class SimulationOrder(db.Model): """妯℃嫙璁㈠崟""" __tablename__ = 'simulation_orders' id = db.Column(db.Integer, primary_key=True) account_id = db.Column(db.Integer, db.ForeignKey('simulation_accounts.id'), nullable=False) order_no = db.Column(db.String(32), unique=True, nullable=False) stock_code = db.Column(db.String(20), nullable=False) stock_name = db.Column(db.String(100)) order_type = db.Column(db.String(10), nullable=False) # BUY/SELL price_type = db.Column(db.String(10), default='MARKET') # MARKET/LIMIT order_price = db.Column(db.Numeric(10, 3)) # 濮旀墭浠锋牸 order_qty = db.Column(db.Integer, nullable=False) # 濮旀墭鏁伴噺 filled_qty = db.Column(db.Integer, default=0) # 鎴愪氦鏁伴噺 filled_price = db.Column(db.Numeric(10, 3)) # 鎴愪氦浠锋牸 filled_amount = db.Column(db.Numeric(15, 2)) # 鎴愪氦閲戦 commission = db.Column(db.Numeric(10, 2), default=0.00) # 鎵嬬画璐? stamp_tax = db.Column(db.Numeric(10, 2), default=0.00) # 鍗拌姳绋? transfer_fee = db.Column(db.Numeric(10, 2), default=0.00) # 杩囨埛璐? total_fee = db.Column(db.Numeric(10, 2), default=0.00) # 鎬昏垂鐢? 
status = db.Column(db.String(20), default='PENDING') # PENDING/PARTIAL/FILLED/CANCELLED/REJECTED reject_reason = db.Column(db.String(200)) order_time = db.Column(db.DateTime, default=beijing_now) filled_time = db.Column(db.DateTime) cancel_time = db.Column(db.DateTime) def calculate_fees(self): """璁$畻浜ゆ槗璐圭敤""" if not self.filled_amount: return 0 # 浣i噾锛堜竾鍒嗕箣2.5锛屾渶浣?鍏冿級 self.commission = max(float(self.filled_amount) * 0.00025, 5.0) # 鍗拌姳绋庯紙鍗栧嚭鏃舵敹鍙栧崈鍒嗕箣1锛? if self.order_type == 'SELL': self.stamp_tax = float(self.filled_amount) * 0.001 else: self.stamp_tax = 0 # 杩囨埛璐癸紙鍙屽悜鏀跺彇锛屼竾鍒嗕箣0.2锛? self.transfer_fee = float(self.filled_amount) * 0.00002 # 鎬昏垂鐢? self.total_fee = self.commission + self.stamp_tax + self.transfer_fee return self.total_fee class SimulationTransaction(db.Model): """妯℃嫙鎴愪氦璁板綍""" __tablename__ = 'simulation_transactions' id = db.Column(db.Integer, primary_key=True) account_id = db.Column(db.Integer, db.ForeignKey('simulation_accounts.id'), nullable=False) order_id = db.Column(db.Integer, db.ForeignKey('simulation_orders.id'), nullable=False) transaction_no = db.Column(db.String(32), unique=True, nullable=False) stock_code = db.Column(db.String(20), nullable=False) stock_name = db.Column(db.String(100)) transaction_type = db.Column(db.String(10), nullable=False) # BUY/SELL transaction_price = db.Column(db.Numeric(10, 3), nullable=False) transaction_qty = db.Column(db.Integer, nullable=False) transaction_amount = db.Column(db.Numeric(15, 2), nullable=False) commission = db.Column(db.Numeric(10, 2), default=0.00) stamp_tax = db.Column(db.Numeric(10, 2), default=0.00) transfer_fee = db.Column(db.Numeric(10, 2), default=0.00) total_fee = db.Column(db.Numeric(10, 2), default=0.00) transaction_time = db.Column(db.DateTime, default=beijing_now) settlement_date = db.Column(db.Date) # T+1缁撶畻鏃ユ湡 # 鍏崇郴 order = db.relationship('SimulationOrder', backref='transactions') class SimulationDailyStats(db.Model): """妯℃嫙璐︽埛鏃ョ粺璁?"" __tablename__ = 'simulation_daily_stats' id 
= db.Column(db.Integer, primary_key=True) account_id = db.Column(db.Integer, db.ForeignKey('simulation_accounts.id'), nullable=False) stat_date = db.Column(db.Date, nullable=False) opening_assets = db.Column(db.Numeric(15, 2)) # 鏈熷垵璧勪骇 closing_assets = db.Column(db.Numeric(15, 2)) # 鏈熸湯璧勪骇 daily_profit = db.Column(db.Numeric(15, 2)) # 鏃ョ泩浜? daily_profit_rate = db.Column(db.Numeric(10, 4)) # 鏃ユ敹鐩婄巼 total_profit = db.Column(db.Numeric(15, 2)) # 绱鐩堜簭 total_profit_rate = db.Column(db.Numeric(10, 4)) # 绱鏀剁泭鐜? trade_count = db.Column(db.Integer, default=0) # 浜ゆ槗娆℃暟 win_count = db.Column(db.Integer, default=0) # 鐩堝埄娆℃暟 loss_count = db.Column(db.Integer, default=0) # 浜忔崯娆℃暟 max_profit = db.Column(db.Numeric(15, 2)) # 鏈€澶х泩鍒? max_loss = db.Column(db.Numeric(15, 2)) # 鏈€澶т簭鎹? created_at = db.Column(db.DateTime, default=beijing_now) __table_args__ = ( db.UniqueConstraint('account_id', 'stat_date', name='unique_account_date'), ) def get_user_subscription_safe(user_id): """瀹夊叏鍦拌幏鍙栫敤鎴疯闃呬俊鎭?"" try: subscription = UserSubscription.query.filter_by(user_id=user_id).first() if not subscription: subscription = UserSubscription(user_id=user_id) db.session.add(subscription) db.session.commit() return subscription except Exception as e: # 杩斿洖榛樿鍏嶈垂鐗堟湰瀵硅薄 class DefaultSub: def to_dict(self): return { 'type': 'free', 'status': 'active', 'is_active': True, 'days_left': 999, 'billing_cycle': None, 'auto_renewal': False } return DefaultSub() def activate_user_subscription(user_id, plan_type, billing_cycle, extend_from_now=False): """ 婵€娲荤敤鎴疯闃咃紙鏂扮増锛氱画璐规椂浠庡綋鍓嶈闃呯粨鏉熸椂闂村紑濮嬪欢闀匡級 Args: user_id: 鐢ㄦ埛ID plan_type: 濂楅绫诲瀷 (pro/max) billing_cycle: 璁¤垂鍛ㄦ湡 (monthly/quarterly/semiannual/yearly) extend_from_now: 搴熷純鍙傛暟锛屼繚鐣欎互鍏煎锛堢幇鍦ㄨ嚜鍔ㄥ垽鏂級 Returns: UserSubscription 瀵硅薄 鎴?None """ try: subscription = UserSubscription.query.filter_by(user_id=user_id).first() if not subscription: # 鏂扮敤鎴凤紝鍒涘缓璁㈤槄璁板綍 subscription = UserSubscription(user_id=user_id) db.session.add(subscription) # 鏇存柊璁㈤槄绫诲瀷鍜岀姸鎬? 
subscription.subscription_type = plan_type subscription.subscription_status = 'active' subscription.billing_cycle = billing_cycle # 璁$畻璁㈤槄鍛ㄦ湡澶╂暟 cycle_days_map = { 'monthly': 30, 'quarterly': 90, # 3涓湀 'semiannual': 180, # 6涓湀 'yearly': 365 } days = cycle_days_map.get(billing_cycle, 30) now = beijing_now() # 鍒ゆ柇鏄柊璐繕鏄画璐? if subscription.end_date and subscription.end_date > now: # 缁垂锛氫粠褰撳墠璁㈤槄缁撴潫鏃堕棿寮€濮嬪欢闀? start_date = subscription.end_date end_date = start_date + timedelta(days=days) else: # 鏂拌喘鎴栬繃鏈熷悗閲嶆柊璐拱锛氫粠褰撳墠鏃堕棿寮€濮? start_date = now end_date = now + timedelta(days=days) subscription.start_date = start_date subscription.end_date = end_date subscription.updated_at = now db.session.commit() return subscription except Exception as e: print(f"婵€娲昏闃呭け璐? {e}") db.session.rollback() return None def validate_promo_code(code, plan_name, billing_cycle, amount, user_id): """楠岃瘉浼樻儬鐮? Returns: tuple: (promo_code_obj, error_message) """ try: promo = PromoCode.query.filter_by(code=code.upper(), is_active=True).first() if not promo: return None, "浼樻儬鐮佷笉瀛樺湪鎴栧凡澶辨晥" # 妫€鏌ユ湁鏁堟湡 now = beijing_now() if now < promo.valid_from: return None, "浼樻儬鐮佸皻鏈敓鏁? if now > promo.valid_until: return None, "浼樻儬鐮佸凡杩囨湡" # 妫€鏌ヤ娇鐢ㄦ鏁? if promo.max_uses and promo.current_uses >= promo.max_uses: return None, "浼樻儬鐮佸凡琚娇鐢ㄥ畬" # 妫€鏌ユ瘡鐢ㄦ埛浣跨敤娆℃暟 if promo.max_uses_per_user: user_usage_count = PromoCodeUsage.query.filter_by( promo_code_id=promo.id, user_id=user_id ).count() if user_usage_count >= promo.max_uses_per_user: return None, f"鎮ㄥ凡浣跨敤杩囨浼樻儬鐮侊紙闄愮敤{promo.max_uses_per_user}娆★級" # 妫€鏌ラ€傜敤濂楅 if promo.applicable_plans: try: applicable = json.loads(promo.applicable_plans) if plan_name not in applicable: return None, "璇ヤ紭鎯犵爜涓嶉€傜敤浜庢濂楅" except: pass # 妫€鏌ラ€傜敤鍛ㄦ湡 if promo.applicable_cycles: try: applicable = json.loads(promo.applicable_cycles) if billing_cycle not in applicable: return None, "璇ヤ紭鎯犵爜涓嶉€傜敤浜庢璁¤垂鍛ㄦ湡" except: pass # 妫€鏌ユ渶浣庢秷璐? 
def calculate_discount(promo_code, amount):
    """Return the discount for *promo_code* applied to *amount*, capped at *amount*."""
    try:
        if promo_code.discount_type == 'percentage':
            discount = amount * (float(promo_code.discount_value) / 100)
        else:  # fixed_amount
            discount = float(promo_code.discount_value)
        # The discount can never exceed the total amount.
        return min(discount, amount)
    except Exception:
        # Best-effort: any malformed promo yields no discount.
        return 0


def _plan_price_for_cycle(plan, cycle):
    """Resolve the price of *plan* for billing cycle *cycle*.

    Prefers the JSON pricing_options column (matched by cycle_key or month
    count), falling back to the legacy monthly_price/yearly_price columns.
    Returns a float; 0 when no price is configured.
    """
    months_for_cycle = {'monthly': 1, 'quarterly': 3, 'semiannual': 6, 'yearly': 12}
    if plan.pricing_options:
        try:
            for opt in json.loads(plan.pricing_options):
                if (opt.get('cycle_key', '') == cycle
                        or opt.get('months', 0) == months_for_cycle.get(cycle)):
                    return float(opt.get('price', 0))
        except (ValueError, TypeError):
            pass
    if cycle == 'yearly':
        return float(plan.yearly_price) if plan.yearly_price else 0
    # Default to the monthly price.
    return float(plan.monthly_price) if plan.monthly_price else 0


def _current_price_and_cycle(current_sub, plan_obj, current_cycle):
    """Best-effort (price, cycle) of the user's current subscription.

    When the stored cycle is empty, infer it from the subscription's total
    span; falls back to the yearly price when pricing_options yields nothing.
    """
    current_price = None
    if plan_obj.pricing_options:
        try:
            pricing_opts = json.loads(plan_obj.pricing_options)
            if not current_cycle or current_cycle.strip() == '':
                # Infer the billing cycle from total subscription length.
                span_days = ((current_sub.end_date - current_sub.start_date).days
                             if current_sub.start_date else 365)
                if span_days <= 35:
                    inferred_cycle = 'monthly'
                elif span_days <= 100:
                    inferred_cycle = 'quarterly'
                elif span_days <= 200:
                    inferred_cycle = 'semiannual'
                else:
                    inferred_cycle = 'yearly'
            else:
                inferred_cycle = current_cycle
            for opt in pricing_opts:
                if opt.get('cycle_key') == inferred_cycle:
                    current_price = float(opt.get('price', 0))
                    current_cycle = inferred_cycle
                    break
        except (ValueError, TypeError):
            pass
    if current_price is None or current_price <= 0:
        current_price = float(plan_obj.yearly_price) if plan_obj.yearly_price else 0
        current_cycle = 'yearly'
    return current_price, current_cycle


def calculate_subscription_price_simple(user_id, to_plan_name, to_cycle, promo_code=None):
    """
    Simplified price calculation: renewals and new purchases pay the same
    full price; only Pro→Max upgrades credit the remaining value.

    Args:
        user_id: user id
        to_plan_name: target plan name (pro/max)
        to_cycle: billing cycle (monthly/quarterly/semiannual/yearly)
        promo_code: optional promo code, applied after the upgrade credit

    Returns:
        dict with is_renewal/is_upgrade/is_downgrade, subscription_type,
        current_plan/current_cycle, new_plan_price, original_amount,
        discount_amount, final_amount, promo_code, promo_error —
        or {'error': message} on failure.
    """
    cycle_days_map = {'monthly': 30, 'quarterly': 90, 'semiannual': 180, 'yearly': 365}
    try:
        # 1. Current subscription (may be None).
        current_sub = UserSubscription.query.filter_by(user_id=user_id).first()

        # 2. Target plan.
        to_plan = SubscriptionPlan.query.filter_by(name=to_plan_name, is_active=True).first()
        if not to_plan:
            return {'error': '鐩爣濂楅涓嶅瓨鍦'}

        # 3. Price for the requested cycle.
        price = _plan_price_for_cycle(to_plan, to_cycle)
        if price <= 0:
            return {'error': f'{to_cycle} 鍛ㄦ湡浠锋牸鏈厤缃'}

        # 4. Classify the purchase and compute the payable amount.
        is_renewal = False
        is_upgrade = False
        is_downgrade = False
        subscription_type = 'new'
        current_plan = None
        current_cycle = None
        remaining_value = 0
        final_price = price

        if current_sub and current_sub.subscription_type in ['pro', 'max']:
            current_plan = current_sub.subscription_type
            current_cycle = current_sub.billing_cycle

            if current_plan == to_plan_name:
                # Same-plan renewal: extend at full price.
                is_renewal = True
                subscription_type = 'renew'
            elif current_plan == 'pro' and to_plan_name == 'max':
                # Upgrade Pro → Max: credit the remaining value.
                is_upgrade = True
                subscription_type = 'upgrade'
                # FIX: end_date is written with beijing_now() elsewhere, so it
                # must also be compared against beijing_now(); datetime.utcnow()
                # skewed this check by 8 hours.
                now = beijing_now()
                if current_sub.end_date and current_sub.end_date > now:
                    current_plan_obj = SubscriptionPlan.query.filter_by(
                        name=current_plan, is_active=True).first()
                    if current_plan_obj:
                        current_price, current_cycle = _current_price_and_cycle(
                            current_sub, current_plan_obj, current_cycle)
                        if current_price and current_price > 0:
                            remaining_days = (current_sub.end_date - now).days
                            total_days = cycle_days_map.get(current_cycle, 365)
                            if total_days > 0 and remaining_days > 0:
                                # Pro-rated value of the unused subscription time.
                                remaining_value = current_price * (remaining_days / total_days)
                                final_price = max(0, price - remaining_value)
                                # Free upgrade when the credit covers the new plan.
                                if remaining_value >= price:
                                    final_price = 0
            elif current_plan == 'max' and to_plan_name == 'pro':
                # Downgrade Max → Pro: takes effect at expiry, full price.
                is_downgrade = True
                subscription_type = 'downgrade'
            else:
                subscription_type = 'new'

        # 5. Build the result.
        result = {
            'is_renewal': is_renewal,
            'is_upgrade': is_upgrade,
            'is_downgrade': is_downgrade,
            'subscription_type': subscription_type,
            'current_plan': current_plan,
            'current_cycle': current_cycle,
            'new_plan_price': price,
            'original_price': price,       # new plan list price
            'remaining_value': remaining_value,  # credit (upgrades only)
            'original_amount': price,
            'discount_amount': 0,
            'final_amount': final_price,
            'promo_code': None,
            'promo_error': None
        }

        # 6. Apply the promo code on top of the post-credit amount.
        if promo_code and promo_code.strip():
            promo, error = validate_promo_code(promo_code, to_plan_name, to_cycle,
                                               final_price, user_id)
            if promo:
                discount = calculate_discount(promo, final_price)
                result['discount_amount'] = float(discount)
                result['final_amount'] = final_price - float(discount)
                result['promo_code'] = promo.code
            elif error:
                result['promo_error'] = error

        return result
    except Exception as e:
        return {'error': f'浠锋牸璁$畻澶辫触: {str(e)}'}


# Deprecated alias kept for old callers.
def calculate_upgrade_price(user_id, to_plan_name, to_cycle, promo_code=None):
    """
    [Deprecated] Old upgrade-price entry point; use
    calculate_subscription_price_simple in new code.
    """
    return calculate_subscription_price_simple(user_id, to_plan_name, to_cycle, promo_code)


def initialize_subscription_plans_safe():
    """Seed the two default plans once; a no-op if any plan already exists."""
    try:
        if SubscriptionPlan.query.first():
            return
        pro_plan = SubscriptionPlan(
            name='pro',
            display_name='Pro 涓撲笟鐗',
            description='浜嬩欢鍏宠仈鑲$エ娣卞害鍒嗘瀽 | 鍘嗗彶浜嬩欢鏅鸿兘瀵规瘮澶嶇洏 | 浜嬩欢姒傚康鍏宠仈涓庢寲鎺?| 姒傚康鏉垮潡涓偂杩借釜 | 姒傚康娣卞害鐮旀姤涓庤В璇?| 涓偂寮傚姩瀹炴椂棰勮',
            monthly_price=0.01,
            yearly_price=0.08,
            features=json.dumps([
                "鍩虹鑲$エ鍒嗘瀽宸ュ叿",
                "鍘嗗彶鏁版嵁鏌ヨ",
                "鍩虹璐㈠姟鎶ヨ〃",
                "绠€鍗曟姇璧勮鍒掕褰",
                "鏍囧噯瀹㈡湇鏀寔"
            ]),
            sort_order=1
        )
        max_plan = SubscriptionPlan(
            name='max',
            display_name='Max 鏃楄埌鐗',
            description='鍖呭惈Pro鐗堝叏閮ㄥ姛鑳?| 浜嬩欢浼犲閾捐矾鏅鸿兘鍒嗘瀽 | 姒傚康婕斿彉鏃堕棿杞磋拷婧?| 涓偂鍏ㄦ柟浣嶆繁搴︾爺绌?| 浠峰皬鍓嶆姇鐮斿姪鎵嬫棤闄愪娇鐢?| 鏂板姛鑳戒紭鍏堜綋楠屾潈 | 涓撳睘瀹㈡湇涓€瀵逛竴鏈嶅姟',
            monthly_price=0.1,
            yearly_price=0.8,
            features=json.dumps([
                "鍏ㄩ儴Pro鐗堟湰鍔熻兘",
                "楂樼骇鍒嗘瀽宸ュ叿",
                "瀹炴椂鏁版嵁鎺ㄩ€",
                "涓撲笟璐㈠姟鍒嗘瀽鎶ュ憡",
                "AI鎶曡祫寤鸿",
                "鏃犻檺鎶曡祫璁″垝瀛樺偍",
                "浼樺厛瀹㈡湇鏀寔",
                "鐙鐮旀姤璁块棶"
            ]),
            sort_order=2
        )
        db.session.add(pro_plan)
        db.session.add(max_plan)
        db.session.commit()
    except Exception:
        # Best-effort seeding: ignore failures (e.g. table not created yet).
        pass


# --------------------------------------------
# Subscription-tier helper functions
# --------------------------------------------
def _get_current_subscription_info():
    """Current user's subscription info as a dict; unauthenticated users and
    any error fall back to an active free plan."""
    try:
        user_id = session.get('user_id')
        if not user_id:
            return {'type': 'free', 'status': 'active', 'is_active': True}
        sub = get_user_subscription_safe(user_id)
        data = sub.to_dict()
        # Normalise field names across the two to_dict() shapes.
        return {
            'type': data.get('type') or data.get('subscription_type') or 'free',
            'status': data.get('status') or data.get('subscription_status') or 'active',
            'is_active': data.get('is_active', True)
        }
    except Exception:
        return {'type': 'free', 'status': 'active', 'is_active': True}
def _subscription_level(sub_type):
    """Map a subscription type to a numeric tier: free=0, pro=1, max=2."""
    mapping = {'free': 0, 'pro': 1, 'max': 2}
    return mapping.get((sub_type or 'free').lower(), 0)


def _has_required_level(required: str) -> bool:
    """Whether the current user meets the required subscription tier."""
    info = _get_current_subscription_info()
    if not info.get('is_active', True):
        return False
    return _subscription_level(info.get('type')) >= _subscription_level(required)


# ============================================
# WeChat open-platform domain verification
# ============================================
@app.route('/gvQnxIQ5Rs.txt', methods=['GET'])
def wechat_domain_verify():
    """WeChat open-platform domain verification file."""
    return 'd526e9e857dbd2621e5100811972e8c5', 200, {'Content-Type': 'text/plain'}


@app.route('/MP_verify_17Fo4JhapMw6vtNa.txt', methods=['GET'])
def wechat_mp_domain_verify():
    """WeChat official-account web-auth domain verification file."""
    return '17Fo4JhapMw6vtNa', 200, {'Content-Type': 'text/plain'}


# ============================================
# Subscription APIs
# ============================================
@app.route('/api/subscription/plans', methods=['GET'])
def get_subscription_plans():
    """List active subscription plans; serves hard-coded defaults on DB failure."""
    try:
        plans = SubscriptionPlan.query.filter_by(is_active=True).order_by(SubscriptionPlan.sort_order).all()
        return jsonify({
            'success': True,
            'data': [plan.to_dict() for plan in plans]
        })
    except Exception:
        # Fallback defaults (include pricing_options for the new frontend).
        default_plans = [
            {
                'id': 1,
                'name': 'pro',
                'display_name': 'Pro鐗堟湰',
                'description': '閫傚悎涓汉鎶曡祫鑰呯殑鍩虹鍔熻兘濂楅',
                'monthly_price': 198,
                'yearly_price': 2000,
                'pricing_options': [
                    {'months': 1, 'price': 198, 'label': '鏈堜粯', 'cycle_key': 'monthly'},
                    {'months': 3, 'price': 534, 'label': '3涓湀', 'cycle_key': '3months', 'discount_percent': 10},
                    {'months': 6, 'price': 950, 'label': '鍗婂勾', 'cycle_key': '6months', 'discount_percent': 20},
                    {'months': 12, 'price': 2000, 'label': '1骞', 'cycle_key': 'yearly', 'discount_percent': 16},
                    {'months': 24, 'price': 3600, 'label': '2骞', 'cycle_key': '2years', 'discount_percent': 24},
                    {'months': 36, 'price': 5040, 'label': '3骞', 'cycle_key': '3years', 'discount_percent': 29}
                ],
                'features': ['鍩虹鑲$エ鍒嗘瀽宸ュ叿', '鍘嗗彶鏁版嵁鏌ヨ', '鍩虹璐㈠姟鎶ヨ〃', '绠€鍗曟姇璧勮鍒掕褰', '鏍囧噯瀹㈡湇鏀寔'],
                'is_active': True,
                'sort_order': 1
            },
            {
                'id': 2,
                'name': 'max',
                'display_name': 'Max鐗堟湰',
                'description': '閫傚悎涓撲笟鎶曡祫鑰呯殑鍏ㄥ姛鑳藉椁',
                'monthly_price': 998,
                'yearly_price': 10000,
                'pricing_options': [
                    {'months': 1, 'price': 998, 'label': '鏈堜粯', 'cycle_key': 'monthly'},
                    {'months': 3, 'price': 2695, 'label': '3涓湀', 'cycle_key': '3months', 'discount_percent': 10},
                    {'months': 6, 'price': 4790, 'label': '鍗婂勾', 'cycle_key': '6months', 'discount_percent': 20},
                    {'months': 12, 'price': 10000, 'label': '1骞', 'cycle_key': 'yearly', 'discount_percent': 17},
                    {'months': 24, 'price': 18000, 'label': '2骞', 'cycle_key': '2years', 'discount_percent': 25},
                    {'months': 36, 'price': 25200, 'label': '3骞', 'cycle_key': '3years', 'discount_percent': 30}
                ],
                'features': ['鍏ㄩ儴Pro鐗堟湰鍔熻兘', '楂樼骇鍒嗘瀽宸ュ叿', '瀹炴椂鏁版嵁鎺ㄩ€', 'API璁块棶', '浼樺厛瀹㈡湇鏀寔'],
                'is_active': True,
                'sort_order': 2
            }
        ]
        return jsonify({
            'success': True,
            'data': default_plans
        })


@app.route('/api/subscription/current', methods=['GET'])
def get_current_subscription():
    """Current user's subscription; falls back to a free plan on any error."""
    try:
        if 'user_id' not in session:
            return jsonify({'success': False, 'error': '鏈櫥褰'}), 401
        subscription = get_user_subscription_safe(session['user_id'])
        return jsonify({
            'success': True,
            'data': subscription.to_dict()
        })
    except Exception:
        return jsonify({
            'success': True,
            'data': {
                'type': 'free',
                'status': 'active',
                'is_active': True,
                'days_left': 999
            }
        })
'success': True, 'data': subscription.to_dict() }) except Exception as e: return jsonify({ 'success': True, 'data': { 'type': 'free', 'status': 'active', 'is_active': True, 'days_left': 999 } }) @app.route('/api/subscription/info', methods=['GET']) def get_subscription_info(): """鑾峰彇褰撳墠鐢ㄦ埛鐨勮闃呬俊鎭?- 鍓嶇涓撶敤鎺ュ彛""" try: info = _get_current_subscription_info() return jsonify({ 'success': True, 'data': info }) except Exception as e: print(f"鑾峰彇璁㈤槄淇℃伅閿欒: {e}") return jsonify({ 'success': True, 'data': { 'type': 'free', 'status': 'active', 'is_active': True, 'days_left': 999 } }) @app.route('/api/promo-code/validate', methods=['POST']) def validate_promo_code_api(): """楠岃瘉浼樻儬鐮?"" try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 data = request.get_json() code = data.get('code', '').strip() plan_name = data.get('plan_name') billing_cycle = data.get('billing_cycle') amount = data.get('amount', 0) if not code or not plan_name or not billing_cycle: return jsonify({'success': False, 'error': '鍙傛暟涓嶅畬鏁?}), 400 # 楠岃瘉浼樻儬鐮? promo, error = validate_promo_code(code, plan_name, billing_cycle, amount, session['user_id']) if error: return jsonify({ 'success': False, 'valid': False, 'error': error }) # 璁$畻鎶樻墸 discount_amount = calculate_discount(promo, amount) final_amount = amount - discount_amount return jsonify({ 'success': True, 'valid': True, 'promo_code': promo.to_dict(), 'discount_amount': discount_amount, 'final_amount': final_amount }) except Exception as e: return jsonify({ 'success': False, 'error': f'楠岃瘉澶辫触: {str(e)}' }), 500 @app.route('/api/subscription/calculate-price', methods=['POST']) def calculate_subscription_price(): """ 璁$畻璁㈤槄浠锋牸锛堟柊鐗堬細缁垂鍜屾柊璐环鏍间竴鑷达級 Request Body: { "to_plan": "pro", "to_cycle": "yearly", "promo_code": "WELCOME2025" // 鍙€? } Response: { "success": true, "data": { "is_renewal": true, // 鏄惁涓虹画璐? "subscription_type": "renew", // new 鎴?renew "current_plan": "pro", // 褰撳墠濂楅锛堝鏋滄湁锛? 
"current_cycle": "monthly", // 褰撳墠鍛ㄦ湡锛堝鏋滄湁锛? "new_plan_price": 2699.00, "original_amount": 2699.00, "discount_amount": 0, "final_amount": 2699.00, "promo_code": null, "promo_error": null } } """ try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 data = request.get_json() to_plan = data.get('to_plan') to_cycle = data.get('to_cycle') promo_code = (data.get('promo_code') or '').strip() or None if not to_plan or not to_cycle: return jsonify({'success': False, 'error': '鍙傛暟涓嶅畬鏁?}), 400 # 浣跨敤鏂扮殑绠€鍖栦环鏍艰绠楀嚱鏁? result = calculate_subscription_price_simple(session['user_id'], to_plan, to_cycle, promo_code) if 'error' in result: return jsonify({ 'success': False, 'error': result['error'] }), 400 return jsonify({ 'success': True, 'data': result }) except Exception as e: return jsonify({ 'success': False, 'error': f'璁$畻澶辫触: {str(e)}' }), 500 @app.route('/api/subscription/free-upgrade', methods=['POST']) @login_required def free_upgrade_subscription(): """ 鍏嶈垂鍗囩骇璁㈤槄锛堝綋鍓╀綑浠峰€?>= 鏂板椁愪环鏍兼椂锛? Request Body: { "plan_name": "max", "billing_cycle": "yearly" } """ try: data = request.get_json() plan_name = data.get('plan_name') billing_cycle = data.get('billing_cycle') if not plan_name or not billing_cycle: return jsonify({'success': False, 'error': '鍙傛暟涓嶅畬鏁?}), 400 user_id = current_user.id # 璁$畻浠锋牸锛岄獙璇佹槸鍚﹀彲浠ュ厤璐瑰崌绾? 
price_result = calculate_subscription_price_simple(user_id, plan_name, billing_cycle, None) if 'error' in price_result: return jsonify({'success': False, 'error': price_result['error']}), 400 # 妫€鏌ユ槸鍚︿负鍗囩骇涓斿疄浠橀噾棰濅负0 if not price_result.get('is_upgrade') or price_result.get('final_amount', 1) > 0: return jsonify({'success': False, 'error': '褰撳墠鎯呭喌涓嶇鍚堝厤璐瑰崌绾ф潯浠?}), 400 # 鑾峰彇褰撳墠璁㈤槄 subscription = UserSubscription.query.filter_by(user_id=user_id).first() if not subscription: return jsonify({'success': False, 'error': '鏈壘鍒拌闃呰褰?}), 404 # 璁$畻鏂扮殑鍒版湡鏃堕棿锛堟寜鍓╀綑浠峰€兼姌绠楋級 remaining_value = price_result.get('remaining_value', 0) new_plan_price = price_result.get('new_plan_price', 0) if new_plan_price > 0: # 璁$畻鍙互鍏戞崲鐨勬柊濂楅澶╂暟 value_ratio = remaining_value / new_plan_price cycle_days_map = { 'monthly': 30, 'quarterly': 90, 'semiannual': 180, 'yearly': 365 } new_cycle_days = cycle_days_map.get(billing_cycle, 365) # 鏂扮殑鍒版湡澶╂暟 = 鍛ㄦ湡澶╂暟 脳 浠峰€兼瘮渚? new_days = int(new_cycle_days * value_ratio) # 鏇存柊璁㈤槄淇℃伅 subscription.subscription_type = plan_name subscription.billing_cycle = billing_cycle subscription.start_date = datetime.utcnow() subscription.end_date = datetime.utcnow() + timedelta(days=new_days) subscription.subscription_status = 'active' subscription.updated_at = datetime.utcnow() db.session.commit() return jsonify({ 'success': True, 'message': f'鍗囩骇鎴愬姛锛佹偍鐨剓plan_name.upper()}鐗堟湰灏嗘寔缁瓄new_days}澶?, 'data': { 'subscription_type': plan_name, 'end_date': subscription.end_date.isoformat(), 'days': new_days } }) else: return jsonify({'success': False, 'error': '浠锋牸璁$畻寮傚父'}), 500 except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': f'鍗囩骇澶辫触: {str(e)}'}), 500 @app.route('/api/payment/create-order', methods=['POST']) def create_payment_order(): """ 鍒涘缓鏀粯璁㈠崟锛堟柊鐗堬細绠€鍖栭€昏緫锛屼笉鍐嶈褰曞崌绾э級 Request Body: { "plan_name": "pro", "billing_cycle": "yearly", "promo_code": "WELCOME2025" // 鍙€? 
} """ try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 data = request.get_json() plan_name = data.get('plan_name') billing_cycle = data.get('billing_cycle') promo_code = (data.get('promo_code') or '').strip() or None if not plan_name or not billing_cycle: return jsonify({'success': False, 'error': '鍙傛暟涓嶅畬鏁?}), 400 # 浣跨敤鏂扮殑绠€鍖栦环鏍艰绠? price_result = calculate_subscription_price_simple(session['user_id'], plan_name, billing_cycle, promo_code) if 'error' in price_result: return jsonify({'success': False, 'error': price_result['error']}), 400 amount = price_result['final_amount'] subscription_type = price_result.get('subscription_type', 'new') # new 鎴?renew # 妫€鏌ユ槸鍚︿负鍏嶈垂鍗囩骇锛堥噾棰濅负0锛? if amount <= 0 and price_result.get('is_upgrade'): return jsonify({ 'success': False, 'error': '褰撳墠鍓╀綑浠峰€煎彲鐩存帴鍏嶈垂鍗囩骇锛岃浣跨敤鍏嶈垂鍗囩骇鍔熻兘', 'should_free_upgrade': True, 'price_info': price_result }), 400 # 鍒涘缓璁㈠崟 try: # 鑾峰彇鍘熶环鍜屾姌鎵i噾棰? original_amount = price_result.get('original_amount', amount) discount_amount = price_result.get('discount_amount', 0) order = PaymentOrder( user_id=session['user_id'], plan_name=plan_name, billing_cycle=billing_cycle, amount=amount, original_amount=original_amount, discount_amount=discount_amount ) # 娣诲姞璁㈤槄绫诲瀷鏍囪锛堢敤浜庡墠绔睍绀猴級 order.remark = f"{subscription_type}璁㈤槄" if subscription_type == 'renew' else "鏂拌喘璁㈤槄" # 濡傛灉浣跨敤浜嗕紭鎯犵爜锛屽叧鑱斾紭鎯犵爜 if promo_code and price_result.get('promo_code'): promo_obj = PromoCode.query.filter_by(code=promo_code.upper()).first() if promo_obj: order.promo_code_id = promo_obj.id print(f"馃摝 璁㈠崟鍏宠仈浼樻儬鐮? {promo_obj.code} (ID: {promo_obj.id})") db.session.add(order) db.session.commit() except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': f'璁㈠崟鍒涘缓澶辫触: {str(e)}'}), 500 # 灏濊瘯璋冪敤鐪熷疄鐨勫井淇℃敮浠楢PI锛堜娇鐢?subprocess 缁曡繃 eventlet DNS 闂锛? try: import subprocess import urllib.parse # 浣跨敤鐙珛鑴氭湰妫€鏌ラ厤缃? 
script_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'wechat_pay_worker.py') # 鍏堟鏌ラ厤缃? check_result = subprocess.run( [sys.executable, script_path, 'check'], capture_output=True, text=True, timeout=10 ) if check_result.returncode != 0: check_data = json.loads(check_result.stdout) if check_result.stdout else {} ready_msg = check_data.get('error', check_data.get('message', '鏈煡閿欒')) order.qr_code_url = f"https://api.qrserver.com/v1/create-qr-code/?size=200x200&data=wxpay://order/{order.order_no}" order.remark = f"婕旂ず妯″紡 - {ready_msg}" else: # 鍒涘缓寰俊鏀粯璁㈠崟 plan_display_name = f"{plan_name.upper()}鐗堟湰-{billing_cycle}" body = f"VFr-{plan_display_name}" product_id = f"{plan_name}_{billing_cycle}" create_result = subprocess.run( [sys.executable, script_path, 'create', order.order_no, str(float(amount)), body, product_id], capture_output=True, text=True, timeout=60 ) print(f"[寰俊鏀粯] 鍒涘缓璁㈠崟杩斿洖: {create_result.stdout}") if create_result.stderr: print(f"[寰俊鏀粯] 閿欒杈撳嚭: {create_result.stderr}") wechat_result = json.loads(create_result.stdout) if create_result.stdout else {'success': False, 'error': '鏃犺繑鍥?} if wechat_result.get('success'): # 鑾峰彇寰俊杩斿洖鐨勫師濮媍ode_url wechat_code_url = wechat_result['code_url'] # 灏嗗井淇″崗璁甎RL杞崲涓轰簩缁寸爜鍥剧墖URL encoded_url = urllib.parse.quote(wechat_code_url, safe='') qr_image_url = f"https://api.qrserver.com/v1/create-qr-code/?size=200x200&data={encoded_url}" order.qr_code_url = qr_image_url order.prepay_id = wechat_result.get('prepay_id') order.remark = f"寰俊鏀粯 - {wechat_code_url}" else: order.qr_code_url = f"https://api.qrserver.com/v1/create-qr-code/?size=200x200&data=wxpay://order/{order.order_no}" order.remark = f"寰俊鏀粯澶辫触: {wechat_result.get('error')}" except subprocess.TimeoutExpired: order.qr_code_url = f"https://api.qrserver.com/v1/create-qr-code/?size=200x200&data=wxpay://order/{order.order_no}" order.remark = "寰俊鏀粯瓒呮椂" except json.JSONDecodeError as e: order.qr_code_url = 
f"https://api.qrserver.com/v1/create-qr-code/?size=200x200&data=wxpay://order/{order.order_no}" order.remark = f"寰俊鏀粯杩斿洖瑙f瀽澶辫触: {str(e)}" except Exception as e: import traceback print(f"[寰俊鏀粯] Exception: {e}") traceback.print_exc() order.qr_code_url = f"https://api.qrserver.com/v1/create-qr-code/?size=200x200&data=wxpay://order/{order.order_no}" order.remark = f"鏀粯寮傚父: {str(e)}" db.session.commit() return jsonify({ 'success': True, 'data': order.to_dict(), 'message': '璁㈠崟鍒涘缓鎴愬姛' }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': '鍒涘缓璁㈠崟澶辫触'}), 500 @app.route('/api/payment/order//status', methods=['GET']) def check_order_status(order_id): """鏌ヨ璁㈠崟鏀粯鐘舵€?"" try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 # 鏌ユ壘璁㈠崟 order = PaymentOrder.query.filter_by( id=order_id, user_id=session['user_id'] ).first() if not order: return jsonify({'success': False, 'error': '璁㈠崟涓嶅瓨鍦?}), 404 # 濡傛灉璁㈠崟宸茬粡鏄凡鏀粯鐘舵€侊紝鐩存帴杩斿洖 if order.status == 'paid': return jsonify({ 'success': True, 'data': order.to_dict(), 'message': '璁㈠崟宸叉敮浠?, 'payment_success': True }) # 濡傛灉璁㈠崟杩囨湡锛屾爣璁颁负杩囨湡 if order.is_expired(): order.status = 'expired' db.session.commit() return jsonify({ 'success': True, 'data': order.to_dict(), 'message': '璁㈠崟宸茶繃鏈? }) # 璋冪敤寰俊鏀粯API鏌ヨ鐪熷疄鐘舵€侊紙浣跨敤 subprocess 缁曡繃 eventlet DNS 闂锛? try: import subprocess script_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'wechat_pay_worker.py') query_proc = subprocess.run( [sys.executable, script_path, 'query', order.order_no], capture_output=True, text=True, timeout=30 ) query_result = json.loads(query_proc.stdout) if query_proc.stdout else {'success': False, 'error': '鏃犺繑鍥?} if query_result.get('success'): trade_state = query_result.get('trade_state') transaction_id = query_result.get('transaction_id') if trade_state == 'SUCCESS': # 鏀粯鎴愬姛锛屾洿鏂拌鍗曠姸鎬? order.mark_as_paid(transaction_id) # 婵€娲荤敤鎴疯闃? 
activate_user_subscription(order.user_id, order.plan_name, order.billing_cycle) # 璁板綍浼樻儬鐮佷娇鐢ㄦ儏鍐? if order.promo_code_id: try: existing_usage = PromoCodeUsage.query.filter_by(order_id=order.id).first() if not existing_usage: usage = PromoCodeUsage( promo_code_id=order.promo_code_id, user_id=order.user_id, order_id=order.id, original_amount=order.original_amount or order.amount, discount_amount=order.discount_amount or 0, final_amount=order.amount ) db.session.add(usage) promo = PromoCode.query.get(order.promo_code_id) if promo: promo.current_uses = (promo.current_uses or 0) + 1 print(f"馃帿 浼樻儬鐮佷娇鐢ㄨ褰曞凡鍒涘缓: {promo.code}") except Exception as e: print(f"鈿狅笍 璁板綍浼樻儬鐮佷娇鐢ㄥけ璐? {e}") db.session.commit() return jsonify({ 'success': True, 'data': order.to_dict(), 'message': '鏀粯鎴愬姛锛佽闃呭凡婵€娲?, 'payment_success': True }) elif trade_state in ['NOTPAY', 'USERPAYING']: # 鏈敮浠樻垨鏀粯涓? return jsonify({ 'success': True, 'data': order.to_dict(), 'message': '绛夊緟鏀粯...', 'payment_success': False }) else: # 鏀粯澶辫触鎴栧彇娑? order.status = 'cancelled' db.session.commit() return jsonify({ 'success': True, 'data': order.to_dict(), 'message': '鏀粯宸插彇娑?, 'payment_success': False }) else: # 寰俊鏌ヨ澶辫触锛岃繑鍥炲綋鍓嶇姸鎬? return jsonify({ 'success': True, 'data': order.to_dict(), 'message': f"鏌ヨ澶辫触: {query_result.get('error')}", 'payment_success': False }) except Exception as e: # 鏌ヨ澶辫触锛岃繑鍥炲綋鍓嶈鍗曠姸鎬? 
return jsonify({ 'success': True, 'data': order.to_dict(), 'message': '鏃犳硶鏌ヨ鏀粯鐘舵€侊紝璇风◢鍚庨噸璇?, 'payment_success': False }) except Exception as e: return jsonify({'success': False, 'error': '鏌ヨ澶辫触'}), 500 @app.route('/api/payment/order//force-update', methods=['POST']) def force_update_order_status(order_id): """寮哄埗鏇存柊璁㈠崟鏀粯鐘舵€侊紙璋冭瘯鐢級""" try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 # 鏌ユ壘璁㈠崟 order = PaymentOrder.query.filter_by( id=order_id, user_id=session['user_id'] ).first() if not order: return jsonify({'success': False, 'error': '璁㈠崟涓嶅瓨鍦?}), 404 # 妫€鏌ュ井淇℃敮浠樼姸鎬侊紙浣跨敤 subprocess 缁曡繃 eventlet DNS 闂锛? try: import subprocess script_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'wechat_pay_worker.py') query_proc = subprocess.run( [sys.executable, script_path, 'query', order.order_no], capture_output=True, text=True, timeout=30 ) query_result = json.loads(query_proc.stdout) if query_proc.stdout else {'success': False, 'error': '鏃犺繑鍥?} if query_result.get('success') and query_result.get('trade_state') == 'SUCCESS': # 寮哄埗鏇存柊涓哄凡鏀粯 old_status = order.status order.mark_as_paid(query_result.get('transaction_id')) # 婵€娲荤敤鎴疯闃? activate_user_subscription(order.user_id, order.plan_name, order.billing_cycle) # 璁板綍浼樻儬鐮佷娇鐢紙濡傛灉浣跨敤浜嗕紭鎯犵爜锛? if order.promo_code_id: try: # 妫€鏌ユ槸鍚﹀凡缁忚褰曡繃锛堥槻姝㈤噸澶嶏級 existing_usage = PromoCodeUsage.query.filter_by(order_id=order.id).first() if not existing_usage: promo_usage = PromoCodeUsage( promo_code_id=order.promo_code_id, user_id=order.user_id, order_id=order.id, original_amount=order.original_amount or order.amount, discount_amount=order.discount_amount or 0, final_amount=order.amount ) db.session.add(promo_usage) # 鏇存柊浼樻儬鐮佷娇鐢ㄦ鏁? promo = PromoCode.query.get(order.promo_code_id) if promo: promo.current_uses = (promo.current_uses or 0) + 1 print(f"馃帿 浼樻儬鐮佷娇鐢ㄨ褰曞凡鍒涘缓: {promo.code}") else: print(f"鈩癸笍 浼樻儬鐮佷娇鐢ㄨ褰曞凡瀛樺湪锛岃烦杩?) except Exception as e: print(f"鈿狅笍 璁板綍浼樻儬鐮佷娇鐢ㄥけ璐? 
{e}") db.session.commit() print(f"鉁?璁㈠崟鐘舵€佸己鍒舵洿鏂版垚鍔? {old_status} -> paid") return jsonify({ 'success': True, 'message': f'璁㈠崟鐘舵€佸凡浠?{old_status} 鏇存柊涓?paid', 'data': order.to_dict(), 'payment_success': True }) else: return jsonify({ 'success': False, 'error': '寰俊鏀粯鐘舵€佷笉鏄垚鍔熺姸鎬侊紝鏃犳硶寮哄埗鏇存柊' }) except Exception as e: print(f"鉂?寮哄埗鏇存柊澶辫触: {e}") return jsonify({ 'success': False, 'error': f'寮哄埗鏇存柊澶辫触: {str(e)}' }) except Exception as e: print(f"寮哄埗鏇存柊璁㈠崟鐘舵€佸け璐? {str(e)}") return jsonify({'success': False, 'error': '鎿嶄綔澶辫触'}), 500 @app.route('/api/payment/wechat/callback', methods=['POST']) def wechat_payment_callback(): """寰俊鏀粯鍥炶皟澶勭悊""" try: # 鑾峰彇鍘熷XML鏁版嵁 raw_data = request.get_data() print(f"馃摜 鏀跺埌寰俊鏀粯鍥炶皟: {raw_data}") # 楠岃瘉鍥炶皟鏁版嵁 try: from wechat_pay import create_wechat_pay_instance wechat_pay = create_wechat_pay_instance() verify_result = wechat_pay.verify_callback(raw_data.decode('utf-8')) if not verify_result['success']: print(f"鉂?寰俊鏀粯鍥炶皟楠岃瘉澶辫触: {verify_result['error']}") return '' callback_data = verify_result['data'] except Exception as e: print(f"鉂?寰俊鏀粯鍥炶皟澶勭悊寮傚父: {e}") # 绠€鍗曡В鏋怷ML锛坒allback锛? callback_data = _parse_xml_callback(raw_data.decode('utf-8')) if not callback_data: return '' # 鑾峰彇鍏抽敭瀛楁 return_code = callback_data.get('return_code') result_code = callback_data.get('result_code') order_no = callback_data.get('out_trade_no') transaction_id = callback_data.get('transaction_id') print(f"馃摝 鍥炶皟鏁版嵁瑙f瀽:") print(f" 杩斿洖鐮? {return_code}") print(f" 缁撴灉鐮? {result_code}") print(f" 璁㈠崟鍙? {order_no}") print(f" 浜ゆ槗鍙? {transaction_id}") if not order_no: return '' # 鏌ユ壘璁㈠崟 order = PaymentOrder.query.filter_by(order_no=order_no).first() if not order: print(f"鉂?璁㈠崟涓嶅瓨鍦? 
{order_no}") return '' # 澶勭悊鏀粯鎴愬姛 if return_code == 'SUCCESS' and result_code == 'SUCCESS': print(f"馃帀 鏀粯鍥炶皟鎴愬姛: 璁㈠崟 {order_no}") # 妫€鏌ヨ鍗曟槸鍚﹀凡缁忓鐞嗚繃 if order.status == 'paid': print(f"鈩癸笍 璁㈠崟宸插鐞嗚繃: {order_no}") db.session.commit() return '' # 鏇存柊璁㈠崟鐘舵€侊紙鏃犺涔嬪墠鏄粈涔堢姸鎬侊級 old_status = order.status order.mark_as_paid(transaction_id) print(f"馃摑 璁㈠崟鐘舵€佸凡鏇存柊: {old_status} -> paid") # 婵€娲荤敤鎴疯闃? subscription = activate_user_subscription(order.user_id, order.plan_name, order.billing_cycle) if subscription: print(f"鉁?鐢ㄦ埛璁㈤槄宸叉縺娲? 鐢ㄦ埛{order.user_id}, 濂楅{order.plan_name}") else: print(f"鈿狅笍 璁㈤槄婵€娲诲け璐ワ紝浣嗚鍗曞凡鏍囪涓哄凡鏀粯") # 璁板綍浼樻儬鐮佷娇鐢ㄦ儏鍐? if order.promo_code_id: try: # 妫€鏌ユ槸鍚﹀凡缁忚褰曡繃锛堥槻姝㈤噸澶嶏級 existing_usage = PromoCodeUsage.query.filter_by( order_id=order.id ).first() if not existing_usage: # 鍒涘缓浼樻儬鐮佷娇鐢ㄨ褰? usage = PromoCodeUsage( promo_code_id=order.promo_code_id, user_id=order.user_id, order_id=order.id, original_amount=order.original_amount or order.amount, discount_amount=order.discount_amount or 0, final_amount=order.amount ) db.session.add(usage) # 鏇存柊浼樻儬鐮佷娇鐢ㄦ鏁? promo = PromoCode.query.get(order.promo_code_id) if promo: promo.current_uses = (promo.current_uses or 0) + 1 print(f"馃帿 浼樻儬鐮佷娇鐢ㄨ褰曞凡鍒涘缓: {promo.code}, 褰撳墠浣跨敤娆℃暟: {promo.current_uses}") else: print(f"鈩癸笍 浼樻儬鐮佷娇鐢ㄨ褰曞凡瀛樺湪锛岃烦杩?) except Exception as e: print(f"鈿狅笍 璁板綍浼樻儬鐮佷娇鐢ㄥけ璐? {e}") # 涓嶅奖鍝嶄富娴佺▼锛岀户缁墽琛? db.session.commit() # 杩斿洖鎴愬姛鍝嶅簲缁欏井淇? 
return '' except Exception as e: db.session.rollback() print(f"鉂?寰俊鏀粯鍥炶皟澶勭悊澶辫触: {e}") import traceback app.logger.error(f"鍥炶皟澶勭悊閿欒: {e}", exc_info=True) return '' def _parse_xml_callback(xml_data): """绠€鍗曠殑XML鍥炶皟鏁版嵁瑙f瀽""" try: import xml.etree.ElementTree as ET root = ET.fromstring(xml_data) result = {} for child in root: result[child.tag] = child.text return result except Exception as e: print(f"XML瑙f瀽澶辫触: {e}") return None # ======================================== # 鏀粯瀹濇敮浠樼浉鍏矨PI # ======================================== @app.route('/api/payment/alipay/create-order', methods=['POST']) def create_alipay_order(): """ 鍒涘缓鏀粯瀹濇敮浠樿鍗? Request Body: { "plan_name": "pro", "billing_cycle": "yearly", "promo_code": "WELCOME2025", // 鍙€? "is_mobile": true // 鍙€夛紝鏄惁涓烘墜鏈虹锛堣嚜鍔ㄤ娇鐢?WAP 鏀粯锛? } """ try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 data = request.get_json() plan_name = data.get('plan_name') billing_cycle = data.get('billing_cycle') promo_code = (data.get('promo_code') or '').strip() or None # 鍓嶇浼犲叆鐨勮澶囩被鍨嬶紝鐢ㄤ簬鍐冲畾浣跨敤 page 鏀粯杩樻槸 wap 鏀粯 is_mobile = data.get('is_mobile', False) if not plan_name or not billing_cycle: return jsonify({'success': False, 'error': '鍙傛暟涓嶅畬鏁?}), 400 # 浣跨敤绠€鍖栦环鏍艰绠? 
price_result = calculate_subscription_price_simple(session['user_id'], plan_name, billing_cycle, promo_code) if 'error' in price_result: return jsonify({'success': False, 'error': price_result['error']}), 400 amount = price_result['final_amount'] subscription_type = price_result.get('subscription_type', 'new') # 妫€鏌ユ槸鍚︿负鍏嶈垂鍗囩骇 if amount <= 0 and price_result.get('is_upgrade'): return jsonify({ 'success': False, 'error': '褰撳墠鍓╀綑浠峰€煎彲鐩存帴鍏嶈垂鍗囩骇锛岃浣跨敤鍏嶈垂鍗囩骇鍔熻兘', 'should_free_upgrade': True, 'price_info': price_result }), 400 # 鍒涘缓璁㈠崟 try: original_amount = price_result.get('original_amount', amount) discount_amount = price_result.get('discount_amount', 0) order = PaymentOrder( user_id=session['user_id'], plan_name=plan_name, billing_cycle=billing_cycle, amount=amount, original_amount=original_amount, discount_amount=discount_amount ) # 璁剧疆鏀粯鏂瑰紡涓烘敮浠樺疂 order.payment_method = 'alipay' order.remark = f"{subscription_type}璁㈤槄" if subscription_type == 'renew' else "鏂拌喘璁㈤槄" # 鍏宠仈浼樻儬鐮? if promo_code and price_result.get('promo_code'): promo_obj = PromoCode.query.filter_by(code=promo_code.upper()).first() if promo_obj: order.promo_code_id = promo_obj.id print(f"馃摝 璁㈠崟鍏宠仈浼樻儬鐮? {promo_obj.code} (ID: {promo_obj.id})") db.session.add(order) db.session.commit() except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': f'璁㈠崟鍒涘缓澶辫触: {str(e)}'}), 500 # 璋冪敤鏀粯瀹濇敮浠楢PI锛堜娇鐢?subprocess 缁曡繃 eventlet DNS 闂锛? try: import subprocess script_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'alipay_pay_worker.py') # 鍏堟鏌ラ厤缃? check_result = subprocess.run( [sys.executable, script_path, 'check'], capture_output=True, text=True, timeout=10 ) if check_result.returncode != 0: check_data = json.loads(check_result.stdout) if check_result.stdout else {} error_msg = check_data.get('error', check_data.get('message', '鏀粯瀹濋厤缃敊璇?)) order.remark = f"鏀粯瀹濋厤缃敊璇?- {error_msg}" db.session.commit() return jsonify({ 'success': False, 'error': f'鏀粯瀹濇敮浠樻殏涓嶅彲鐢? 
{error_msg}' }), 500 # 鍒涘缓鏀粯瀹濊鍗? plan_display_name = f"{plan_name.upper()}鐗堟湰-{billing_cycle}" subject = f"VFr-{plan_display_name}" body = f"浠峰€煎墠娌胯闃呮湇鍔?{plan_display_name}" # 閲戦鏍煎紡鍖栦负涓や綅灏忔暟锛堟敮浠樺疂瑕佹眰锛? amount_str = f"{float(amount):.2f}" # 鏍规嵁璁惧绫诲瀷閫夋嫨鏀粯鏂瑰紡锛歸ap=鎵嬫満缃戠珯鏀粯锛宲age=鐢佃剳缃戠珯鏀粯 pay_type = 'wap' if is_mobile else 'page' print(f"[鏀粯瀹漖 璁惧绫诲瀷: {'鎵嬫満' if is_mobile else '鐢佃剳'}, 鏀粯鏂瑰紡: {pay_type}") create_result = subprocess.run( [sys.executable, script_path, 'create', order.order_no, amount_str, subject, body, pay_type], capture_output=True, text=True, timeout=60 ) print(f"[鏀粯瀹漖 鍒涘缓璁㈠崟杩斿洖: {create_result.stdout}") if create_result.stderr: print(f"[鏀粯瀹漖 閿欒杈撳嚭: {create_result.stderr}") alipay_result = json.loads(create_result.stdout) if create_result.stdout else {'success': False, 'error': '鏃犺繑鍥?} if alipay_result.get('success'): # 鑾峰彇鏀粯瀹濊繑鍥炵殑鏀粯閾炬帴 pay_url = alipay_result['pay_url'] order.pay_url = pay_url order.remark = f"鏀粯瀹濇敮浠?- 璁㈠崟宸插垱寤? db.session.commit() return jsonify({ 'success': True, 'data': order.to_dict(), 'message': '璁㈠崟鍒涘缓鎴愬姛' }) else: order.remark = f"鏀粯瀹濇敮浠樺け璐? {alipay_result.get('error')}" db.session.commit() return jsonify({ 'success': False, 'error': f"鏀粯瀹濊鍗曞垱寤哄け璐? {alipay_result.get('error')}" }), 500 except subprocess.TimeoutExpired: order.remark = "鏀粯瀹濇敮浠樿秴鏃? db.session.commit() return jsonify({'success': False, 'error': '鏀粯瀹濇敮浠樿秴鏃?}), 500 except json.JSONDecodeError as e: order.remark = f"鏀粯瀹濊繑鍥炶В鏋愬け璐? 
{str(e)}" db.session.commit() return jsonify({'success': False, 'error': '鏀粯瀹濊繑鍥炴暟鎹紓甯?}), 500 except Exception as e: import traceback print(f"[鏀粯瀹漖 Exception: {e}") traceback.print_exc() order.remark = f"鏀粯寮傚父: {str(e)}" db.session.commit() return jsonify({'success': False, 'error': '鏀粯寮傚父'}), 500 except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': '鍒涘缓璁㈠崟澶辫触'}), 500 @app.route('/api/payment/alipay/callback', methods=['POST']) def alipay_payment_callback(): """鏀粯瀹濆紓姝ュ洖璋冨鐞?"" try: # 鑾峰彇POST鍙傛暟 callback_params = request.form.to_dict() print(f"馃摜 鏀跺埌鏀粯瀹濇敮浠樺洖璋? {callback_params}") # 楠岃瘉鍥炶皟鏁版嵁 try: from alipay_pay import create_alipay_instance alipay = create_alipay_instance() verify_result = alipay.verify_callback(callback_params.copy()) if not verify_result['success']: print(f"鉂?鏀粯瀹濆洖璋冪鍚嶉獙璇佸け璐? {verify_result['error']}") return 'fail' callback_data = verify_result['data'] except Exception as e: print(f"鉂?鏀粯瀹濆洖璋冨鐞嗗紓甯? {e}") return 'fail' # 鑾峰彇鍏抽敭瀛楁 trade_status = callback_data.get('trade_status') out_trade_no = callback_data.get('out_trade_no') # 鍟嗘埛璁㈠崟鍙? trade_no = callback_data.get('trade_no') # 鏀粯瀹濅氦鏄撳彿 total_amount = callback_data.get('total_amount') print(f"馃摝 鏀粯瀹濆洖璋冩暟鎹В鏋?") print(f" 浜ゆ槗鐘舵€? {trade_status}") print(f" 璁㈠崟鍙? {out_trade_no}") print(f" 浜ゆ槗鍙? {trade_no}") print(f" 閲戦: {total_amount}") if not out_trade_no: print("鉂?缂哄皯璁㈠崟鍙?) return 'fail' # 鏌ユ壘璁㈠崟 order = PaymentOrder.query.filter_by(order_no=out_trade_no).first() if not order: print(f"鉂?璁㈠崟涓嶅瓨鍦? {out_trade_no}") return 'fail' # 鍙鐞嗕氦鏄撴垚鍔熺殑鍥炶皟 if trade_status in ['TRADE_SUCCESS', 'TRADE_FINISHED']: print(f"馃帀 鏀粯瀹濇敮浠樻垚鍔? 璁㈠崟 {out_trade_no}") # 妫€鏌ヨ鍗曟槸鍚﹀凡缁忓鐞嗚繃 if order.status == 'paid': print(f"鈩癸笍 璁㈠崟宸插鐞嗚繃: {out_trade_no}") return 'success' # 鏇存柊璁㈠崟鐘舵€? old_status = order.status order.mark_as_paid(trade_no, 'alipay') print(f"馃摑 璁㈠崟鐘舵€佸凡鏇存柊: {old_status} -> paid") # 婵€娲荤敤鎴疯闃? 
subscription = activate_user_subscription(order.user_id, order.plan_name, order.billing_cycle) if subscription: print(f"鉁?鐢ㄦ埛璁㈤槄宸叉縺娲? 鐢ㄦ埛{order.user_id}, 濂楅{order.plan_name}") else: print(f"鈿狅笍 璁㈤槄婵€娲诲け璐ワ紝浣嗚鍗曞凡鏍囪涓哄凡鏀粯") # 璁板綍浼樻儬鐮佷娇鐢ㄦ儏鍐? if order.promo_code_id: try: existing_usage = PromoCodeUsage.query.filter_by(order_id=order.id).first() if not existing_usage: usage = PromoCodeUsage( promo_code_id=order.promo_code_id, user_id=order.user_id, order_id=order.id, original_amount=order.original_amount or order.amount, discount_amount=order.discount_amount or 0, final_amount=order.amount ) db.session.add(usage) promo = PromoCode.query.get(order.promo_code_id) if promo: promo.current_uses = (promo.current_uses or 0) + 1 print(f"馃帿 浼樻儬鐮佷娇鐢ㄨ褰曞凡鍒涘缓: {promo.code}, 褰撳墠浣跨敤娆℃暟: {promo.current_uses}") else: print(f"鈩癸笍 浼樻儬鐮佷娇鐢ㄨ褰曞凡瀛樺湪锛岃烦杩?) except Exception as e: print(f"鈿狅笍 璁板綍浼樻儬鐮佷娇鐢ㄥけ璐? {e}") db.session.commit() elif trade_status == 'TRADE_CLOSED': # 浜ゆ槗鍏抽棴 if order.status not in ['paid', 'cancelled']: order.status = 'cancelled' db.session.commit() print(f"馃摑 璁㈠崟宸插叧闂? {out_trade_no}") # 杩斿洖鎴愬姛鍝嶅簲缁欐敮浠樺疂 return 'success' except Exception as e: db.session.rollback() print(f"鉂?鏀粯瀹濆洖璋冨鐞嗗け璐? {e}") import traceback app.logger.error(f"鏀粯瀹濆洖璋冨鐞嗛敊璇? {e}", exc_info=True) return 'fail' @app.route('/api/payment/alipay/return', methods=['GET']) def alipay_payment_return(): """鏀粯瀹濆悓姝ヨ繑鍥炲鐞嗭紙鐢ㄦ埛鏀粯鍚庤烦杞洖鏉ワ級""" try: # 鑾峰彇GET鍙傛暟 return_params = request.args.to_dict() print(f"馃摜 鏀粯瀹濆悓姝ヨ繑鍥? {return_params}") out_trade_no = return_params.get('out_trade_no') if out_trade_no: # 閲嶅畾鍚戝埌鍓嶇鏀粯缁撴灉椤甸潰 return redirect(f'{FRONTEND_URL}/pricing?payment_return=alipay&order_no={out_trade_no}') else: return redirect(f'{FRONTEND_URL}/pricing?payment_return=alipay&error=missing_order') except Exception as e: print(f"鉂?鏀粯瀹濆悓姝ヨ繑鍥炲鐞嗗け璐? 
{e}") return redirect(f'{FRONTEND_URL}/pricing?payment_return=alipay&error=exception') @app.route('/api/payment/alipay/order//status', methods=['GET']) def check_alipay_order_status(order_id): """鏌ヨ鏀粯瀹濊鍗曟敮浠樼姸鎬?"" try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 # 鏌ユ壘璁㈠崟 order = PaymentOrder.query.filter_by( id=order_id, user_id=session['user_id'] ).first() if not order: return jsonify({'success': False, 'error': '璁㈠崟涓嶅瓨鍦?}), 404 # 濡傛灉璁㈠崟宸茬粡鏄凡鏀粯鐘舵€侊紝鐩存帴杩斿洖 if order.status == 'paid': return jsonify({ 'success': True, 'data': order.to_dict(), 'message': '璁㈠崟宸叉敮浠?, 'payment_success': True }) # 濡傛灉璁㈠崟杩囨湡锛屾爣璁颁负杩囨湡 if order.is_expired(): order.status = 'expired' db.session.commit() return jsonify({ 'success': True, 'data': order.to_dict(), 'message': '璁㈠崟宸茶繃鏈? }) # 璋冪敤鏀粯瀹滱PI鏌ヨ鐪熷疄鐘舵€? try: import subprocess script_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'alipay_pay_worker.py') query_proc = subprocess.run( [sys.executable, script_path, 'query', order.order_no], capture_output=True, text=True, timeout=30 ) query_result = json.loads(query_proc.stdout) if query_proc.stdout else {'success': False, 'error': '鏃犺繑鍥?} if query_result.get('success'): trade_state = query_result.get('trade_state') trade_no = query_result.get('trade_no') if trade_state == 'SUCCESS': # 鏀粯鎴愬姛锛屾洿鏂拌鍗曠姸鎬? order.mark_as_paid(trade_no, 'alipay') # 婵€娲荤敤鎴疯闃? activate_user_subscription(order.user_id, order.plan_name, order.billing_cycle) # 璁板綍浼樻儬鐮佷娇鐢ㄦ儏鍐? 
if order.promo_code_id: try: existing_usage = PromoCodeUsage.query.filter_by(order_id=order.id).first() if not existing_usage: usage = PromoCodeUsage( promo_code_id=order.promo_code_id, user_id=order.user_id, order_id=order.id, original_amount=order.original_amount or order.amount, discount_amount=order.discount_amount or 0, final_amount=order.amount ) db.session.add(usage) promo = PromoCode.query.get(order.promo_code_id) if promo: promo.current_uses = (promo.current_uses or 0) + 1 print(f"馃帿 浼樻儬鐮佷娇鐢ㄨ褰曞凡鍒涘缓: {promo.code}") except Exception as e: print(f"鈿狅笍 璁板綍浼樻儬鐮佷娇鐢ㄥけ璐? {e}") db.session.commit() return jsonify({ 'success': True, 'data': order.to_dict(), 'message': '鏀粯鎴愬姛锛佽闃呭凡婵€娲?, 'payment_success': True }) elif trade_state in ['NOTPAY', 'WAIT_BUYER_PAY']: # 鏈敮浠樻垨绛夊緟鏀粯 return jsonify({ 'success': True, 'data': order.to_dict(), 'message': '绛夊緟鏀粯...', 'payment_success': False }) elif trade_state in ['CLOSED', 'TRADE_CLOSED']: # 浜ゆ槗鍏抽棴 order.status = 'cancelled' db.session.commit() return jsonify({ 'success': True, 'data': order.to_dict(), 'message': '浜ゆ槗宸插叧闂?, 'payment_success': False }) else: # 鍏朵粬鐘舵€? return jsonify({ 'success': True, 'data': order.to_dict(), 'message': f'褰撳墠鐘舵€? {trade_state}', 'payment_success': False }) else: # 鏀粯瀹濇煡璇㈠け璐ワ紝杩斿洖褰撳墠鐘舵€? return jsonify({ 'success': True, 'data': order.to_dict(), 'message': f"鏌ヨ澶辫触: {query_result.get('error')}", 'payment_success': False }) except Exception as e: # 鏌ヨ澶辫触锛岃繑鍥炲綋鍓嶈鍗曠姸鎬? return jsonify({ 'success': True, 'data': order.to_dict(), 'message': '鏃犳硶鏌ヨ鏀粯鐘舵€侊紝璇风◢鍚庨噸璇?, 'payment_success': False }) except Exception as e: return jsonify({'success': False, 'error': '鏌ヨ澶辫触'}), 500 @app.route('/api/payment/alipay/order-by-no//status', methods=['GET']) def check_alipay_order_status_by_no(order_no): """閫氳繃璁㈠崟鍙锋煡璇㈡敮浠樺疂璁㈠崟鏀粯鐘舵€侊紙鐢ㄤ簬鎵嬫満绔敮浠樿繑鍥烇級""" try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 # 閫氳繃璁㈠崟鍙锋煡鎵捐鍗? 
order = PaymentOrder.query.filter_by( order_no=order_no, user_id=session['user_id'] ).first() if not order: return jsonify({'success': False, 'error': '璁㈠崟涓嶅瓨鍦?}), 404 # 澶嶇敤鐜版湁鐨勭姸鎬佹鏌ラ€昏緫 return check_alipay_order_status(str(order.id)) except Exception as e: return jsonify({'success': False, 'error': '鏌ヨ澶辫触'}), 500 @app.route('/api/auth/session', methods=['GET']) def get_session_info(): """鑾峰彇褰撳墠鐧诲綍鐢ㄦ埛淇℃伅""" if 'user_id' in session: user = User.query.get(session['user_id']) if user: # 鑾峰彇鐢ㄦ埛璁㈤槄淇℃伅 subscription_info = get_user_subscription_safe(user.id).to_dict() return jsonify({ 'success': True, 'isAuthenticated': True, 'user': { 'id': user.id, 'username': user.username, 'nickname': user.nickname or user.username, 'email': user.email, 'phone': user.phone, 'phone_confirmed': bool(user.phone_confirmed), 'email_confirmed': bool(user.email_confirmed) if hasattr(user, 'email_confirmed') else None, 'avatar_url': user.avatar_url, 'has_wechat': bool(user.wechat_open_id), 'created_at': user.created_at.isoformat() if user.created_at else None, 'last_seen': user.last_seen.isoformat() if user.last_seen else None, # 灏嗚闃呭瓧娈垫槧灏勫埌鍓嶇鏈熸湜鐨勫瓧娈靛悕 'subscription_type': subscription_info['type'], 'subscription_status': subscription_info['status'], 'subscription_end_date': subscription_info['end_date'], 'is_subscription_active': subscription_info['is_active'], 'subscription_days_left': subscription_info['days_left'] } }) return jsonify({ 'success': True, 'isAuthenticated': False, 'user': None }) def generate_verification_code(): """鐢熸垚6浣嶆暟瀛楅獙璇佺爜""" return ''.join(random.choices(string.digits, k=6)) @app.route('/api/auth/login', methods=['POST']) def login(): """浼犵粺鐧诲綍 - 浣跨敤Session""" try: username = request.form.get('username') email = request.form.get('email') phone = request.form.get('phone') password = request.form.get('password') # 楠岃瘉蹇呰鍙傛暟 if not password: return jsonify({'success': False, 'error': '瀵嗙爜涓嶈兘涓虹┖'}), 400 # 鏍规嵁鎻愪緵鐨勪俊鎭煡鎵剧敤鎴? 
user = None if username: # 妫€鏌sername鏄惁涓烘墜鏈哄彿鏍煎紡 if re.match(r'^1[3-9]\d{9}$', username): # 濡傛灉username鏄墜鏈哄彿鏍煎紡锛屽厛鎸夋墜鏈哄彿鏌ユ壘 user = User.query.filter_by(phone=username).first() if not user: # 濡傛灉娌℃壘鍒帮紝鍐嶆寜鐢ㄦ埛鍚嶆煡鎵? user = User.find_by_login_info(username) else: # 涓嶆槸鎵嬫満鍙锋牸寮忥紝鎸夌敤鎴峰悕鏌ユ壘 user = User.find_by_login_info(username) elif email: user = User.query.filter_by(email=email).first() elif phone: user = User.query.filter_by(phone=phone).first() else: return jsonify({'success': False, 'error': '璇锋彁渚涚敤鎴峰悕銆侀偖绠辨垨鎵嬫満鍙?}), 400 if not user: return jsonify({'success': False, 'error': '鐢ㄦ埛涓嶅瓨鍦?}), 404 # 灏濊瘯瀵嗙爜楠岃瘉 password_valid = user.check_password(password) if not password_valid: # 杩樺彲浠ュ皾璇曠洿鎺ラ獙璇? if user.password_hash: from werkzeug.security import check_password_hash direct_check = check_password_hash(user.password_hash, password) return jsonify({'success': False, 'error': '瀵嗙爜閿欒'}), 401 # 璁剧疆session session.permanent = True # 浣跨敤姘镐箙session session['user_id'] = user.id session['username'] = user.username session['logged_in'] = True # Flask-Login 鐧诲綍 login_user(user, remember=True) # 鏇存柊鏈€鍚庣櫥褰曟椂闂? 
user.update_last_seen() return jsonify({ 'success': True, 'message': '鐧诲綍鎴愬姛', 'user': { 'id': user.id, 'username': user.username, 'nickname': user.nickname or user.username, 'email': user.email, 'phone': user.phone, 'avatar_url': user.avatar_url, 'has_wechat': bool(user.wechat_open_id) } }) except Exception as e: import traceback app.logger.error(f"鍥炶皟澶勭悊閿欒: {e}", exc_info=True) return jsonify({'success': False, 'error': '鐧诲綍澶勭悊澶辫触锛岃閲嶈瘯'}), 500 # 娣诲姞OPTIONS璇锋眰澶勭悊 @app.before_request def handle_preflight(): if request.method == "OPTIONS": response = make_response() response.headers.add("Access-Control-Allow-Origin", "*") response.headers.add('Access-Control-Allow-Headers', "*") response.headers.add('Access-Control-Allow-Methods', "*") return response # 淇敼瀵嗙爜API @app.route('/api/account/change-password', methods=['POST']) @login_required def change_password(): """淇敼褰撳墠鐢ㄦ埛瀵嗙爜""" try: data = request.get_json() or request.form current_password = data.get('currentPassword') or data.get('current_password') new_password = data.get('newPassword') or data.get('new_password') is_first_set = data.get('isFirstSet', False) # 鏄惁涓洪娆¤缃瘑鐮? if not new_password: return jsonify({'success': False, 'error': '鏂板瘑鐮佷笉鑳戒负绌?}), 400 if len(new_password) < 6: return jsonify({'success': False, 'error': '鏂板瘑鐮佽嚦灏戦渶瑕?涓瓧绗?}), 400 # 鑾峰彇褰撳墠鐢ㄦ埛 user = current_user if not user: return jsonify({'success': False, 'error': '鐢ㄦ埛鏈櫥褰?}), 401 # 妫€鏌ユ槸鍚︿负寰俊鐢ㄦ埛涓旈娆¤缃瘑鐮? is_wechat_user = bool(user.wechat_open_id) # 濡傛灉鏄井淇$敤鎴烽娆¤缃瘑鐮侊紝鎴栬€呮槑纭爣璁颁负棣栨璁剧疆锛屽垯璺宠繃褰撳墠瀵嗙爜楠岃瘉 if is_first_set or (is_wechat_user and not current_password): pass # 璺宠繃褰撳墠瀵嗙爜楠岃瘉 else: # 鏅€氱敤鎴锋垨闈為娆¤缃紝闇€瑕侀獙璇佸綋鍓嶅瘑鐮? if not current_password: return jsonify({'success': False, 'error': '璇疯緭鍏ュ綋鍓嶅瘑鐮?}), 400 if not user.check_password(current_password): return jsonify({'success': False, 'error': '褰撳墠瀵嗙爜閿欒'}), 400 # 璁剧疆鏂板瘑鐮? 
user.set_password(new_password) db.session.commit() return jsonify({ 'success': True, 'message': '瀵嗙爜璁剧疆鎴愬姛' if (is_first_set or is_wechat_user) else '瀵嗙爜淇敼鎴愬姛' }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 # 妫€鏌ョ敤鎴峰瘑鐮佺姸鎬丄PI @app.route('/api/account/password-status', methods=['GET']) @login_required def get_password_status(): """鑾峰彇褰撳墠鐢ㄦ埛鐨勫瘑鐮佺姸鎬佷俊鎭?"" try: user = current_user if not user: return jsonify({'success': False, 'error': '鐢ㄦ埛鏈櫥褰?}), 401 is_wechat_user = bool(user.wechat_open_id) return jsonify({ 'success': True, 'data': { 'isWechatUser': is_wechat_user, 'hasPassword': bool(user.password_hash), 'needsFirstTimeSetup': is_wechat_user # 寰俊鐢ㄦ埛闇€瑕侀娆¤缃? } }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 # 妫€鏌ョ敤鎴蜂俊鎭畬鏁存€PI @app.route('/api/account/profile-completeness', methods=['GET']) @login_required def get_profile_completeness(): try: user = current_user if not user: return jsonify({'success': False, 'error': '鐢ㄦ埛鏈櫥褰?}), 401 is_wechat_user = bool(user.wechat_open_id) # 妫€鏌ュ悇椤逛俊鎭? completeness = { 'hasPassword': bool(user.password_hash), 'hasPhone': bool(user.phone), 'hasEmail': bool(user.email and '@' in user.email and not user.email.endswith('@valuefrontier.temp')), 'isWechatUser': is_wechat_user } # 璁$畻瀹屾暣搴? total_items = 3 completed_items = sum([completeness['hasPassword'], completeness['hasPhone'], completeness['hasEmail']]) completeness_percentage = int((completed_items / total_items) * 100) # 鏅鸿兘鍒ゆ柇鏄惁闇€瑕佹彁閱? needs_attention = False missing_items = [] # 鍙湪鐢ㄦ埛棣栨鐧诲綍鎴栨渶杩戠櫥褰曟椂鎻愰啋 if is_wechat_user: # 妫€鏌ョ敤鎴锋槸鍚︽槸鏂扮敤鎴凤紙娉ㄥ唽7澶╁唴锛? is_new_user = (datetime.now() - user.created_at).days < 7 # 妫€鏌ユ槸鍚︽渶杩戞病鏈夋彁閱掕繃锛堜娇鐢╯ession璁板綍锛? last_reminder = session.get('last_completeness_reminder') should_remind = False if not last_reminder: should_remind = True else: # 姣?澶╂渶澶氭彁閱掍竴娆? 
days_since_reminder = (datetime.now() - datetime.fromisoformat(last_reminder)).days should_remind = days_since_reminder >= 7 # 鍙鏂扮敤鎴锋垨闀挎椂闂存湭瀹屽杽鐨勭敤鎴锋彁閱? if (is_new_user or completeness_percentage < 50) and should_remind: needs_attention = True if not completeness['hasPassword']: missing_items.append('鐧诲綍瀵嗙爜') if not completeness['hasPhone']: missing_items.append('鎵嬫満鍙?) if not completeness['hasEmail']: missing_items.append('閭') # 璁板綍鏈鎻愰啋鏃堕棿 session['last_completeness_reminder'] = datetime.now().isoformat() return jsonify({ 'success': True, 'data': { 'completeness': completeness, 'completenessPercentage': completeness_percentage, 'needsAttention': needs_attention, 'missingItems': missing_items, 'isComplete': completed_items == total_items, 'showReminder': needs_attention # 鍓嶇浣跨敤杩欎釜瀛楁鍐冲畾鏄惁鏄剧ず鎻愰啋 } }) except Exception as e: print(f"鑾峰彇璧勬枡瀹屾暣鎬ч敊璇? {e}") return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/auth/logout', methods=['POST']) def logout(): """鐧诲嚭 - 娓呴櫎Session""" logout_user() # Flask-Login 鐧诲嚭 session.clear() return jsonify({'success': True, 'message': '宸茬櫥鍑?}) @app.route('/api/auth/send-verification-code', methods=['POST']) def send_verification_code(): """鍙戦€侀獙璇佺爜锛堟敮鎸佹墜鏈哄彿鍜岄偖绠憋級""" try: data = request.get_json() credential = data.get('credential') # 鎵嬫満鍙锋垨閭 code_type = data.get('type') # 'phone' 鎴?'email' purpose = data.get('purpose', 'login') # 'login' 鎴?'register' if not credential or not code_type: return jsonify({'success': False, 'error': '缂哄皯蹇呰鍙傛暟'}), 400 # 娓呯悊鏍煎紡瀛楃锛堢┖鏍笺€佹í绾裤€佹嫭鍙风瓑锛? if code_type == 'phone': # 绉婚櫎鎵嬫満鍙蜂腑鐨勭┖鏍笺€佹í绾裤€佹嫭鍙枫€佸姞鍙风瓑鏍煎紡瀛楃 credential = re.sub(r'[\s\-\(\)\+]', '', credential) print(f"馃摫 娓呯悊鍚庣殑鎵嬫満鍙? {credential}") elif code_type == 'email': # 閭鍙Щ闄ょ┖鏍? credential = credential.strip() # 鐢熸垚楠岃瘉鐮? verification_code = generate_verification_code() # 瀛樺偍楠岃瘉鐮佸埌session锛堝疄闄呯敓浜х幆澧冨缓璁娇鐢≧edis锛? 
session_key = f'verification_code_{code_type}_{credential}_{purpose}' session[session_key] = { 'code': verification_code, 'timestamp': time.time(), 'attempts': 0 } if code_type == 'phone': # 鎵嬫満鍙烽獙璇佺爜鍙戦€? if not re.match(r'^1[3-9]\d{9}$', credential): return jsonify({'success': False, 'error': '鎵嬫満鍙锋牸寮忎笉姝g‘'}), 400 # 鍙戦€佺湡瀹炵煭淇¢獙璇佺爜 if send_sms_code(credential, verification_code, SMS_TEMPLATE_LOGIN): print(f"[鐭俊宸插彂閫乚 楠岃瘉鐮佸埌 {credential}: {verification_code}") else: return jsonify({'success': False, 'error': '鐭俊鍙戦€佸け璐ワ紝璇风◢鍚庨噸璇?}), 500 elif code_type == 'email': # 閭楠岃瘉鐮佸彂閫? if not re.match(r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$', credential): return jsonify({'success': False, 'error': '閭鏍煎紡涓嶆纭?}), 400 # 鍙戦€佺湡瀹為偖浠堕獙璇佺爜 if send_email_code(credential, verification_code): print(f"[閭欢宸插彂閫乚 楠岃瘉鐮佸埌 {credential}: {verification_code}") else: return jsonify({'success': False, 'error': '閭欢鍙戦€佸け璐ワ紝璇风◢鍚庨噸璇?}), 500 else: return jsonify({'success': False, 'error': '涓嶆敮鎸佺殑楠岃瘉鐮佺被鍨?}), 400 return jsonify({ 'success': True, 'message': f'楠岃瘉鐮佸凡鍙戦€佸埌鎮ㄧ殑{code_type}' }) except Exception as e: print(f"鍙戦€侀獙璇佺爜閿欒: {e}") return jsonify({'success': False, 'error': '鍙戦€侀獙璇佺爜澶辫触'}), 500 @app.route('/api/auth/login-with-code', methods=['POST']) def login_with_verification_code(): """浣跨敤楠岃瘉鐮佺櫥褰?娉ㄥ唽锛堣嚜鍔ㄦ敞鍐岋級""" try: data = request.get_json() credential = data.get('credential') # 鎵嬫満鍙锋垨閭 verification_code = data.get('verification_code') login_type = data.get('login_type') # 'phone' 鎴?'email' if not credential or not verification_code or not login_type: return jsonify({'success': False, 'error': '缂哄皯蹇呰鍙傛暟'}), 400 # 娓呯悊鏍煎紡瀛楃锛堢┖鏍笺€佹í绾裤€佹嫭鍙风瓑锛? if login_type == 'phone': # 绉婚櫎鎵嬫満鍙蜂腑鐨勭┖鏍笺€佹í绾裤€佹嫭鍙枫€佸姞鍙风瓑鏍煎紡瀛楃 original_credential = credential credential = re.sub(r'[\s\-\(\)\+]', '', credential) if original_credential != credential: print(f"馃摫 鐧诲綍鏃舵竻鐞嗘墜鏈哄彿: {original_credential} -> {credential}") elif login_type == 'email': # 閭鍙Щ闄ゅ墠鍚庣┖鏍? 
credential = credential.strip() # 妫€鏌ラ獙璇佺爜 session_key = f'verification_code_{login_type}_{credential}_login' stored_code_info = session.get(session_key) if not stored_code_info: return jsonify({'success': False, 'error': '楠岃瘉鐮佸凡杩囨湡鎴栦笉瀛樺湪'}), 400 # 妫€鏌ラ獙璇佺爜鏄惁杩囨湡锛?鍒嗛挓锛? if time.time() - stored_code_info['timestamp'] > 300: session.pop(session_key, None) return jsonify({'success': False, 'error': '楠岃瘉鐮佸凡杩囨湡'}), 400 # 妫€鏌ュ皾璇曟鏁? if stored_code_info['attempts'] >= 3: session.pop(session_key, None) return jsonify({'success': False, 'error': '楠岃瘉鐮侀敊璇鏁拌繃澶?}), 400 # 楠岃瘉鐮侀敊璇? if stored_code_info['code'] != verification_code: stored_code_info['attempts'] += 1 session[session_key] = stored_code_info return jsonify({'success': False, 'error': '楠岃瘉鐮侀敊璇?}), 400 # 楠岃瘉鐮佹纭紝鏌ユ壘鐢ㄦ埛 user = None is_new_user = False if login_type == 'phone': user = User.query.filter_by(phone=credential).first() if not user: # 鑷姩娉ㄥ唽鏂扮敤鎴? is_new_user = True # 鐢熸垚鍞竴鐢ㄦ埛鍚? base_username = f"user_{credential}" username = base_username counter = 1 while User.query.filter_by(username=username).first(): username = f"{base_username}_{counter}" counter += 1 # 鍒涘缓鏂扮敤鎴? user = User(username=username, phone=credential) user.phone_confirmed = True user.email = f"{username}@valuefrontier.temp" # 涓存椂閭 db.session.add(user) db.session.commit() elif login_type == 'email': user = User.query.filter_by(email=credential).first() if not user: # 鑷姩娉ㄥ唽鏂扮敤鎴? is_new_user = True # 浠庨偖绠辩敓鎴愮敤鎴峰悕 email_prefix = credential.split('@')[0] base_username = f"user_{email_prefix}" username = base_username counter = 1 while User.query.filter_by(username=username).first(): username = f"{base_username}_{counter}" counter += 1 # 濡傛灉鐢ㄦ埛涓嶅瓨鍦紝鑷姩鍒涘缓鏂扮敤鎴? if not user: try: # 鐢熸垚鐢ㄦ埛鍚? if login_type == 'phone': # 浣跨敤鎵嬫満鍙风敓鎴愮敤鎴峰悕 base_username = f"鐢ㄦ埛{credential[-4:]}" elif login_type == 'email': # 浣跨敤閭鍓嶇紑鐢熸垚鐢ㄦ埛鍚? base_username = credential.split('@')[0] else: base_username = "鏂扮敤鎴? 
# 纭繚鐢ㄦ埛鍚嶅敮涓€ username = base_username counter = 1 while User.is_username_taken(username): username = f"{base_username}_{counter}" counter += 1 # 鍒涘缓鏂扮敤鎴? user = User(username=username) # 璁剧疆鎵嬫満鍙锋垨閭 if login_type == 'phone': user.phone = credential elif login_type == 'email': user.email = credential # 璁剧疆榛樿瀵嗙爜锛堜娇鐢ㄩ殢鏈哄瘑鐮侊紝鐢ㄦ埛鍚庣画鍙互淇敼锛? user.set_password(uuid.uuid4().hex) user.status = 'active' user.nickname = username db.session.add(user) db.session.commit() is_new_user = True print(f"鉁?鑷姩鍒涘缓鏂扮敤鎴? {username}, {login_type}: {credential}") except Exception as e: print(f"鉂?鍒涘缓鐢ㄦ埛澶辫触: {e}") db.session.rollback() return jsonify({'success': False, 'error': '鍒涘缓鐢ㄦ埛澶辫触'}), 500 # 娓呴櫎楠岃瘉鐮? session.pop(session_key, None) # 璁剧疆session session.permanent = True session['user_id'] = user.id session['username'] = user.username session['logged_in'] = True # Flask-Login 鐧诲綍 login_user(user, remember=True) # 鏇存柊鏈€鍚庣櫥褰曟椂闂? user.update_last_seen() # 鏍规嵁鏄惁涓烘柊鐢ㄦ埛杩斿洖涓嶅悓鐨勬秷鎭? message = '娉ㄥ唽鎴愬姛锛屾杩庡姞鍏ワ紒' if is_new_user else '鐧诲綍鎴愬姛' return jsonify({ 'success': True, 'message': message, 'is_new_user': is_new_user, 'user': { 'id': user.id, 'username': user.username, 'nickname': user.nickname or user.username, 'email': user.email, 'phone': user.phone, 'avatar_url': user.avatar_url, 'has_wechat': bool(user.wechat_open_id) } }) except Exception as e: print(f"楠岃瘉鐮佺櫥褰曢敊璇? 
{e}") db.session.rollback() return jsonify({'success': False, 'error': '鐧诲綍澶辫触'}), 500 @app.route('/api/auth/register', methods=['POST']) def register(): """鐢ㄦ埛娉ㄥ唽 - 浣跨敤Session""" username = request.form.get('username') email = request.form.get('email') password = request.form.get('password') # 楠岃瘉杈撳叆 if not all([username, email, password]): return jsonify({'success': False, 'error': '鎵€鏈夊瓧娈甸兘鏄繀濉殑'}), 400 # 妫€鏌ョ敤鎴峰悕鍜岄偖绠辨槸鍚﹀凡瀛樺湪 if User.is_username_taken(username): return jsonify({'success': False, 'error': '鐢ㄦ埛鍚嶅凡瀛樺湪'}), 400 if User.is_email_taken(email): return jsonify({'success': False, 'error': '閭宸茶浣跨敤'}), 400 try: # 鍒涘缓鏂扮敤鎴? user = User(username=username, email=email) user.set_password(password) user.email_confirmed = True # 鏆傛椂榛樿宸茬‘璁? db.session.add(user) db.session.flush() # 鑾峰彇 user.id # 鑷姩鍒涘缓绉垎璐︽埛锛屽垵濮?0000绉垎 credit_account = UserCreditAccount( user_id=user.id, balance=10000, frozen=0 ) db.session.add(credit_account) db.session.commit() # 鑷姩鐧诲綍 session.permanent = True session['user_id'] = user.id session['username'] = user.username session['logged_in'] = True # Flask-Login 鐧诲綍 login_user(user, remember=True) return jsonify({ 'success': True, 'message': '娉ㄥ唽鎴愬姛', 'user': { 'id': user.id, 'username': user.username, 'nickname': user.nickname or user.username, 'email': user.email } }), 201 except Exception as e: db.session.rollback() print(f"楠岃瘉鐮佺櫥褰?娉ㄥ唽閿欒: {e}") return jsonify({'success': False, 'error': '鐧诲綍澶辫触'}), 500 def send_sms_code(phone, code, template_id): """鍙戦€佺煭淇¢獙璇佺爜锛堜娇鐢?subprocess 缁曡繃 eventlet DNS 闂锛?"" import subprocess import os try: # 鑾峰彇鑴氭湰璺緞 script_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sms_sender.py') print(f"[鐭俊] 鍑嗗鍙戦€侀獙璇佺爜鍒?{phone}锛屾ā鏉縄D: {template_id}") # 浣跨敤 subprocess 鍦ㄧ嫭绔嬭繘绋嬩腑鍙戦€佺煭淇★紙缁曡繃 eventlet DNS锛? result = subprocess.run( [ sys.executable, # 浣跨敤褰撳墠 Python 瑙i噴鍣? 
script_path, phone, code, template_id, SMS_SECRET_ID, SMS_SECRET_KEY, SMS_SDK_APP_ID, SMS_SIGN_NAME ], capture_output=True, text=True, timeout=30 ) if result.returncode == 0: print(f"[鐭俊] 鉁?鍙戦€佹垚鍔? {result.stdout.strip()}") return True else: print(f"[鐭俊] 鉁?鍙戦€佸け璐? {result.stderr.strip()}") return False except subprocess.TimeoutExpired: print(f"[鐭俊] 鉁?鍙戦€佽秴鏃?) return False except Exception as e: print(f"[鐭俊] 鉁?鍙戦€佸紓甯? {type(e).__name__}: {e}") return False def send_email_code(email, code): """鍙戦€侀偖浠堕獙璇佺爜锛堜娇鐢?subprocess 缁曡繃 eventlet DNS 闂锛?"" import subprocess import os try: # 鑾峰彇鑴氭湰璺緞 script_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'email_sender.py') subject = '浠峰€煎墠娌?- 楠岃瘉鐮? body = f'鎮ㄧ殑楠岃瘉鐮佹槸锛歿code}锛屾湁鏁堟湡5鍒嗛挓銆傚闈炴湰浜烘搷浣滐紝璇峰拷鐣ユ閭欢銆? print(f"[閭欢] 鍑嗗鍙戦€侀獙璇佺爜鍒?{email}") print(f"[閭欢] 鏈嶅姟鍣? {MAIL_SERVER}:{MAIL_PORT}, SSL: {MAIL_USE_SSL}") # 浣跨敤 subprocess 鍦ㄧ嫭绔嬭繘绋嬩腑鍙戦€侀偖浠讹紙缁曡繃 eventlet DNS锛? result = subprocess.run( [ sys.executable, # 浣跨敤褰撳墠 Python 瑙i噴鍣? script_path, email, subject, body, MAIL_SERVER, str(MAIL_PORT), MAIL_USERNAME, MAIL_PASSWORD, str(MAIL_USE_SSL).lower() ], capture_output=True, text=True, timeout=60 ) if result.returncode == 0: print(f"[閭欢] 鉁?鍙戦€佹垚鍔? {result.stdout.strip()}") return True else: print(f"[閭欢] 鉁?鍙戦€佸け璐? {result.stderr.strip()}") return False except subprocess.TimeoutExpired: print(f"[閭欢] 鉁?鍙戦€佽秴鏃?) return False except Exception as e: print(f"[閭欢] 鉁?鍙戦€佸紓甯? {type(e).__name__}: {e}") return False @app.route('/api/auth/send-sms-code', methods=['POST']) def send_sms_verification(): """鍙戦€佹墜鏈洪獙璇佺爜""" data = request.get_json() phone = data.get('phone') if not phone: return jsonify({'error': '鎵嬫満鍙蜂笉鑳戒负绌?}), 400 # 娉ㄥ唽鏃堕獙璇佹槸鍚﹀凡娉ㄥ唽锛涜嫢鐢ㄤ簬缁戝畾鎵嬫満锛岄渶瑕佸彟澶栨帴鍙? # 杩欓噷淇濈暀鍘熼€昏緫锛屾柊澧炵粦瀹氭帴鍙e鐞嗕笉鍚岃鍒? if User.query.filter_by(phone=phone).first(): return jsonify({'error': '璇ユ墜鏈哄彿宸叉敞鍐?}), 400 # 鐢熸垚楠岃瘉鐮? code = generate_verification_code() # 鍙戦€佺煭淇? if send_sms_code(phone, code, SMS_TEMPLATE_REGISTER): # 瀛樺偍楠岃瘉鐮佸埌 Redis锛?鍒嗛挓鏈夋晥锛? 
set_verification_code(f'phone_{phone}', code) return jsonify({'message': '楠岃瘉鐮佸凡鍙戦€?}), 200 else: return jsonify({'error': '楠岃瘉鐮佸彂閫佸け璐?}), 500 @app.route('/api/auth/send-email-code', methods=['POST']) def send_email_verification(): """鍙戦€侀偖绠遍獙璇佺爜""" data = request.get_json() email = data.get('email') if not email: return jsonify({'error': '閭涓嶈兘涓虹┖'}), 400 if User.query.filter_by(email=email).first(): return jsonify({'error': '璇ラ偖绠卞凡娉ㄥ唽'}), 400 # 鐢熸垚楠岃瘉鐮? code = generate_verification_code() # 鍙戦€侀偖浠? if send_email_code(email, code): # 瀛樺偍楠岃瘉鐮佸埌 Redis锛?鍒嗛挓鏈夋晥锛? set_verification_code(f'email_{email}', code) return jsonify({'message': '楠岃瘉鐮佸凡鍙戦€?}), 200 else: return jsonify({'error': '楠岃瘉鐮佸彂閫佸け璐?}), 500 @app.route('/api/auth/register/phone', methods=['POST']) def register_with_phone(): """鎵嬫満鍙锋敞鍐?- 浣跨敤Session""" data = request.get_json() phone = data.get('phone') code = data.get('code') password = data.get('password') username = data.get('username') if not all([phone, code, password, username]): return jsonify({'success': False, 'error': '鎵€鏈夊瓧娈甸兘鏄繀濉殑'}), 400 # 楠岃瘉楠岃瘉鐮侊紙浠?Redis 鑾峰彇锛? stored_code = get_verification_code(f'phone_{phone}') if not stored_code or stored_code['expires'] < time.time(): return jsonify({'success': False, 'error': '楠岃瘉鐮佸凡杩囨湡'}), 400 if stored_code['code'] != code: return jsonify({'success': False, 'error': '楠岃瘉鐮侀敊璇?}), 400 if User.query.filter_by(username=username).first(): return jsonify({'success': False, 'error': '鐢ㄦ埛鍚嶅凡瀛樺湪'}), 400 try: # 鍒涘缓鐢ㄦ埛 user = User(username=username, phone=phone) user.email = f"{username}@valuefrontier.temp" user.set_password(password) user.phone_confirmed = True db.session.add(user) db.session.flush() # 鑾峰彇 user.id # 鑷姩鍒涘缓绉垎璐︽埛锛屽垵濮?0000绉垎 credit_account = UserCreditAccount( user_id=user.id, balance=10000, frozen=0 ) db.session.add(credit_account) db.session.commit() # 娓呴櫎楠岃瘉鐮侊紙浠?Redis 鍒犻櫎锛? 
delete_verification_code(f'phone_{phone}') # 鑷姩鐧诲綍 session.permanent = True session['user_id'] = user.id session['username'] = user.username session['logged_in'] = True # Flask-Login 鐧诲綍 login_user(user, remember=True) return jsonify({ 'success': True, 'message': '娉ㄥ唽鎴愬姛', 'user': { 'id': user.id, 'username': user.username, 'phone': user.phone } }), 201 except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': '娉ㄥ唽澶辫触锛岃閲嶈瘯'}), 500 @app.route('/api/account/phone/send-code', methods=['POST']) def send_sms_bind_code(): """鍙戦€佺粦瀹氭墜鏈洪獙璇佺爜锛堥渶宸茬櫥褰曪級""" # 璋冭瘯鏃ュ織锛氭鏌?session 鐘舵€? user_agent = request.headers.get('User-Agent', '') is_wechat = 'MicroMessenger' in user_agent print(f"[缁戝畾鎵嬫満楠岃瘉鐮乚 User-Agent: {user_agent[:100]}...") print(f"[缁戝畾鎵嬫満楠岃瘉鐮乚 鏄惁寰俊娴忚鍣? {is_wechat}") print(f"[缁戝畾鎵嬫満楠岃瘉鐮乚 session 鍐呭: logged_in={session.get('logged_in')}, user_id={session.get('user_id')}") print(f"[缁戝畾鎵嬫満楠岃瘉鐮乚 Cookie: {request.cookies.get('session', 'None')[:20] if request.cookies.get('session') else 'None'}...") if not session.get('logged_in'): print(f"[缁戝畾鎵嬫満楠岃瘉鐮乚 鉂?鏈櫥褰曪紝鎷掔粷璇锋眰") return jsonify({'error': '鏈櫥褰?}), 401 data = request.get_json() phone = data.get('phone') if not phone: return jsonify({'error': '鎵嬫満鍙蜂笉鑳戒负绌?}), 400 # 缁戝畾鏃惰姹傛墜鏈哄彿鏈鍗犵敤 if User.query.filter_by(phone=phone).first(): return jsonify({'error': '璇ユ墜鏈哄彿宸茶鍏朵粬璐﹀彿浣跨敤'}), 400 code = generate_verification_code() if send_sms_code(phone, code, SMS_TEMPLATE_REGISTER): # 瀛樺偍楠岃瘉鐮佸埌 Redis锛?鍒嗛挓鏈夋晥锛? set_verification_code(f'bind_{phone}', code) return jsonify({'message': '楠岃瘉鐮佸凡鍙戦€?}), 200 else: return jsonify({'error': '楠岃瘉鐮佸彂閫佸け璐?}), 500 @app.route('/api/account/phone/bind', methods=['POST']) def bind_phone(): """褰撳墠鐧诲綍鐢ㄦ埛缁戝畾鎵嬫満鍙?"" if not session.get('logged_in'): return jsonify({'error': '鏈櫥褰?}), 401 data = request.get_json() phone = data.get('phone') code = data.get('code') if not phone or not code: return jsonify({'error': '鎵嬫満鍙峰拰楠岃瘉鐮佷笉鑳戒负绌?}), 400 # 浠?Redis 鑾峰彇楠岃瘉鐮? 
stored = get_verification_code(f'bind_{phone}') if not stored or stored['expires'] < time.time(): return jsonify({'error': '楠岃瘉鐮佸凡杩囨湡'}), 400 if stored['code'] != code: return jsonify({'error': '楠岃瘉鐮侀敊璇?}), 400 if User.query.filter_by(phone=phone).first(): return jsonify({'error': '璇ユ墜鏈哄彿宸茶鍏朵粬璐﹀彿浣跨敤'}), 400 try: user = User.query.get(session.get('user_id')) if not user: return jsonify({'error': '鐢ㄦ埛涓嶅瓨鍦?}), 404 user.phone = phone user.confirm_phone() # 娓呴櫎楠岃瘉鐮侊紙浠?Redis 鍒犻櫎锛? delete_verification_code(f'bind_{phone}') return jsonify({'message': '缁戝畾鎴愬姛', 'success': True}), 200 except Exception as e: print(f"Bind phone error: {e}") db.session.rollback() return jsonify({'error': '缁戝畾澶辫触锛岃閲嶈瘯'}), 500 @app.route('/api/account/phone/unbind', methods=['POST']) def unbind_phone(): """瑙g粦鎵嬫満鍙凤紙闇€宸茬櫥褰曪級""" if not session.get('logged_in'): return jsonify({'error': '鏈櫥褰?}), 401 try: user = User.query.get(session.get('user_id')) if not user: return jsonify({'error': '鐢ㄦ埛涓嶅瓨鍦?}), 404 user.phone = None user.phone_confirmed = False user.phone_confirm_time = None db.session.commit() return jsonify({'message': '瑙g粦鎴愬姛', 'success': True}), 200 except Exception as e: print(f"Unbind phone error: {e}") db.session.rollback() return jsonify({'error': '瑙g粦澶辫触锛岃閲嶈瘯'}), 500 @app.route('/api/account/email/send-bind-code', methods=['POST']) def send_email_bind_code(): """鍙戦€佺粦瀹氶偖绠遍獙璇佺爜锛堥渶宸茬櫥褰曪級""" if not session.get('logged_in'): return jsonify({'error': '鏈櫥褰?}), 401 data = request.get_json() email = data.get('email') if not email: return jsonify({'error': '閭涓嶈兘涓虹┖'}), 400 # 閭鏍煎紡楠岃瘉 if not re.match(r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$', email): return jsonify({'error': '閭鏍煎紡涓嶆纭?}), 400 # 妫€鏌ラ偖绠辨槸鍚﹀凡琚叾浠栬处鍙蜂娇鐢? if User.query.filter_by(email=email).first(): return jsonify({'error': '璇ラ偖绠卞凡琚叾浠栬处鍙蜂娇鐢?}), 400 # 鐢熸垚楠岃瘉鐮? code = ''.join(random.choices(string.digits, k=6)) if send_email_code(email, code): # 瀛樺偍楠岃瘉鐮佸埌 Redis锛?鍒嗛挓鏈夋晥锛? 
set_verification_code(f'bind_{email}', code) return jsonify({'message': '楠岃瘉鐮佸凡鍙戦€?}), 200 else: return jsonify({'error': '楠岃瘉鐮佸彂閫佸け璐?}), 500 @app.route('/api/account/email/bind', methods=['POST']) def bind_email(): """褰撳墠鐧诲綍鐢ㄦ埛缁戝畾閭""" if not session.get('logged_in'): return jsonify({'error': '鏈櫥褰?}), 401 data = request.get_json() email = data.get('email') code = data.get('code') if not email or not code: return jsonify({'error': '閭鍜岄獙璇佺爜涓嶈兘涓虹┖'}), 400 # 浠?Redis 鑾峰彇楠岃瘉鐮? stored = get_verification_code(f'bind_{email}') if not stored or stored['expires'] < time.time(): return jsonify({'error': '楠岃瘉鐮佸凡杩囨湡'}), 400 if stored['code'] != code: return jsonify({'error': '楠岃瘉鐮侀敊璇?}), 400 if User.query.filter_by(email=email).first(): return jsonify({'error': '璇ラ偖绠卞凡琚叾浠栬处鍙蜂娇鐢?}), 400 try: user = User.query.get(session.get('user_id')) if not user: return jsonify({'error': '鐢ㄦ埛涓嶅瓨鍦?}), 404 user.email = email user.confirm_email() db.session.commit() # 娓呴櫎楠岃瘉鐮侊紙浠?Redis 鍒犻櫎锛? delete_verification_code(f'bind_{email}') return jsonify({ 'message': '閭缁戝畾鎴愬姛', 'success': True, 'user': { 'email': user.email, 'email_confirmed': user.email_confirmed } }), 200 except Exception as e: print(f"Bind email error: {e}") db.session.rollback() return jsonify({'error': '缁戝畾澶辫触锛岃閲嶈瘯'}), 500 @app.route('/api/account/email/unbind', methods=['POST']) def unbind_email(): """瑙g粦閭锛堥渶宸茬櫥褰曪級""" if not session.get('logged_in'): return jsonify({'error': '鏈櫥褰?}), 401 try: user = User.query.get(session.get('user_id')) if not user: return jsonify({'error': '鐢ㄦ埛涓嶅瓨鍦?}), 404 user.email = None user.email_confirmed = False db.session.commit() return jsonify({'message': '瑙g粦鎴愬姛', 'success': True}), 200 except Exception as e: print(f"Unbind email error: {e}") db.session.rollback() return jsonify({'error': '瑙g粦澶辫触锛岃閲嶈瘯'}), 500 @app.route('/api/auth/register/email', methods=['POST']) def register_with_email(): """閭娉ㄥ唽 - 浣跨敤Session""" data = request.get_json() email = data.get('email') code = 
data.get('code') password = data.get('password') username = data.get('username') if not all([email, code, password, username]): return jsonify({'success': False, 'error': '鎵€鏈夊瓧娈甸兘鏄繀濉殑'}), 400 # 楠岃瘉楠岃瘉鐮侊紙浠?Redis 鑾峰彇锛? stored_code = get_verification_code(f'email_{email}') if not stored_code or stored_code['expires'] < time.time(): return jsonify({'success': False, 'error': '楠岃瘉鐮佸凡杩囨湡'}), 400 if stored_code['code'] != code: return jsonify({'success': False, 'error': '楠岃瘉鐮侀敊璇?}), 400 if User.query.filter_by(username=username).first(): return jsonify({'success': False, 'error': '鐢ㄦ埛鍚嶅凡瀛樺湪'}), 400 try: # 鍒涘缓鐢ㄦ埛 user = User(username=username, email=email) user.set_password(password) user.email_confirmed = True db.session.add(user) db.session.flush() # 鑾峰彇 user.id # 鑷姩鍒涘缓绉垎璐︽埛锛屽垵濮?0000绉垎 credit_account = UserCreditAccount( user_id=user.id, balance=10000, frozen=0 ) db.session.add(credit_account) db.session.commit() # 娓呴櫎楠岃瘉鐮侊紙浠?Redis 鍒犻櫎锛? delete_verification_code(f'email_{email}') # 鑷姩鐧诲綍 session.permanent = True session['user_id'] = user.id session['username'] = user.username session['logged_in'] = True # Flask-Login 鐧诲綍 login_user(user, remember=True) return jsonify({ 'success': True, 'message': '娉ㄥ唽鎴愬姛', 'user': { 'id': user.id, 'username': user.username, 'email': user.email } }), 201 except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': '娉ㄥ唽澶辫触锛岃閲嶈瘯'}), 500 def _safe_http_get(url, params=None, timeout=10): """瀹夊叏鐨?HTTP GET 璇锋眰锛堢粫杩?eventlet DNS 闂锛? 
浣跨敤 subprocess 璋冪敤 curl锛屽畬鍏ㄧ粫杩?Python/eventlet 鐨勭綉缁滄爤 """ import subprocess import urllib.parse # 鏋勫缓瀹屾暣 URL if params: query_string = urllib.parse.urlencode(params) full_url = f"{url}?{query_string}" else: full_url = url try: # 浣跨敤 curl 鍙戣捣璇锋眰锛岀粫杩?eventlet DNS 闂 result = subprocess.run( ['curl', '-s', '-m', str(timeout), full_url], capture_output=True, text=True, timeout=timeout + 5 ) if result.returncode != 0: print(f"鉂?curl 璇锋眰澶辫触: returncode={result.returncode}, stderr={result.stderr}") return None # 杩斿洖涓€涓ā鎷?Response 瀵硅薄 class MockResponse: def __init__(self, text): self.text = text self.content = text.encode('utf-8') self.encoding = 'utf-8' def json(self): return json.loads(self.text) return MockResponse(result.stdout) except subprocess.TimeoutExpired: print(f"鉂?curl 璇锋眰瓒呮椂: {full_url}") return None except Exception as e: print(f"鉂?curl 璇锋眰寮傚父: {type(e).__name__}: {e}") return None def get_wechat_access_token(code, appid=None, appsecret=None): """閫氳繃code鑾峰彇寰俊access_token Args: code: 寰俊鎺堟潈鍚庤繑鍥炵殑 code appid: 寰俊 AppID锛堝彲閫夛紝榛樿浣跨敤寮€鏀惧钩鍙伴厤缃級 appsecret: 寰俊 AppSecret锛堝彲閫夛紝榛樿浣跨敤寮€鏀惧钩鍙伴厤缃級 """ url = "https://api.weixin.qq.com/sns/oauth2/access_token" params = { 'appid': appid or WECHAT_OPEN_APPID, 'secret': appsecret or WECHAT_OPEN_APPSECRET, 'code': code, 'grant_type': 'authorization_code' } try: print(f"馃攧 姝e湪鑾峰彇寰俊 access_token... 
(appid={params['appid'][:8]}...)") response = _safe_http_get(url, params=params, timeout=15) data = response.json() if 'errcode' in data: print(f"鉂?WeChat access token error: {data}") return None print(f"鉁?鎴愬姛鑾峰彇 access_token: openid={data.get('openid', 'N/A')}") return data except Exception as e: print(f"鉂?WeChat access token request error: {type(e).__name__}: {e}") import traceback traceback.print_exc() return None def get_wechat_userinfo(access_token, openid): """鑾峰彇寰俊鐢ㄦ埛淇℃伅锛堝寘鍚玌nionID锛?"" url = "https://api.weixin.qq.com/sns/userinfo" params = { 'access_token': access_token, 'openid': openid, 'lang': 'zh_CN' } try: print(f"馃攧 姝e湪鑾峰彇寰俊鐢ㄦ埛淇℃伅... (openid={openid})") response = _safe_http_get(url, params=params, timeout=15) response.encoding = 'utf-8' # 鏄庣‘璁剧疆缂栫爜涓篣TF-8 data = response.json() if 'errcode' in data: print(f"鉂?WeChat userinfo error: {data}") return None # 纭繚nickname瀛楁鐨勭紪鐮佹纭? if 'nickname' in data and data['nickname']: # 纭繚鏄电О鏄纭殑UTF-8缂栫爜 try: # 妫€鏌ユ槸鍚﹀凡缁忔槸姝g‘鐨刄TF-8瀛楃涓? data['nickname'] = data['nickname'].encode('utf-8').decode('utf-8') except (UnicodeEncodeError, UnicodeDecodeError) as e: print(f"Nickname encoding error: {e}, using default") data['nickname'] = '寰俊鐢ㄦ埛' print(f"鉁?鎴愬姛鑾峰彇鐢ㄦ埛淇℃伅: nickname={data.get('nickname', 'N/A')}") return data except Exception as e: print(f"鉂?WeChat userinfo request error: {type(e).__name__}: {e}") import traceback traceback.print_exc() return None @app.route('/api/auth/wechat/qrcode', methods=['GET']) def get_wechat_qrcode(): """杩斿洖寰俊鎺堟潈URL锛屽墠绔娇鐢╥frame灞曠ず""" # 鐢熸垚鍞竴state鍙傛暟 state = uuid.uuid4().hex # URL缂栫爜鍥炶皟鍦板潃 redirect_uri = urllib.parse.quote_plus(WECHAT_REDIRECT_URI) # 鏋勫缓寰俊鎺堟潈URL锛圥C 鎵爜鐧诲綍浣跨敤寮€鏀惧钩鍙?AppID锛? wechat_auth_url = ( f"https://open.weixin.qq.com/connect/qrconnect?" 
f"appid={WECHAT_OPEN_APPID}&redirect_uri={redirect_uri}" f"&response_type=code&scope=snsapi_login&state={state}" "#wechat_redirect" ) # 瀛樺偍session淇℃伅鍒?Redis if not set_wechat_session(state, { 'status': 'waiting', 'user_info': None, 'wechat_openid': None, 'wechat_unionid': None }): return jsonify({'error': '鏈嶅姟鏆傛椂涓嶅彲鐢紝璇风◢鍚庨噸璇?}), 500 return jsonify({"code":0, "data": { 'auth_url': wechat_auth_url, 'session_id': state, 'expires_in': 300 }}), 200 @app.route('/api/auth/wechat/h5-auth', methods=['POST']) def get_wechat_h5_auth_url(): """ 鑾峰彇寰俊 H5 缃戦〉鎺堟潈 URL 鐢ㄤ簬鎵嬫満娴忚鍣ㄨ烦杞井淇?App 鎺堟潈 """ data = request.get_json() or {} frontend_redirect = data.get('redirect_url', '/home') # 鐢熸垚鍞竴 state state = uuid.uuid4().hex # 缂栫爜鍥炶皟鍦板潃 redirect_uri = urllib.parse.quote_plus(WECHAT_REDIRECT_URI) # 鏋勫缓鎺堟潈 URL锛圚5 缃戦〉鎺堟潈浣跨敤鍏紬鍙?AppID锛? auth_url = ( f"https://open.weixin.qq.com/connect/oauth2/authorize?" f"appid={WECHAT_MP_APPID}&redirect_uri={redirect_uri}" f"&response_type=code&scope=snsapi_userinfo&state={state}" "#wechat_redirect" ) # 瀛樺偍 session 淇℃伅鍒?Redis if not set_wechat_session(state, { 'status': 'waiting', 'mode': 'h5', # 鏍囪涓?H5 妯″紡 'frontend_redirect': frontend_redirect, 'user_info': None, 'wechat_openid': None, 'wechat_unionid': None }): return jsonify({'error': '鏈嶅姟鏆傛椂涓嶅彲鐢紝璇风◢鍚庨噸璇?}), 500 return jsonify({ 'auth_url': auth_url, 'state': state }), 200 @app.route('/api/account/wechat/qrcode', methods=['GET']) def get_wechat_bind_qrcode(): """鍙戣捣寰俊缁戝畾浜岀淮鐮侊紝浼氳瘽鏍囪涓虹粦瀹氭ā寮?"" if not session.get('logged_in'): return jsonify({'error': '鏈櫥褰?}), 401 # 鐢熸垚鍞竴state鍙傛暟 state = uuid.uuid4().hex # URL缂栫爜鍥炶皟鍦板潃 redirect_uri = urllib.parse.quote_plus(WECHAT_REDIRECT_URI) # 鏋勫缓寰俊鎺堟潈URL锛圥C 鎵爜缁戝畾浣跨敤寮€鏀惧钩鍙?AppID锛? wechat_auth_url = ( f"https://open.weixin.qq.com/connect/qrconnect?" f"appid={WECHAT_OPEN_APPID}&redirect_uri={redirect_uri}" f"&response_type=code&scope=snsapi_login&state={state}" "#wechat_redirect" ) # 瀛樺偍session淇℃伅鍒?Redis锛屾爣璁颁负缁戝畾妯″紡骞惰褰曠洰鏍囩敤鎴? 
if not set_wechat_session(state, { 'status': 'waiting', 'mode': 'bind', 'bind_user_id': session.get('user_id'), 'user_info': None, 'wechat_openid': None, 'wechat_unionid': None }): return jsonify({'error': '鏈嶅姟鏆傛椂涓嶅彲鐢紝璇风◢鍚庨噸璇?}), 500 return jsonify({ 'auth_url': wechat_auth_url, 'session_id': state, 'expires_in': 300 }), 200 @app.route('/api/auth/wechat/check', methods=['POST']) def check_wechat_scan(): """妫€鏌ュ井淇℃壂鐮佺姸鎬?"" data = request.get_json() session_id = data.get('session_id') if not session_id: return jsonify({'status': 'invalid', 'error': '鏃犳晥鐨剆ession'}), 400 # 浠?Redis 鑾峰彇 session sess = get_wechat_session(session_id) if not sess: return jsonify({'status': 'expired'}), 200 # Redis 鑷姩杩囨湡锛岃繑鍥?expired # 鑾峰彇鍓╀綑 TTL ttl = redis_client.ttl(f"{WECHAT_SESSION_PREFIX}{session_id}") expires_in = max(0, ttl) if ttl > 0 else 0 return jsonify({ 'status': sess['status'], 'user_info': sess.get('user_info'), 'expires_in': expires_in }), 200 @app.route('/api/account/wechat/check', methods=['POST']) def check_wechat_bind_scan(): """妫€鏌ュ井淇℃壂鐮佺粦瀹氱姸鎬?"" data = request.get_json() session_id = data.get('session_id') if not session_id: return jsonify({'status': 'invalid', 'error': '鏃犳晥鐨剆ession'}), 400 # 浠?Redis 鑾峰彇 session sess = get_wechat_session(session_id) if not sess: return jsonify({'status': 'expired'}), 200 # Redis 鑷姩杩囨湡锛岃繑鍥?expired # 缁戝畾妯″紡闄愬埗 if sess.get('mode') != 'bind': return jsonify({'status': 'invalid', 'error': '浼氳瘽妯″紡閿欒'}), 400 # 鑾峰彇鍓╀綑 TTL ttl = redis_client.ttl(f"{WECHAT_SESSION_PREFIX}{session_id}") expires_in = max(0, ttl) if ttl > 0 else 0 return jsonify({ 'status': sess['status'], 'user_info': sess.get('user_info'), 'expires_in': expires_in }), 200 @app.route('/api/auth/wechat/callback', methods=['GET']) def wechat_callback(): """寰俊鎺堟潈鍥炶皟澶勭悊 - 浣跨敤Session""" code = request.args.get('code') state = request.args.get('state') error = request.args.get('error') # 閿欒澶勭悊锛氱敤鎴锋嫆缁濇巿鏉? 
if error: if state and wechat_session_exists(state): update_wechat_session(state, {'status': 'auth_denied', 'error': '鐢ㄦ埛鎷掔粷鎺堟潈'}) print(f"鉂?鐢ㄦ埛鎷掔粷鎺堟潈: state={state}") return redirect(f'{FRONTEND_URL}/home?error=wechat_auth_denied') # 鍙傛暟楠岃瘉 if not code or not state: if state and wechat_session_exists(state): update_wechat_session(state, {'status': 'auth_failed', 'error': '鎺堟潈鍙傛暟缂哄け'}) return redirect(f'{FRONTEND_URL}/home?error=wechat_auth_failed') # 浠?Redis 鑾峰彇 session锛堣嚜鍔ㄥ鐞嗚繃鏈燂級 session_data = get_wechat_session(state) if not session_data: return redirect(f'{FRONTEND_URL}/home?error=session_expired') try: # 姝ラ1: 鐢ㄦ埛宸叉壂鐮佸苟鎺堟潈锛堝井淇″洖璋冭繃鏉ヨ鏄庣敤鎴峰凡瀹屾垚鎵爜+鎺堟潈锛? update_wechat_session(state, {'status': 'scanned'}) print(f"鉁?寰俊鎵爜鍥炶皟: state={state}, code={code[:10]}...") # 姝ラ2: 鏍规嵁鎺堟潈妯″紡閫夋嫨瀵瑰簲鐨?AppID/AppSecret # H5 妯″紡浣跨敤鍏紬鍙烽厤缃紝PC 鎵爜鍜岀粦瀹氭ā寮忎娇鐢ㄥ紑鏀惧钩鍙伴厤缃? if session_data.get('mode') == 'h5': appid = WECHAT_MP_APPID appsecret = WECHAT_MP_APPSECRET print(f"馃摫 H5 妯″紡鎺堟潈锛屼娇鐢ㄥ叕浼楀彿閰嶇疆") else: appid = WECHAT_OPEN_APPID appsecret = WECHAT_OPEN_APPSECRET print(f"馃捇 PC 妯″紡鎺堟潈锛屼娇鐢ㄥ紑鏀惧钩鍙伴厤缃?) # 姝ラ3: 鑾峰彇access_token token_data = get_wechat_access_token(code, appid, appsecret) if not token_data: update_wechat_session(state, {'status': 'auth_failed', 'error': '鑾峰彇璁块棶浠ょ墝澶辫触'}) print(f"鉂?鑾峰彇寰俊access_token澶辫触: state={state}") return redirect(f'{FRONTEND_URL}/home?error=token_failed') # 姝ラ3: Token鑾峰彇鎴愬姛锛屾爣璁颁负宸叉巿鏉? update_wechat_session(state, {'status': 'authorized'}) print(f"鉁?寰俊鎺堟潈鎴愬姛: openid={token_data['openid']}") # 姝ラ4: 鑾峰彇鐢ㄦ埛淇℃伅 user_info = get_wechat_userinfo(token_data['access_token'], token_data['openid']) if not user_info: update_wechat_session(state, {'status': 'auth_failed', 'error': '鑾峰彇鐢ㄦ埛淇℃伅澶辫触'}) print(f"鉂?鑾峰彇寰俊鐢ㄦ埛淇℃伅澶辫触: openid={token_data['openid']}") return redirect(f'{FRONTEND_URL}/home?error=userinfo_failed') # 鏌ユ壘鎴栧垱寤虹敤鎴?/ 鎴栧鐞嗙粦瀹? openid = token_data['openid'] unionid = user_info.get('unionid') or token_data.get('unionid') # 濡傛灉鏄粦瀹氭祦绋? 
if session_data.get('mode') == 'bind': try: target_user_id = session.get('user_id') or session_data.get('bind_user_id') if not target_user_id: return redirect(f'{FRONTEND_URL}/home?error=bind_no_user') target_user = User.query.get(target_user_id) if not target_user: return redirect(f'{FRONTEND_URL}/home?error=bind_user_missing') # 妫€鏌ヨ寰俊鏄惁宸茶鍏朵粬璐︽埛缁戝畾 existing = None if unionid: existing = User.query.filter_by(wechat_union_id=unionid).first() if not existing: existing = User.query.filter_by(wechat_open_id=openid).first() if existing and existing.id != target_user.id: update_wechat_session(state, {'status': 'bind_conflict'}) return redirect(f'{FRONTEND_URL}/home?bind=conflict') # 鎵ц缁戝畾 target_user.bind_wechat(openid, unionid, wechat_info=user_info) # 鏍囪缁戝畾瀹屾垚锛屼緵鍓嶇杞 update_wechat_session(state, {'status': 'bind_ready', 'user_info': {'user_id': target_user.id}}) return redirect(f'{FRONTEND_URL}/home?bind=success') except Exception as e: print(f"鉂?寰俊缁戝畾澶辫触: {e}") db.session.rollback() update_wechat_session(state, {'status': 'bind_failed'}) return redirect(f'{FRONTEND_URL}/home?bind=failed') user = None is_new_user = False # 缁熶竴浣跨敤 unionid 鍖归厤鐢ㄦ埛锛圚5 鍜?PC 妯″紡閮戒竴鏍凤級 if not unionid: # 娌℃湁鑾峰彇鍒?unionid锛屾棤娉曞叧鑱旇处鍙? mode_name = 'H5' if session_data.get('mode') == 'h5' else 'PC' update_wechat_session(state, {'status': 'auth_failed', 'error': f'{mode_name}鎺堟潈鏈繑鍥瀠nionid'}) print(f"鉂?{mode_name} 鎺堟潈鏈繑鍥?unionid, openid={openid}, user_info={user_info}") # 璋冭瘯淇℃伅锛氬皢寰俊杩斿洖鐨勬暟鎹€氳繃 URL 浼犵粰鍓嶇 debug_params = urllib.parse.urlencode({ 'error': 'no_unionid', 'debug_mode': mode_name, 'debug_openid': openid[:10] + '...' 
if openid else 'null', 'debug_has_unionid_in_token': '1' if token_data.get('unionid') else '0', 'debug_has_unionid_in_userinfo': '1' if user_info.get('unionid') else '0', 'debug_nickname': user_info.get('nickname', '')[:10], 'debug_keys_in_userinfo': ','.join(user_info.keys()) if user_info else 'null', }) return redirect(f'{FRONTEND_URL}/home?{debug_params}') user = User.query.filter_by(wechat_union_id=unionid).first() if not user: # 鍒涘缓鏂扮敤鎴? # 鍏堟竻鐞嗗井淇℃樀绉? raw_nickname = user_info.get('nickname', '寰俊鐢ㄦ埛') # 鍒涘缓涓存椂鐢ㄦ埛瀹炰緥浠ヤ娇鐢ㄦ竻鐞嗘柟娉? temp_user = User.__new__(User) sanitized_nickname = temp_user._sanitize_nickname(raw_nickname) username = sanitized_nickname counter = 1 while User.is_username_taken(username): username = f"{sanitized_nickname}_{counter}" counter += 1 user = User(username=username) user.nickname = sanitized_nickname user.avatar_url = user_info.get('headimgurl') user.wechat_open_id = openid user.wechat_union_id = unionid user.set_password(uuid.uuid4().hex) user.status = 'active' db.session.add(user) db.session.commit() is_new_user = True print(f"鉁?寰俊鎵爜鑷姩鍒涘缓鏂扮敤鎴? {username}, openid: {openid}") # 鏇存柊鏈€鍚庣櫥褰曟椂闂? user.update_last_seen() # 璁剧疆session session.permanent = True session['user_id'] = user.id session['username'] = user.username session['logged_in'] = True session['wechat_login'] = True # 鏍囪鏄井淇$櫥褰? # Flask-Login 鐧诲綍 login_user(user, remember=True) # 鏇存柊寰俊session鐘舵€侊紝渚涘墠绔疆璇㈡娴? mode = session_data.get('mode') # H5 妯″紡锛氶噸瀹氬悜鍒板墠绔洖璋冮〉闈? if mode == 'h5': frontend_redirect = session_data.get('frontend_redirect', '/home/wechat-callback') # 娓呯悊 session delete_wechat_session(state) print(f"鉁?H5 寰俊鐧诲綍鎴愬姛锛岄噸瀹氬悜鍒? 
{frontend_redirect}") # 璋冭瘯淇℃伅锛氭惡甯﹀井淇¤繑鍥炵殑鍏抽敭鏁版嵁 debug_params = urllib.parse.urlencode({ 'wechat_login': 'success', 'debug_is_new_user': '1' if is_new_user else '0', 'debug_has_unionid': '1' if unionid else '0', 'debug_unionid': (unionid[:10] + '...') if unionid else 'null', 'debug_openid': (openid[:10] + '...') if openid else 'null', 'debug_user_id': user.id, 'debug_nickname': user_info.get('nickname', '')[:10], }) # 鈿?淇锛氭纭鐞嗗凡鏈夋煡璇㈠弬鏁扮殑 URL separator = '&' if '?' in frontend_redirect else '?' return redirect(f"{frontend_redirect}{separator}{debug_params}") # PC 鎵爜妯″紡锛氭洿鏂扮姸鎬佷緵鍓嶇杞 if not mode: new_status = 'register_ready' if is_new_user else 'login_ready' update_wechat_session(state, {'status': new_status, 'user_info': {'user_id': user.id}}) print(f"鉁?寰俊鎵爜鐘舵€佸凡鏇存柊: {new_status}, user_id: {user.id}") # 鈿?PC 鎵爜妯″紡锛氶噸瀹氬悜鍒板墠绔洖璋冮〉闈? # 寰俊鎵爜鐧诲綍浼氳烦杞暣涓〉闈紝鎵€浠ラ渶瑕侀噸瀹氬悜鍒板墠绔鐞? pc_redirect_params = urllib.parse.urlencode({ 'wechat_login': 'success', 'state': state, 'is_new_user': '1' if is_new_user else '0', }) print(f"鉁?PC 寰俊鐧诲綍鎴愬姛锛岄噸瀹氬悜鍒板墠绔洖璋冮〉闈?) return redirect(f"{FRONTEND_URL}/home/wechat-callback?{pc_redirect_params}") except Exception as e: print(f"鉂?寰俊鐧诲綍澶辫触: {e}") import traceback traceback.print_exc() db.session.rollback() # 鏇存柊session鐘舵€佷负澶辫触 if wechat_session_exists(state): update_wechat_session(state, {'status': 'auth_failed', 'error': str(e)}) # 鈿?閲嶅畾鍚戝埌棣栭〉骞舵樉绀洪敊璇? return redirect(f'{FRONTEND_URL}/home?error=wechat_login_failed') @app.route('/api/auth/login/wechat', methods=['POST']) def login_with_wechat(): """寰俊鐧诲綍 - 淇鐗堟湰""" data = request.get_json() session_id = data.get('session_id') if not session_id: return jsonify({'success': False, 'error': 'session_id涓嶈兘涓虹┖'}), 400 # 浠?Redis 鑾峰彇 session wechat_sess = get_wechat_session(session_id) if not wechat_sess: return jsonify({'success': False, 'error': '浼氳瘽涓嶅瓨鍦ㄦ垨宸茶繃鏈?}), 400 # 妫€鏌ession鐘舵€? 
if wechat_sess['status'] not in ['login_ready', 'register_ready']: return jsonify({'success': False, 'error': '浼氳瘽鐘舵€佹棤鏁?}), 400 # 妫€鏌ユ槸鍚︽湁鐢ㄦ埛淇℃伅 user_info = wechat_sess.get('user_info') if not user_info or not user_info.get('user_id'): return jsonify({'success': False, 'error': '鐢ㄦ埛淇℃伅涓嶅畬鏁?}), 400 try: user = User.query.get(user_info['user_id']) if not user: return jsonify({'success': False, 'error': '鐢ㄦ埛涓嶅瓨鍦?}), 404 # 鏇存柊鏈€鍚庣櫥褰曟椂闂? user.update_last_seen() # Redis 浼氳嚜鍔ㄨ繃鏈燂紝鏃犻渶鎵嬪姩寤惰繜鍒犻櫎 # 淇濈暀 session 鐘舵€佷緵鍓嶇杞锛孯edis TTL 浼氳嚜鍔ㄦ竻鐞? # 鐢熸垚鐧诲綍鍝嶅簲 response_data = { 'success': True, 'message': '鐧诲綍鎴愬姛' if wechat_sess['status'] == 'login_ready' else '娉ㄥ唽骞剁櫥褰曟垚鍔?, 'user': { 'id': user.id, 'username': user.username, 'nickname': user.nickname or user.username, 'email': user.email, 'phone': user.phone, 'phone_confirmed': bool(user.phone_confirmed), 'avatar_url': user.avatar_url, 'has_wechat': True, 'wechat_open_id': user.wechat_open_id, 'wechat_union_id': user.wechat_union_id, 'created_at': user.created_at.isoformat() if user.created_at else None, 'last_seen': user.last_seen.isoformat() if user.last_seen else None }, 'isNewUser': wechat_sess['status'] == 'register_ready' # 鏍囪鏄惁涓烘柊鐢ㄦ埛 } # 濡傛灉闇€瑕乼oken璁よ瘉锛屽彲浠ュ湪杩欓噷鐢熸垚 # response_data['token'] = generate_token(user.id) return jsonify(response_data), 200 except Exception as e: print(f"鉂?寰俊鐧诲綍閿欒: {e}") import traceback app.logger.error(f"鍥炶皟澶勭悊閿欒: {e}", exc_info=True) return jsonify({ 'success': False, 'error': '鐧诲綍澶辫触锛岃閲嶈瘯' }), 500 @app.route('/api/account/wechat/unbind', methods=['POST']) def unbind_wechat_account(): """瑙g粦褰撳墠鐧诲綍鐢ㄦ埛鐨勫井淇?"" if not session.get('logged_in'): return jsonify({'error': '鏈櫥褰?}), 401 try: user = User.query.get(session.get('user_id')) if not user: return jsonify({'error': '鐢ㄦ埛涓嶅瓨鍦?}), 404 user.unbind_wechat() return jsonify({'message': '瑙g粦鎴愬姛', 'success': True}), 200 except Exception as e: print(f"Unbind wechat error: {e}") db.session.rollback() return jsonify({'error': '瑙g粦澶辫触锛岃閲嶈瘯'}), 500 # 
============ H5 璺宠浆灏忕▼搴忕浉鍏?API ============ def get_wechat_access_token_cached(appid, appsecret): """ 鑾峰彇寰俊 access_token锛圧edis 缂撳瓨锛屾敮鎸佸 Worker锛? Args: appid: 寰俊 AppID锛堝叕浼楀彿鎴栧皬绋嬪簭锛? appsecret: 瀵瑰簲鐨?AppSecret Returns: access_token 瀛楃涓诧紝澶辫触杩斿洖 None """ cache_key = f"{WECHAT_ACCESS_TOKEN_PREFIX}{appid}" # 1. 灏濊瘯浠?Redis 鑾峰彇缂撳瓨 try: cached = redis_client.get(cache_key) if cached: data = json.loads(cached) # 鎻愬墠 5 鍒嗛挓鍒锋柊锛岄伩鍏嶄复鐣岄棶棰? if data.get('expires_at', 0) > time.time() + 300: print(f"[access_token] 浣跨敤缂撳瓨: appid={appid[:8]}...") return data['token'] except Exception as e: print(f"[access_token] Redis 璇诲彇澶辫触: {e}") # 2. 璇锋眰鏂?token url = "https://api.weixin.qq.com/cgi-bin/token" params = { 'grant_type': 'client_credential', 'appid': appid, 'secret': appsecret } try: response = requests.get(url, params=params, timeout=10) result = response.json() if 'access_token' in result: token = result['access_token'] expires_in = result.get('expires_in', 7200) # 3. 瀛樺叆 Redis锛圱TL 姣?token 鏈夋晥鏈熺煭 60 绉掞級 cache_data = { 'token': token, 'expires_at': time.time() + expires_in } redis_client.setex( cache_key, expires_in - 60, json.dumps(cache_data) ) print(f"[access_token] 鑾峰彇鎴愬姛: appid={appid[:8]}..., expires_in={expires_in}s") return token else: print(f"[access_token] 鑾峰彇澶辫触: errcode={result.get('errcode')}, errmsg={result.get('errmsg')}") return None except Exception as e: print(f"[access_token] 璇锋眰寮傚父: {e}") return None def get_jsapi_ticket_cached(appid, appsecret): """ 鑾峰彇 jsapi_ticket锛圧edis 缂撳瓨锛? 鐢ㄤ簬 JS-SDK 绛惧悕 """ cache_key = f"{WECHAT_JSAPI_TICKET_PREFIX}{appid}" # 1. 灏濊瘯浠庣紦瀛樿幏鍙? try: cached = redis_client.get(cache_key) if cached: data = json.loads(cached) if data.get('expires_at', 0) > time.time() + 300: print(f"[jsapi_ticket] 浣跨敤缂撳瓨") return data['ticket'] except Exception as e: print(f"[jsapi_ticket] Redis 璇诲彇澶辫触: {e}") # 2. 鑾峰彇 access_token access_token = get_wechat_access_token_cached(appid, appsecret) if not access_token: return None # 3. 
璇锋眰 jsapi_ticket url = "https://api.weixin.qq.com/cgi-bin/ticket/getticket" params = { 'access_token': access_token, 'type': 'jsapi' } try: response = requests.get(url, params=params, timeout=10) result = response.json() if result.get('errcode') == 0: ticket = result['ticket'] expires_in = result.get('expires_in', 7200) # 瀛樺叆 Redis cache_data = { 'ticket': ticket, 'expires_at': time.time() + expires_in } redis_client.setex( cache_key, expires_in - 60, json.dumps(cache_data) ) print(f"[jsapi_ticket] 鑾峰彇鎴愬姛, expires_in={expires_in}s") return ticket else: print(f"[jsapi_ticket] 鑾峰彇澶辫触: errcode={result.get('errcode')}, errmsg={result.get('errmsg')}") return None except Exception as e: print(f"[jsapi_ticket] 璇锋眰寮傚父: {e}") return None def generate_jssdk_signature(url, appid, appsecret): """ 鐢熸垚 JS-SDK 绛惧悕閰嶇疆 Args: url: 褰撳墠椤甸潰 URL锛堜笉鍚?# 鍙婂叾鍚庣殑閮ㄥ垎锛? appid: 鍏紬鍙?AppID appsecret: 鍏紬鍙?AppSecret Returns: 绛惧悕閰嶇疆瀛楀吀锛屽け璐ヨ繑鍥?None """ import hashlib # 鑾峰彇 jsapi_ticket ticket = get_jsapi_ticket_cached(appid, appsecret) if not ticket: return None # 鐢熸垚绛惧悕鍙傛暟 timestamp = int(time.time()) nonce_str = uuid.uuid4().hex # 绛惧悕瀛楃涓诧紙蹇呴』鎸夊瓧鍏稿簭鎺掑簭锛侊級 sign_str = f"jsapi_ticket={ticket}&noncestr={nonce_str}×tamp={timestamp}&url={url}" # SHA1 绛惧悕 signature = hashlib.sha1(sign_str.encode('utf-8')).hexdigest() return { 'appId': appid, 'timestamp': timestamp, 'nonceStr': nonce_str, 'signature': signature, 'jsApiList': ['updateAppMessageShareData', 'updateTimelineShareData'], 'openTagList': ['wx-open-launch-weapp'] } @app.route('/api/wechat/jssdk-config', methods=['POST']) def api_wechat_jssdk_config(): """鑾峰彇寰俊 JS-SDK 绛惧悕閰嶇疆锛堢敤浜庡紑鏀炬爣绛撅級""" try: print(f"[JS-SDK Config] 鏀跺埌璇锋眰") data = request.get_json() or {} url = data.get('url') print(f"[JS-SDK Config] URL: {url}") if not url: print(f"[JS-SDK Config] 閿欒: 缂哄皯 url 鍙傛暟") return jsonify({ 'code': 400, 'message': '缂哄皯蹇呰鍙傛暟 url', 'data': None }), 400 # URL 鏍¢獙锛氬繀椤绘槸鍏佽鐨勫煙鍚? 
from urllib.parse import urlparse parsed = urlparse(url) # 鎵╁睍鍏佽鐨勫煙鍚嶅垪琛紝鍖呮嫭 API 鍩熷悕 allowed_domains = ['valuefrontier.cn', 'www.valuefrontier.cn', 'api.valuefrontier.cn', 'localhost', '127.0.0.1'] domain = parsed.netloc.split(':')[0] print(f"[JS-SDK Config] 瑙f瀽鍩熷悕: {domain}") if domain not in allowed_domains: return jsonify({ 'code': 400, 'message': 'URL 鍩熷悕涓嶅湪鍏佽鑼冨洿鍐?, 'data': None }), 400 # URL 澶勭悊锛氱Щ闄?hash 閮ㄥ垎 if '#' in url: url = url.split('#')[0] # 鐢熸垚绛惧悕锛堜娇鐢ㄥ叕浼楀彿閰嶇疆锛? print(f"[JS-SDK Config] 寮€濮嬬敓鎴愮鍚?..") config = generate_jssdk_signature( url=url, appid=WECHAT_MP_APPID, appsecret=WECHAT_MP_APPSECRET ) print(f"[JS-SDK Config] 绛惧悕鐢熸垚瀹屾垚: {config is not None}") if not config: return jsonify({ 'code': 500, 'message': '鑾峰彇绛惧悕閰嶇疆澶辫触锛岃绋嶅悗閲嶈瘯', 'data': None }), 500 return jsonify({ 'code': 200, 'message': 'success', 'data': config }) except Exception as e: print(f"[JS-SDK Config] 寮傚父: {e}") import traceback traceback.print_exc() return jsonify({ 'code': 500, 'message': '鏈嶅姟鍣ㄥ唴閮ㄩ敊璇?, 'data': None }), 500 @app.route('/api/miniprogram/url-scheme', methods=['POST']) def api_miniprogram_url_scheme(): """鐢熸垚灏忕▼搴?URL Scheme锛堝閮ㄦ祻瑙堝櫒璺宠浆灏忕▼搴忕敤锛?"" try: # 棰戠巼闄愬埗 client_ip = request.headers.get('X-Forwarded-For', request.remote_addr) if client_ip: client_ip = client_ip.split(',')[0].strip() rate_key = f"rate_limit:urlscheme:{client_ip}" current = redis_client.incr(rate_key) if current == 1: redis_client.expire(rate_key, 60) if current > 30: # 姣忓垎閽熸渶澶?30 娆? 
return jsonify({ 'code': 429, 'message': '璇锋眰杩囦簬棰戠箒锛岃绋嶅悗鍐嶈瘯', 'data': None }), 429 data = request.get_json() or {} # 鍙傛暟鏍¢獙 path = data.get('path') if path and not path.startswith('/'): path = '/' + path # 鑷姩琛ュ叏 / # 鑾峰彇灏忕▼搴?access_token access_token = get_wechat_access_token_cached( WECHAT_MINIPROGRAM_APPID, WECHAT_MINIPROGRAM_APPSECRET ) if not access_token: return jsonify({ 'code': 500, 'message': '鑾峰彇璁块棶浠ょ墝澶辫触', 'data': None }), 500 # 鏋勫缓璇锋眰鍙傛暟 wx_url = f"https://api.weixin.qq.com/wxa/generatescheme?access_token={access_token}" expire_type = data.get('expire_type', 1) expire_interval = min(data.get('expire_interval', 30), 30) # 鏈€闀?0澶? payload = { "is_expire": expire_type == 1 } # 璺宠浆淇℃伅 if path or data.get('query'): payload["jump_wxa"] = {} if path: payload["jump_wxa"]["path"] = path if data.get('query'): payload["jump_wxa"]["query"] = data.get('query') # 鏈夋晥鏈熻缃? if expire_type == 1: if data.get('expire_time'): payload["expire_time"] = data.get('expire_time') else: payload["expire_interval"] = expire_interval response = requests.post(wx_url, json=payload, timeout=10) result = response.json() if result.get('errcode') == 0: return jsonify({ 'code': 200, 'message': 'success', 'data': { 'openlink': result['openlink'], 'expire_time': data.get('expire_time') or (int(time.time()) + expire_interval * 86400), 'created_at': datetime.utcnow().isoformat() + 'Z' } }) else: print(f"[URL Scheme] 鐢熸垚澶辫触: errcode={result.get('errcode')}, errmsg={result.get('errmsg')}") return jsonify({ 'code': 500, 'message': f"鐢熸垚 URL Scheme 澶辫触: {result.get('errmsg', '鏈煡閿欒')}", 'data': None }), 500 except Exception as e: print(f"[URL Scheme] 寮傚父: {e}") import traceback traceback.print_exc() return jsonify({ 'code': 500, 'message': '鏈嶅姟鍣ㄥ唴閮ㄩ敊璇?, 'data': None }), 500 # 璇勮妯″瀷 class EventComment(db.Model): """浜嬩欢璇勮""" __tablename__ = 'event_comment' id = db.Column(db.Integer, primary_key=True) event_id = db.Column(db.Integer, nullable=False) user_id = db.Column(db.Integer, 
db.ForeignKey('user.id'), nullable=True) author = db.Column(db.String(50), default='鍖垮悕鐢ㄦ埛') content = db.Column(db.Text, nullable=False) parent_id = db.Column(db.Integer, db.ForeignKey('event_comment.id')) likes = db.Column(db.Integer, default=0) created_at = db.Column(db.DateTime, default=beijing_now) status = db.Column(db.String(20), default='active') user = db.relationship('User', backref='event_comments') replies = db.relationship('EventComment', backref=db.backref('parent', remote_side=[id])) def to_dict(self, user_session_id=None, current_user_id=None): # 妫€鏌ュ綋鍓嶇敤鎴锋槸鍚﹀凡鐐硅禐 user_liked = False if user_session_id: like_record = CommentLike.query.filter_by( comment_id=self.id, session_id=user_session_id ).first() user_liked = like_record is not None # 妫€鏌ュ綋鍓嶇敤鎴锋槸鍚﹀彲浠ュ垹闄ゆ璇勮 can_delete = current_user_id is not None and self.user_id == current_user_id return { 'id': self.id, 'event_id': self.event_id, 'author': self.author, 'content': self.content, 'parent_id': self.parent_id, 'likes': self.likes, 'created_at': self.created_at.isoformat() if self.created_at else None, 'user_liked': user_liked, 'can_delete': can_delete, 'user_id': self.user_id, 'replies': [reply.to_dict(user_session_id, current_user_id) for reply in self.replies if reply.status == 'active'] } class CommentLike(db.Model): """璇勮鐐硅禐璁板綍""" __tablename__ = 'comment_like' id = db.Column(db.Integer, primary_key=True) comment_id = db.Column(db.Integer, db.ForeignKey('event_comment.id'), nullable=False) session_id = db.Column(db.String(100), nullable=False) created_at = db.Column(db.DateTime, default=beijing_now) __table_args__ = (db.UniqueConstraint('comment_id', 'session_id'),) @app.after_request def after_request(response): """澶勭悊鎵€鏈夊搷搴旓紝娣诲姞CORS澶撮儴鍜屽畨鍏ㄥご閮?"" origin = request.headers.get('Origin') allowed_origins = ['http://localhost:3000', 'http://127.0.0.1:3000', 'http://localhost:5173', 'https://valuefrontier.cn', 'http://valuefrontier.cn', 'https://www.valuefrontier.cn', 
'http://www.valuefrontier.cn'] if origin in allowed_origins: response.headers['Access-Control-Allow-Origin'] = origin response.headers['Access-Control-Allow-Credentials'] = 'true' response.headers['Access-Control-Allow-Headers'] = 'Content-Type,Authorization,X-Requested-With,Cache-Control' response.headers['Access-Control-Allow-Methods'] = 'GET,PUT,POST,DELETE,OPTIONS' response.headers['Access-Control-Expose-Headers'] = 'Content-Type,Authorization' # 澶勭悊棰勬璇锋眰 if request.method == 'OPTIONS': response.status_code = 200 return response def add_cors_headers(response): """娣诲姞CORS澶达紙淇濈暀鍘熸湁鍑芥暟浠ュ吋瀹癸級""" origin = request.headers.get('Origin') allowed_origins = ['http://localhost:3000', 'http://127.0.0.1:3000', 'http://localhost:5173', 'https://valuefrontier.cn', 'http://valuefrontier.cn', 'https://www.valuefrontier.cn', 'http://www.valuefrontier.cn'] if origin in allowed_origins: response.headers['Access-Control-Allow-Origin'] = origin else: response.headers['Access-Control-Allow-Origin'] = 'http://localhost:3000' response.headers['Access-Control-Allow-Headers'] = 'Content-Type,Authorization,X-Requested-With,Cache-Control' response.headers['Access-Control-Allow-Methods'] = 'GET,PUT,POST,DELETE,OPTIONS' response.headers['Access-Control-Allow-Credentials'] = 'true' return response class EventFollow(db.Model): """浜嬩欢鍏虫敞""" id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) event_id = db.Column(db.Integer, db.ForeignKey('event.id'), nullable=False) created_at = db.Column(db.DateTime, default=beijing_now) user = db.relationship('User', backref='event_follows') __table_args__ = (db.UniqueConstraint('user_id', 'event_id'),) class FutureEventFollow(db.Model): """鏈潵浜嬩欢鍏虫敞""" __tablename__ = 'future_event_follow' id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) future_event_id = db.Column(db.Integer, nullable=False) # future_events琛ㄧ殑id 
created_at = db.Column(db.DateTime, default=beijing_now) user = db.relationship('User', backref='future_event_follows') __table_args__ = (db.UniqueConstraint('user_id', 'future_event_id'),) # 鈥斺€?鑷€夎偂杈撳叆缁熶竴鍖栦笌鍚嶇О琛ュ叏宸ュ叿 鈥斺€? def _normalize_stock_input(raw_input: str): """瑙f瀽鐢ㄦ埛杈撳叆涓烘爣鍑?浣嶈偂绁ㄤ唬鐮佷笌鍙€夊悕绉般€? 鏀寔锛? - 6浣嶄唬鐮? "600519"锛屾垨甯﹀悗缂€ "600519.SH"/"600519.SZ" - 鍚嶇О(浠g爜): "璐靛窞鑼呭彴(600519)" 鎴?"璐靛窞鑼呭彴锛?00519锛? 杩斿洖 (code6, name_or_none) """ if not raw_input: return None, None s = str(raw_input).strip() # 鍚嶇О(600519) 鎴?鍚嶇О锛?00519锛? m = re.match(r"^(.+?)[\(锛圿\s*(\d{6})\s*[\)锛塢\s*$", s) if m: name = m.group(1).strip() code = m.group(2) return code, (name if name else None) # 600519 鎴?600519.SH / 600519.SZ m2 = re.match(r"^(\d{6})(?:\.(?:SH|SZ))?$", s, re.IGNORECASE) if m2: return m2.group(1), None # SH600519 / SZ000001 m3 = re.match(r"^(SH|SZ)(\d{6})$", s, re.IGNORECASE) if m3: return m3.group(2), None return None, None def _query_stock_name_by_code(code6: str): """鏍规嵁6浣嶄唬鐮佹煡璇㈣偂绁ㄥ悕绉帮紝鏌ヤ笉鍒拌繑鍥濶one銆?"" try: with engine.connect() as conn: q = text(""" SELECT SECNAME FROM ea_baseinfo WHERE SECCODE = :c LIMIT 1 """) row = conn.execute(q, {'c': code6}).fetchone() if row: return row[0] except Exception: pass return None class Watchlist(db.Model): """鐢ㄦ埛鑷€夎偂""" __tablename__ = 'watchlist' id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) stock_code = db.Column(db.String(20), nullable=False) stock_name = db.Column(db.String(100), nullable=True) created_at = db.Column(db.DateTime, default=beijing_now) user = db.relationship('User', backref='watchlist') __table_args__ = (db.UniqueConstraint('user_id', 'stock_code'),) @app.route('/api/account/watchlist', methods=['GET']) def get_my_watchlist(): """鑾峰彇褰撳墠鐢ㄦ埛鐨勮嚜閫夎偂鍒楄〃""" try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 items = Watchlist.query.filter_by(user_id=session['user_id']).order_by(Watchlist.created_at.desc()).all() # 
鎳掓洿鏂帮細缁熶竴浠g爜涓?浣嶃€佽ˉ鍏ㄧ己澶辩殑鍚嶇О锛屽苟鍘婚噸锛堝悓涓€浠g爜淇濈暀涓€涓褰曪級 from collections import defaultdict groups = defaultdict(list) for i in items: code6, _ = _normalize_stock_input(i.stock_code) normalized_code = code6 or (i.stock_code.strip().upper() if isinstance(i.stock_code, str) else i.stock_code) groups[normalized_code].append(i) dirty = False to_delete = [] for code6, group in groups.items(): # 閫夋嫨淇濈暀璁板綍锛氫紭鍏堟湁鍚嶇О鐨勶紝鍏舵鍒涘缓鏃堕棿鏃╃殑 def sort_key(x): return (x.stock_name is None, x.created_at or datetime.min) group_sorted = sorted(group, key=sort_key) keep = group_sorted[0] # 瑙勮寖淇濈暀椤? if keep.stock_code != code6: keep.stock_code = code6 dirty = True if not keep.stock_name and code6: nm = _query_stock_name_by_code(code6) if nm: keep.stock_name = nm dirty = True # 鍏朵綑鍒犻櫎 for g in group_sorted[1:]: to_delete.append(g) if to_delete: for g in to_delete: db.session.delete(g) dirty = True if dirty: db.session.commit() return jsonify({'success': True, 'data': [ { 'id': i.id, 'stock_code': i.stock_code, 'stock_name': i.stock_name, 'created_at': i.created_at.isoformat() if i.created_at else None } for i in items ]}) except Exception as e: print(f"Error in get_my_watchlist: {str(e)}") return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/account/watchlist', methods=['POST']) def add_to_watchlist(): """娣诲姞鍒拌嚜閫夎偂""" if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 data = request.get_json() or {} raw_code = data.get('stock_code') raw_name = data.get('stock_name') code6, name_from_input = _normalize_stock_input(raw_code) if not code6: return jsonify({'success': False, 'error': '鏃犳晥鐨勮偂绁ㄦ爣璇?}), 400 # 浼樺厛浣跨敤浼犲叆鍚嶇О锛屽叾娆′粠杈撳叆瑙f瀽涓幏寰楋紝鏈€鍚庢煡搴撹ˉ鍏? 
final_name = raw_name or name_from_input or _query_stock_name_by_code(code6) # 鏌ユ壘宸插瓨鍦ㄨ褰曪紝鍏煎鍘嗗彶锛?浣?甯﹀悗缂€ candidates = [code6, f"{code6}.SH", f"{code6}.SZ"] existing = Watchlist.query.filter( Watchlist.user_id == session['user_id'], Watchlist.stock_code.in_(candidates) ).first() if existing: # 缁熶竴涓?浣嶏紝琛ュ叏鍚嶇О updated = False if existing.stock_code != code6: existing.stock_code = code6 updated = True if (not existing.stock_name) and final_name: existing.stock_name = final_name updated = True if updated: db.session.commit() return jsonify({'success': True, 'data': {'id': existing.id}}) item = Watchlist(user_id=session['user_id'], stock_code=code6, stock_name=final_name) db.session.add(item) db.session.commit() return jsonify({'success': True, 'data': {'id': item.id}}) # 娉ㄦ剰锛?realtime 璺敱蹇呴』鍦?/ 涔嬪墠瀹氫箟锛屽惁鍒欎細琚敊璇尮閰? @app.route('/api/account/watchlist/realtime', methods=['GET']) def get_watchlist_realtime(): """鑾峰彇鑷€夎偂瀹炴椂琛屾儏鏁版嵁锛堝熀浜庡垎閽熺嚎锛? 浼樺寲涓烘壒閲忔煡璇?"" try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 # 鑾峰彇鐢ㄦ埛鑷€夎偂鍒楄〃 watchlist = Watchlist.query.filter_by(user_id=session['user_id']).all() if not watchlist: return jsonify({'success': True, 'data': []}) # 鑾峰彇鑲$エ浠g爜鍒楄〃骞舵爣鍑嗗寲 code_mapping = {} # code6 -> full_code 鏄犲皠 full_codes = [] for item in watchlist: code6, _ = _normalize_stock_input(item.stock_code) normalized = code6 or str(item.stock_code).strip().upper() # 杞崲涓哄甫鍚庣紑鐨勫畬鏁翠唬鐮? if '.' in normalized: full_code = normalized elif normalized.startswith('6'): full_code = f"{normalized}.SH" elif normalized.startswith(('8', '9', '4')): full_code = f"{normalized}.BJ" else: full_code = f"{normalized}.SZ" code_mapping[normalized] = full_code full_codes.append(full_code) if not full_codes: return jsonify({'success': True, 'data': []}) # 浣跨敤鎵归噺鏌ヨ鑾峰彇鏈€鏂拌鎯咃紙鍗曟鏌ヨ锛? client = get_clickhouse_client() today = datetime.now().date() start_date = today - timedelta(days=7) # 鎵归噺鏌ヨ锛氳幏鍙栨瘡鍙偂绁ㄧ殑鏈€鏂颁竴鏉″垎閽熸暟鎹? 
batch_query = """ WITH latest AS ( SELECT code, close, timestamp, high, low, volume, amt, ROW_NUMBER() OVER (PARTITION BY code ORDER BY timestamp DESC) as rn FROM stock_minute WHERE code IN %(codes)s AND timestamp >= %(start)s ) SELECT code, close, timestamp, high, low, volume, amt FROM latest WHERE rn = 1 """ result = client.execute(batch_query, { 'codes': full_codes, 'start': datetime.combine(start_date, dt_time(9, 30)) }) # 鏋勫缓鏈€鏂颁环鏍兼槧灏? latest_data_map = {} for row in result: code, close, ts, high, low, volume, amt = row latest_data_map[code] = { 'close': float(close), 'timestamp': ts, 'high': float(high), 'low': float(low), 'volume': int(volume), 'amount': float(amt) } # 鎵归噺鏌ヨ鍓嶆敹鐩樹环锛堜娇鐢?ea_trade 琛紝鏇村噯纭級 prev_close_map = {} if latest_data_map: # 鑾峰彇鍓嶄竴浜ゆ槗鏃? prev_trading_day = None for td in reversed(trading_days): if td < today: prev_trading_day = td break if prev_trading_day: base_codes = [code.split('.')[0] for code in full_codes] prev_day_str = prev_trading_day.strftime('%Y%m%d') with engine.connect() as conn: placeholders = ','.join([f':code{i}' for i in range(len(base_codes))]) params = {f'code{i}': code for i, code in enumerate(base_codes)} params['trade_date'] = prev_day_str prev_result = conn.execute(text(f""" SELECT SECCODE, F007N as close_price FROM ea_trade WHERE SECCODE IN ({placeholders}) AND TRADEDATE = :trade_date """), params).fetchall() for row in prev_result: base_code, close_price = row[0], row[1] if close_price: prev_close_map[base_code] = float(close_price) # 鏋勫缓鍝嶅簲鏁版嵁 quotes_data = {} for code6, full_code in code_mapping.items(): latest = latest_data_map.get(full_code) if latest: base_code = full_code.split('.')[0] prev_close = prev_close_map.get(base_code, latest['close']) change = latest['close'] - prev_close change_percent = (change / prev_close * 100) if prev_close > 0 else 0.0 quotes_data[code6] = { 'price': latest['close'], 'prev_close': prev_close, 'change': change, 'change_percent': change_percent, 'high': latest['high'], 'low': 
latest['low'], 'volume': latest['volume'], 'amount': latest['amount'], 'update_time': latest['timestamp'].strftime('%H:%M:%S') } response_data = [] for item in watchlist: code6, _ = _normalize_stock_input(item.stock_code) quote = quotes_data.get(code6 or item.stock_code, {}) response_data.append({ 'stock_code': code6 or item.stock_code, 'stock_name': item.stock_name or (code6 and _query_stock_name_by_code(code6)) or None, 'current_price': quote.get('price', 0), 'prev_close': quote.get('prev_close', 0), 'change': quote.get('change', 0), 'change_percent': quote.get('change_percent', 0), 'high': quote.get('high', 0), 'low': quote.get('low', 0), 'volume': quote.get('volume', 0), 'amount': quote.get('amount', 0), 'update_time': quote.get('update_time', ''), }) return jsonify({ 'success': True, 'data': response_data }) except Exception as e: print(f"鑾峰彇瀹炴椂琛屾儏澶辫触: {str(e)}") import traceback traceback.print_exc() return jsonify({'success': False, 'error': '鑾峰彇瀹炴椂琛屾儏澶辫触'}), 500 @app.route('/api/account/watchlist/', methods=['DELETE']) def remove_from_watchlist(stock_code): """浠庤嚜閫夎偂绉婚櫎""" if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 code6, _ = _normalize_stock_input(stock_code) candidates = [] if code6: candidates = [code6, f"{code6}.SH", f"{code6}.SZ"] # 鍖呭惈鍘熷浼犲叆锛堜互鍏煎鍘嗗彶锛? 
if stock_code not in candidates: candidates.append(stock_code) item = Watchlist.query.filter( Watchlist.user_id == session['user_id'], Watchlist.stock_code.in_(candidates) ).first() if not item: return jsonify({'success': False, 'error': '鏈壘鍒拌嚜閫夐」'}), 404 db.session.delete(item) db.session.commit() return jsonify({'success': True}) # 鎶曡祫璁″垝鍜屽鐩樼浉鍏崇殑妯″瀷 class InvestmentPlan(db.Model): __tablename__ = 'investment_plans' id = db.Column(db.Integer, primary_key=True, autoincrement=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) date = db.Column(db.Date, nullable=False) title = db.Column(db.String(200), nullable=False) content = db.Column(db.Text) type = db.Column(db.String(20)) # 'plan' or 'review' stocks = db.Column(db.Text) # JSON array of stock codes tags = db.Column(db.String(500)) # JSON array of tags status = db.Column(db.String(20), default='active') # active, completed, cancelled created_at = db.Column(db.DateTime, default=datetime.utcnow) updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) def to_dict(self): return { 'id': self.id, 'date': self.date.isoformat() if self.date else None, 'title': self.title, 'content': self.content, 'type': self.type, 'stocks': json.loads(self.stocks) if self.stocks else [], 'tags': json.loads(self.tags) if self.tags else [], 'status': self.status, 'created_at': self.created_at.isoformat() if self.created_at else None, 'updated_at': self.updated_at.isoformat() if self.updated_at else None } @app.route('/api/account/investment-plans', methods=['GET']) def get_investment_plans(): """鑾峰彇鎶曡祫璁″垝鍜屽鐩樿褰?"" try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 plan_type = request.args.get('type') # 'plan', 'review', or None for all start_date = request.args.get('start_date') end_date = request.args.get('end_date') query = InvestmentPlan.query.filter_by(user_id=session['user_id']) if plan_type: query = query.filter_by(type=plan_type) 
if start_date: query = query.filter(InvestmentPlan.date >= datetime.fromisoformat(start_date).date()) if end_date: query = query.filter(InvestmentPlan.date <= datetime.fromisoformat(end_date).date()) plans = query.order_by(InvestmentPlan.date.desc()).all() return jsonify({ 'success': True, 'data': [plan.to_dict() for plan in plans] }) except Exception as e: print(f"鑾峰彇鎶曡祫璁″垝澶辫触: {str(e)}") return jsonify({'success': False, 'error': '鑾峰彇鏁版嵁澶辫触'}), 500 @app.route('/api/account/investment-plans', methods=['POST']) def create_investment_plan(): """鍒涘缓鎶曡祫璁″垝鎴栧鐩樿褰?"" try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 data = request.get_json() # 楠岃瘉蹇呰瀛楁 if not data.get('date') or not data.get('title') or not data.get('type'): return jsonify({'success': False, 'error': '缂哄皯蹇呰瀛楁'}), 400 plan = InvestmentPlan( user_id=session['user_id'], date=datetime.fromisoformat(data['date']).date(), title=data['title'], content=data.get('content', ''), type=data['type'], stocks=json.dumps(data.get('stocks', [])), tags=json.dumps(data.get('tags', [])), status=data.get('status', 'active') ) db.session.add(plan) db.session.commit() return jsonify({ 'success': True, 'data': plan.to_dict() }) except Exception as e: db.session.rollback() print(f"鍒涘缓鎶曡祫璁″垝澶辫触: {str(e)}") return jsonify({'success': False, 'error': '鍒涘缓澶辫触'}), 500 @app.route('/api/account/investment-plans/', methods=['PUT']) def update_investment_plan(plan_id): """鏇存柊鎶曡祫璁″垝鎴栧鐩樿褰?"" try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 plan = InvestmentPlan.query.filter_by(id=plan_id, user_id=session['user_id']).first() if not plan: return jsonify({'success': False, 'error': '鏈壘鍒拌璁板綍'}), 404 data = request.get_json() if 'date' in data: plan.date = datetime.fromisoformat(data['date']).date() if 'title' in data: plan.title = data['title'] if 'content' in data: plan.content = data['content'] if 'stocks' in data: plan.stocks = 
json.dumps(data['stocks']) if 'tags' in data: plan.tags = json.dumps(data['tags']) if 'status' in data: plan.status = data['status'] plan.updated_at = datetime.utcnow() db.session.commit() return jsonify({ 'success': True, 'data': plan.to_dict() }) except Exception as e: db.session.rollback() print(f"鏇存柊鎶曡祫璁″垝澶辫触: {str(e)}") return jsonify({'success': False, 'error': '鏇存柊澶辫触'}), 500 @app.route('/api/account/investment-plans/', methods=['DELETE']) def delete_investment_plan(plan_id): """鍒犻櫎鎶曡祫璁″垝鎴栧鐩樿褰?"" try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 plan = InvestmentPlan.query.filter_by(id=plan_id, user_id=session['user_id']).first() if not plan: return jsonify({'success': False, 'error': '鏈壘鍒拌璁板綍'}), 404 db.session.delete(plan) db.session.commit() return jsonify({'success': True}) except Exception as e: db.session.rollback() print(f"鍒犻櫎鎶曡祫璁″垝澶辫触: {str(e)}") return jsonify({'success': False, 'error': '鍒犻櫎澶辫触'}), 500 @app.route('/api/account/events/following', methods=['GET']) def get_my_following_events(): """鑾峰彇鎴戝叧娉ㄧ殑浜嬩欢鍒楄〃""" if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 follows = EventFollow.query.filter_by(user_id=session['user_id']).order_by(EventFollow.created_at.desc()).all() event_ids = [f.event_id for f in follows] if not event_ids: return jsonify({'success': True, 'data': []}) events = Event.query.filter(Event.id.in_(event_ids)).all() data = [] for ev in events: data.append({ 'id': ev.id, 'title': ev.title, 'event_type': ev.event_type, 'start_time': ev.start_time.isoformat() if ev.start_time else None, 'view_count': ev.view_count or 0, 'related_avg_chg': ev.related_avg_chg, 'follower_count': ev.follower_count, }) return jsonify({'success': True, 'data': data}) @app.route('/api/account/events/comments', methods=['GET']) def get_my_event_comments(): """鑾峰彇鎴戝湪浜嬩欢涓婄殑璇勮锛圗ventComment锛?"" if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 
comments = EventComment.query.filter_by(user_id=session['user_id']).order_by(EventComment.created_at.desc()).limit( 100).all() return jsonify({'success': True, 'data': [c.to_dict() for c in comments]}) @app.route('/api/account/events/posts', methods=['GET']) def get_my_event_posts(): """鑾峰彇鎴戝湪浜嬩欢涓婄殑甯栧瓙锛圥ost锛? 鐢ㄤ簬涓汉涓績鏄剧ず""" if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 try: # 鏌ヨ褰撳墠鐢ㄦ埛鐨勬墍鏈?Post锛堟寜鍒涘缓鏃堕棿鍊掑簭锛? posts = Post.query.filter_by( user_id=session['user_id'], status='active' ).order_by(Post.created_at.desc()).limit(100).all() posts_data = [] for post in posts: # 鑾峰彇鍏宠仈鐨勪簨浠朵俊鎭? event = Event.query.get(post.event_id) event_title = event.title if event else '鏈煡浜嬩欢' # 鑾峰彇鐢ㄦ埛淇℃伅 user = User.query.get(post.user_id) author = user.username if user else '鍖垮悕鐢ㄦ埛' # 鈿?杩斿洖鏍煎紡鍏煎鏃?EventComment.to_dict() posts_data.append({ 'id': post.id, 'event_id': post.event_id, 'event_title': event_title, # 鈿?鏂板瀛楁锛堟棫 API 娌℃湁锛? 'user_id': post.user_id, 'author': author, # 鈿?鍏煎鏃ф牸寮忥紙瀛楃涓茬被鍨嬶級 'content': post.content, 'title': post.title, # Post 鐙湁瀛楁锛堝彲閫夛級 'content_type': post.content_type, # Post 鐙湁瀛楁 'likes': post.likes_count, # 鈿?鍏煎鏃у瓧娈靛悕 'created_at': post.created_at.isoformat(), 'updated_at': post.updated_at.isoformat(), 'status': post.status, }) return jsonify({'success': True, 'data': posts_data}) except Exception as e: print(f"鑾峰彇鐢ㄦ埛甯栧瓙澶辫触: {e}") return jsonify({'success': False, 'error': '鑾峰彇甯栧瓙澶辫触'}), 500 @app.route('/api/account/future-events/following', methods=['GET']) def get_my_following_future_events(): """鑾峰彇褰撳墠鐢ㄦ埛鍏虫敞鐨勬湭鏉ヤ簨浠?"" if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 try: # 鑾峰彇鐢ㄦ埛鍏虫敞鐨勬湭鏉ヤ簨浠禝D鍒楄〃 follows = FutureEventFollow.query.filter_by(user_id=session['user_id']).all() future_event_ids = [f.future_event_id for f in follows] if not future_event_ids: return jsonify({'success': True, 'data': []}) # 鏌ヨ鏈潵浜嬩欢璇︽儏 sql = """ SELECT * FROM future_events WHERE data_id IN :event_ids 
ORDER BY calendar_time \ """ result = db.session.execute( text(sql), {'event_ids': tuple(future_event_ids)} ) events = [] # 鎵€鏈夎繑鍥炵殑浜嬩欢閮芥槸宸插叧娉ㄧ殑 following_ids = set(future_event_ids) for row in result: event_data = process_future_event_row(row, following_ids) events.append(event_data) return jsonify({'success': True, 'data': events}) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 class PostLike(db.Model): """甯栧瓙鐐硅禐""" id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) post_id = db.Column(db.Integer, db.ForeignKey('post.id'), nullable=False) created_at = db.Column(db.DateTime, default=beijing_now) user = db.relationship('User', backref='post_likes') __table_args__ = (db.UniqueConstraint('user_id', 'post_id'),) # =========================== # 棰勬祴甯傚満绯荤粺妯″瀷 # =========================== class UserCreditAccount(db.Model): """鐢ㄦ埛绉垎璐︽埛""" __tablename__ = 'user_credit_account' id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False, unique=True) # 绉垎浣欓 balance = db.Column(db.Float, default=10000.0, nullable=False) # 鍒濆10000绉垎 frozen_balance = db.Column(db.Float, default=0.0, nullable=False) # 鍐荤粨绉垎 total_earned = db.Column(db.Float, default=0.0, nullable=False) # 绱鑾峰緱 total_spent = db.Column(db.Float, default=0.0, nullable=False) # 绱娑堣垂 # 鏃堕棿 created_at = db.Column(db.DateTime, default=beijing_now, nullable=False) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) last_daily_bonus_at = db.Column(db.DateTime) # 鏈€鍚庝竴娆¢鍙栨瘡鏃ュ鍔辨椂闂? 
# 鍏崇郴 user = db.relationship('User', backref=db.backref('credit_account', uselist=False)) def __repr__(self): return f'' class PredictionTopic(db.Model): """棰勬祴璇濋""" __tablename__ = 'prediction_topic' id = db.Column(db.Integer, primary_key=True) creator_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) # 鍩烘湰淇℃伅 title = db.Column(db.String(200), nullable=False) description = db.Column(db.Text) category = db.Column(db.String(50), default='stock') # stock/event/market # 甯傚満鏁版嵁 yes_total_shares = db.Column(db.Integer, default=0, nullable=False) # YES鏂规€讳唤棰? no_total_shares = db.Column(db.Integer, default=0, nullable=False) # NO鏂规€讳唤棰? yes_price = db.Column(db.Float, default=500.0, nullable=False) # YES鏂逛环鏍硷紙0-1000锛? no_price = db.Column(db.Float, default=500.0, nullable=False) # NO鏂逛环鏍硷紙0-1000锛? # 濂栨睜 total_pool = db.Column(db.Float, default=0.0, nullable=False) # 鎬诲姹狅紙2%浜ゆ槗绋庣疮绉級 # 棰嗕富淇℃伅 yes_lord_id = db.Column(db.Integer, db.ForeignKey('user.id')) # YES鏂归涓? no_lord_id = db.Column(db.Integer, db.ForeignKey('user.id')) # NO鏂归涓? # 鐘舵€? 
status = db.Column(db.String(20), default='active', nullable=False) # active/settled/cancelled result = db.Column(db.String(10)) # yes/no/draw锛堢粨绠楃粨鏋滐級 # 鏃堕棿 deadline = db.Column(db.DateTime, nullable=False) # 鎴鏃堕棿 settled_at = db.Column(db.DateTime) # 缁撶畻鏃堕棿 created_at = db.Column(db.DateTime, default=beijing_now, nullable=False) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) # 缁熻 views_count = db.Column(db.Integer, default=0) comments_count = db.Column(db.Integer, default=0) participants_count = db.Column(db.Integer, default=0) # 鍏崇郴 creator = db.relationship('User', foreign_keys=[creator_id], backref='created_topics') yes_lord = db.relationship('User', foreign_keys=[yes_lord_id], backref='yes_lord_topics') no_lord = db.relationship('User', foreign_keys=[no_lord_id], backref='no_lord_topics') positions = db.relationship('PredictionPosition', backref='topic', lazy='dynamic') transactions = db.relationship('PredictionTransaction', backref='topic', lazy='dynamic') comments = db.relationship('TopicComment', backref='topic', lazy='dynamic') def __repr__(self): return f'' class PredictionPosition(db.Model): """鐢ㄦ埛鎸佷粨""" __tablename__ = 'prediction_position' id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) topic_id = db.Column(db.Integer, db.ForeignKey('prediction_topic.id'), nullable=False) # 鎸佷粨淇℃伅 direction = db.Column(db.String(3), nullable=False) # yes/no shares = db.Column(db.Integer, default=0, nullable=False) # 鎸佹湁浠介 avg_cost = db.Column(db.Float, default=0.0, nullable=False) # 骞冲潎鎴愭湰 total_invested = db.Column(db.Float, default=0.0, nullable=False) # 鎬绘姇鍏? # 鏃堕棿 created_at = db.Column(db.DateTime, default=beijing_now, nullable=False) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) # 鍏崇郴 user = db.relationship('User', backref='prediction_positions') # 鍞竴绾︽潫锛氭瘡涓敤鎴峰湪姣忎釜璇濋鐨勬瘡涓柟鍚戝彧鑳芥湁涓€涓寔浠? 
__table_args__ = (db.UniqueConstraint('user_id', 'topic_id', 'direction'),) def __repr__(self): return f'' class PredictionTransaction(db.Model): """棰勬祴浜ゆ槗璁板綍""" __tablename__ = 'prediction_transaction' id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) topic_id = db.Column(db.Integer, db.ForeignKey('prediction_topic.id'), nullable=False) # 浜ゆ槗淇℃伅 trade_type = db.Column(db.String(10), nullable=False) # buy/sell direction = db.Column(db.String(3), nullable=False) # yes/no shares = db.Column(db.Integer, nullable=False) # 浠介鏁伴噺 price = db.Column(db.Float, nullable=False) # 鎴愪氦浠锋牸 # 璐圭敤 amount = db.Column(db.Float, nullable=False) # 浜ゆ槗閲戦 tax = db.Column(db.Float, default=0.0, nullable=False) # 鎵嬬画璐癸紙2%锛? total_cost = db.Column(db.Float, nullable=False) # 鎬绘垚鏈紙amount + tax锛? # 鏃堕棿 created_at = db.Column(db.DateTime, default=beijing_now, nullable=False) # 鍏崇郴 user = db.relationship('User', backref='prediction_transactions') def __repr__(self): return f'' class CreditTransaction(db.Model): """绉垎浜ゆ槗璁板綍""" __tablename__ = 'credit_transaction' id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) # 浜ゆ槗淇℃伅 transaction_type = db.Column(db.String(30), nullable=False) # prediction_buy/prediction_sell/daily_bonus/create_topic/settle_win amount = db.Column(db.Float, nullable=False) # 閲戦锛堟鏁?澧炲姞锛岃礋鏁?鍑忓皯锛? balance_after = db.Column(db.Float, nullable=False) # 浜ゆ槗鍚庝綑棰? 
# 鍏宠仈 related_topic_id = db.Column(db.Integer, db.ForeignKey('prediction_topic.id')) # 鐩稿叧璇濋 related_transaction_id = db.Column(db.Integer, db.ForeignKey('prediction_transaction.id')) # 鐩稿叧棰勬祴浜ゆ槗 # 鎻忚堪 description = db.Column(db.String(200)) # 浜ゆ槗鎻忚堪 # 鏃堕棿 created_at = db.Column(db.DateTime, default=beijing_now, nullable=False) # 鍏崇郴 user = db.relationship('User', backref='credit_transactions') related_topic = db.relationship('PredictionTopic', backref='credit_transactions') def __repr__(self): return f'' class TopicComment(db.Model): """璇濋璇勮""" __tablename__ = 'topic_comment' id = db.Column(db.Integer, primary_key=True) topic_id = db.Column(db.Integer, db.ForeignKey('prediction_topic.id'), nullable=False) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) # 鍐呭 content = db.Column(db.Text, nullable=False) parent_id = db.Column(db.Integer, db.ForeignKey('topic_comment.id')) # 鐖惰瘎璁篒D锛堝洖澶嶅姛鑳斤級 # 鐘舵€? is_pinned = db.Column(db.Boolean, default=False, nullable=False) # 鏄惁缃《锛堥涓荤壒鏉冿級 status = db.Column(db.String(20), default='active') # active/hidden/deleted # 缁熻 likes_count = db.Column(db.Integer, default=0, nullable=False) # 瑙傜偣IPO 鐩稿叧 total_investment = db.Column(db.Integer, default=0, nullable=False) # 鎬绘姇璧勯 investor_count = db.Column(db.Integer, default=0, nullable=False) # 鎶曡祫浜烘暟 is_verified = db.Column(db.Boolean, default=False, nullable=False) # 鏄惁宸查獙璇? verification_result = db.Column(db.String(20)) # 楠岃瘉缁撴灉锛歝orrect/incorrect/null position_rank = db.Column(db.Integer) # 璇勮浣嶇疆鎺掑悕锛堢敤浜庨鍙戞潈鎷嶅崠锛? 
# 鏃堕棿 created_at = db.Column(db.DateTime, default=beijing_now, nullable=False) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) # 鍏崇郴 user = db.relationship('User', backref='topic_comments') replies = db.relationship('TopicComment', backref=db.backref('parent', remote_side=[id]), lazy='dynamic') likes = db.relationship('TopicCommentLike', backref='comment', lazy='dynamic') def __repr__(self): return f'' class TopicCommentLike(db.Model): """璇濋璇勮鐐硅禐""" __tablename__ = 'topic_comment_like' id = db.Column(db.Integer, primary_key=True) comment_id = db.Column(db.Integer, db.ForeignKey('topic_comment.id'), nullable=False) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) created_at = db.Column(db.DateTime, default=beijing_now, nullable=False) # 鍏崇郴 user = db.relationship('User', backref='topic_comment_likes') # 鍞竴绾︽潫 __table_args__ = (db.UniqueConstraint('comment_id', 'user_id'),) def __repr__(self): return f'' class CommentInvestment(db.Model): """璇勮鎶曡祫璁板綍锛堣鐐笽PO锛?"" __tablename__ = 'comment_investment' id = db.Column(db.Integer, primary_key=True) comment_id = db.Column(db.Integer, db.ForeignKey('topic_comment.id'), nullable=False) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) # 鎶曡祫鏁版嵁 shares = db.Column(db.Integer, nullable=False) # 鎶曡祫浠介 amount = db.Column(db.Integer, nullable=False) # 鎶曡祫閲戦 avg_price = db.Column(db.Float, nullable=False) # 骞冲潎浠锋牸 # 鐘舵€? 
status = db.Column(db.String(20), default='active', nullable=False) # active/settled # 鏃堕棿 created_at = db.Column(db.DateTime, default=beijing_now, nullable=False) # 鍏崇郴 user = db.relationship('User', backref='comment_investments') comment = db.relationship('TopicComment', backref='investments') def __repr__(self): return f'' class CommentPositionBid(db.Model): """璇勮浣嶇疆绔炴媿璁板綍锛堥鍙戞潈鎷嶅崠锛?"" __tablename__ = 'comment_position_bid' id = db.Column(db.Integer, primary_key=True) topic_id = db.Column(db.Integer, db.ForeignKey('prediction_topic.id'), nullable=False) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) # 绔炴媿鏁版嵁 position = db.Column(db.Integer, nullable=False) # 浣嶇疆锛?/2/3 bid_amount = db.Column(db.Integer, nullable=False) # 鍑轰环閲戦 status = db.Column(db.String(20), default='pending', nullable=False) # pending/won/lost # 鏃堕棿 created_at = db.Column(db.DateTime, default=beijing_now, nullable=False) expires_at = db.Column(db.DateTime, nullable=False) # 绔炴媿鎴鏃堕棿 # 鍏崇郴 user = db.relationship('User', backref='comment_position_bids') topic = db.relationship('PredictionTopic', backref='position_bids') def __repr__(self): return f'' class TimeCapsuleTopic(db.Model): """鏃堕棿鑳跺泭璇濋锛堥暱鏈熼娴嬶級""" __tablename__ = 'time_capsule_topic' id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) # 璇濋鍐呭 title = db.Column(db.String(200), nullable=False) description = db.Column(db.Text) encrypted_content = db.Column(db.Text) # 鍔犲瘑鐨勯娴嬪唴瀹? encryption_key = db.Column(db.String(500)) # 鍔犲瘑瀵嗛挜锛堝悗绔瓨鍌級 # 鏃堕棿鑼冨洿 start_year = db.Column(db.Integer, nullable=False) # 璧峰骞翠唤 end_year = db.Column(db.Integer, nullable=False) # 缁撴潫骞翠唤 # 鐘舵€? status = db.Column(db.String(20), default='active', nullable=False) # active/settled is_decrypted = db.Column(db.Boolean, default=False, nullable=False) # 鏄惁宸茶В瀵? 
actual_happened_year = db.Column(db.Integer) # 瀹為檯鍙戠敓骞翠唤 # 缁熻 total_pool = db.Column(db.Integer, default=0, nullable=False) # 鎬诲姹? # 鏃堕棿 created_at = db.Column(db.DateTime, default=beijing_now, nullable=False) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) # 鍏崇郴 user = db.relationship('User', backref='time_capsule_topics') time_slots = db.relationship('TimeCapsuleTimeSlot', backref='topic', lazy='dynamic') def __repr__(self): return f'' class TimeCapsuleTimeSlot(db.Model): """鏃堕棿鑳跺泭鏃堕棿娈?"" __tablename__ = 'time_capsule_time_slot' id = db.Column(db.Integer, primary_key=True) topic_id = db.Column(db.Integer, db.ForeignKey('time_capsule_topic.id'), nullable=False) # 鏃堕棿娈? year_start = db.Column(db.Integer, nullable=False) year_end = db.Column(db.Integer, nullable=False) # 绔炴媿鏁版嵁 current_holder_id = db.Column(db.Integer, db.ForeignKey('user.id')) # 褰撳墠鎸佹湁鑰? current_price = db.Column(db.Integer, default=100, nullable=False) # 褰撳墠浠锋牸 total_bids = db.Column(db.Integer, default=0, nullable=False) # 鎬荤珵鎷嶆鏁? # 鐘舵€? 
status = db.Column(db.String(20), default='active', nullable=False) # active/won/expired # 鏃堕棿 created_at = db.Column(db.DateTime, default=beijing_now, nullable=False) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) # 鍏崇郴 current_holder = db.relationship('User', foreign_keys=[current_holder_id]) bids = db.relationship('TimeSlotBid', backref='time_slot', lazy='dynamic') def __repr__(self): return f'' class TimeSlotBid(db.Model): """鏃堕棿娈电珵鎷嶈褰?"" __tablename__ = 'time_slot_bid' id = db.Column(db.Integer, primary_key=True) slot_id = db.Column(db.Integer, db.ForeignKey('time_capsule_time_slot.id'), nullable=False) user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) # 绔炴媿鏁版嵁 bid_amount = db.Column(db.Integer, nullable=False) status = db.Column(db.String(20), default='outbid', nullable=False) # outbid/holding/won # 鏃堕棿 created_at = db.Column(db.DateTime, default=beijing_now, nullable=False) # 鍏崇郴 user = db.relationship('User', backref='time_slot_bids') def __repr__(self): return f'' class Event(db.Model): """浜嬩欢妯″瀷""" id = db.Column(db.Integer, primary_key=True) title = db.Column(db.String(200), nullable=False) description = db.Column(db.Text) # 浜嬩欢绫诲瀷涓庣姸鎬? event_type = db.Column(db.String(50)) status = db.Column(db.String(20), default='active') # 鏃堕棿鐩稿叧 start_time = db.Column(db.DateTime, default=beijing_now) end_time = db.Column(db.DateTime) created_at = db.Column(db.DateTime, default=beijing_now) updated_at = db.Column(db.DateTime, default=beijing_now) # 鐑害涓庣粺璁? 
hot_score = db.Column(db.Float, default=0) view_count = db.Column(db.Integer, default=0) trending_score = db.Column(db.Float, default=0) post_count = db.Column(db.Integer, default=0) follower_count = db.Column(db.Integer, default=0) # 鍏宠仈淇℃伅 related_industries = db.Column(db.String(20)) # 鐢充竾琛屼笟浠g爜锛屽 "S640701" keywords = db.Column(db.JSON) files = db.Column(db.JSON) importance = db.Column(db.String(20)) related_avg_chg = db.Column(db.Float, default=0) related_max_chg = db.Column(db.Float, default=0) related_week_chg = db.Column(db.Float, default=0) # 鏂板瀛楁 invest_score = db.Column(db.Integer) # 瓒呴鏈熷緱鍒? expectation_surprise_score = db.Column(db.Integer) # 鍒涘缓鑰呬俊鎭? creator_id = db.Column(db.Integer, db.ForeignKey('user.id')) creator = db.relationship('User', backref='created_events') # 鍏崇郴 posts = db.relationship('Post', backref='event', lazy='dynamic') followers = db.relationship('EventFollow', backref='event', lazy='dynamic') related_stocks = db.relationship('RelatedStock', backref='event', lazy='dynamic') historical_events = db.relationship('HistoricalEvent', backref='event', lazy='dynamic') related_data = db.relationship('RelatedData', backref='event', lazy='dynamic') related_concepts = db.relationship('RelatedConcepts', backref='event', lazy='dynamic') @property def keywords_list(self): """杩斿洖瑙f瀽鍚庣殑鍏抽敭璇嶅垪琛?"" if not self.keywords: return [] if isinstance(self.keywords, list): return self.keywords try: # 濡傛灉鏄瓧绗︿覆锛屽皾璇曡В鏋怞SON if isinstance(self.keywords, str): decoded = json.loads(self.keywords) # 澶勭悊Unicode缂栫爜鐨勬儏鍐? 
if isinstance(decoded, list): return [ keyword.encode('utf-8').decode('unicode_escape') if isinstance(keyword, str) and '\\u' in keyword else keyword for keyword in decoded ] return [] # 濡傛灉宸茬粡鏄瓧鍏告垨鍏朵粬鏍煎紡锛屽皾璇曡浆鎹负鍒楄〃 return list(self.keywords) except (json.JSONDecodeError, AttributeError, TypeError): return [] def set_keywords(self, keywords): """璁剧疆鍏抽敭璇嶅垪琛?"" if isinstance(keywords, list): self.keywords = json.dumps(keywords, ensure_ascii=False) elif isinstance(keywords, str): try: # 灏濊瘯瑙f瀽JSON瀛楃涓? parsed = json.loads(keywords) if isinstance(parsed, list): self.keywords = json.dumps(parsed, ensure_ascii=False) else: self.keywords = json.dumps([keywords], ensure_ascii=False) except json.JSONDecodeError: # 濡傛灉涓嶆槸鏈夋晥鐨凧SON锛屽皢鍏朵綔涓哄崟涓叧閿瘝 self.keywords = json.dumps([keywords], ensure_ascii=False) class RelatedStock(db.Model): """鐩稿叧鏍囩殑妯″瀷""" id = db.Column(db.Integer, primary_key=True) event_id = db.Column(db.Integer, db.ForeignKey('event.id')) stock_code = db.Column(db.String(20)) # 鑲$エ浠g爜 stock_name = db.Column(db.String(100)) # 鑲$エ鍚嶇О sector = db.Column(db.String(100)) # 鍏宠仈绫诲瀷 relation_desc = db.Column(db.String(1024)) # 鍏宠仈鍘熷洜鎻忚堪 created_at = db.Column(db.DateTime, default=beijing_now) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) correlation = db.Column(db.Float()) momentum = db.Column(db.String(1024)) # 鍔ㄩ噺 retrieved_sources = db.Column(db.JSON) # 鍔ㄩ噺 class RelatedData(db.Model): """鍏宠仈鏁版嵁妯″瀷""" id = db.Column(db.Integer, primary_key=True) event_id = db.Column(db.Integer, db.ForeignKey('event.id')) title = db.Column(db.String(200)) # 鏁版嵁鏍囬 data_type = db.Column(db.String(50)) # 鏁版嵁绫诲瀷 data_content = db.Column(db.JSON) # 鏁版嵁鍐呭(JSON鏍煎紡) description = db.Column(db.Text) # 鏁版嵁鎻忚堪 created_at = db.Column(db.DateTime, default=beijing_now) class RelatedConcepts(db.Model): """浜嬩欢鍏宠仈姒傚康妯″瀷""" __tablename__ = 'related_concepts' id = db.Column(db.Integer, primary_key=True) event_id = db.Column(db.Integer, db.ForeignKey('event.id')) 
concept = db.Column(db.String(255)) # 姒傚康鍚嶇О reason = db.Column(db.Text) # 鍏宠仈鍘熷洜锛圓I 鍒嗘瀽锛? created_at = db.Column(db.DateTime, default=beijing_now) class EventHotHistory(db.Model): """浜嬩欢鐑害鍘嗗彶璁板綍""" id = db.Column(db.Integer, primary_key=True) event_id = db.Column(db.Integer, db.ForeignKey('event.id')) score = db.Column(db.Float) # 鎬诲垎 interaction_score = db.Column(db.Float) # 浜掑姩鍒嗘暟 follow_score = db.Column(db.Float) # 鍏虫敞搴﹀垎鏁? view_score = db.Column(db.Float) # 娴忚閲忓垎鏁? recent_activity_score = db.Column(db.Float) # 鏈€杩戞椿璺冨害鍒嗘暟 time_decay = db.Column(db.Float) # 鏃堕棿琛板噺鍥犲瓙 created_at = db.Column(db.DateTime, default=beijing_now) event = db.relationship('Event', backref='hot_history') class EventTransmissionNode(db.Model): """浜嬩欢浼犲鑺傜偣妯″瀷""" __tablename__ = 'event_transmission_nodes' id = db.Column(db.Integer, primary_key=True) event_id = db.Column(db.Integer, db.ForeignKey('event.id'), nullable=False) node_type = db.Column(db.Enum('company', 'industry', 'policy', 'technology', 'market', 'event', 'other'), nullable=False) node_name = db.Column(db.String(200), nullable=False) node_description = db.Column(db.Text) importance_score = db.Column(db.Integer, default=50) stock_code = db.Column(db.String(20)) is_main_event = db.Column(db.Boolean, default=False) created_at = db.Column(db.DateTime, default=beijing_now) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) # Relationships event = db.relationship('Event', backref='transmission_nodes') outgoing_edges = db.relationship('EventTransmissionEdge', foreign_keys='EventTransmissionEdge.from_node_id', backref='from_node', cascade='all, delete-orphan') incoming_edges = db.relationship('EventTransmissionEdge', foreign_keys='EventTransmissionEdge.to_node_id', backref='to_node', cascade='all, delete-orphan') __table_args__ = ( db.Index('idx_event_id', 'event_id'), db.Index('idx_node_type', 'node_type'), db.Index('idx_main_event', 'is_main_event'), ) class EventTransmissionEdge(db.Model): 
"""浜嬩欢浼犲杈规ā鍨?"" __tablename__ = 'event_transmission_edges' id = db.Column(db.Integer, primary_key=True) event_id = db.Column(db.Integer, db.ForeignKey('event.id'), nullable=False) from_node_id = db.Column(db.Integer, db.ForeignKey('event_transmission_nodes.id'), nullable=False) to_node_id = db.Column(db.Integer, db.ForeignKey('event_transmission_nodes.id'), nullable=False) transmission_type = db.Column(db.Enum('supply_chain', 'competition', 'policy', 'technology', 'capital_flow', 'expectation', 'cyclic_effect', 'other'), nullable=False) transmission_mechanism = db.Column(db.Text) direction = db.Column(db.Enum('positive', 'negative', 'neutral', 'mixed'), default='neutral') strength = db.Column(db.Integer, default=50) impact = db.Column(db.Text) is_circular = db.Column(db.Boolean, default=False) created_at = db.Column(db.DateTime, default=beijing_now) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) # Relationship event = db.relationship('Event', backref='transmission_edges') __table_args__ = ( db.Index('idx_event_id', 'event_id'), db.Index('idx_strength', 'strength'), db.Index('idx_from_to', 'from_node_id', 'to_node_id'), db.Index('idx_circular', 'is_circular'), ) # 鍦?paste-2.txt 鐨勬ā鍨嬪畾涔夐儴鍒嗘坊鍔? 
class EventSankeyFlow(db.Model): """浜嬩欢妗戝熀娴佹ā鍨?"" __tablename__ = 'event_sankey_flows' id = db.Column(db.Integer, primary_key=True) event_id = db.Column(db.Integer, db.ForeignKey('event.id'), nullable=False) # 娴佺殑鍩烘湰淇℃伅 source_node = db.Column(db.String(200), nullable=False) source_type = db.Column(db.Enum('event', 'policy', 'technology', 'industry', 'company', 'product'), nullable=False) source_level = db.Column(db.Integer, nullable=False, default=0) target_node = db.Column(db.String(200), nullable=False) target_type = db.Column(db.Enum('policy', 'technology', 'industry', 'company', 'product'), nullable=False) target_level = db.Column(db.Integer, nullable=False, default=1) # 娴侀噺淇℃伅 flow_value = db.Column(db.Numeric(10, 2), nullable=False) flow_ratio = db.Column(db.Numeric(5, 4), nullable=False) # 浼犲鏈哄埗 transmission_path = db.Column(db.String(500)) impact_description = db.Column(db.Text) evidence_strength = db.Column(db.Integer, default=50) # 鏃堕棿鎴? created_at = db.Column(db.DateTime, default=beijing_now) updated_at = db.Column(db.DateTime, default=beijing_now, onupdate=beijing_now) # 鍏崇郴 event = db.relationship('Event', backref='sankey_flows') __table_args__ = ( db.Index('idx_event_id', 'event_id'), db.Index('idx_source_target', 'source_node', 'target_node'), db.Index('idx_levels', 'source_level', 'target_level'), db.Index('idx_flow_value', 'flow_value'), ) class HistoricalEvent(db.Model): """鍘嗗彶浜嬩欢妯″瀷""" id = db.Column(db.Integer, primary_key=True) event_id = db.Column(db.Integer, db.ForeignKey('event.id')) title = db.Column(db.String(200)) content = db.Column(db.Text) event_date = db.Column(db.DateTime) relevance = db.Column(db.Integer) # 鐩稿叧鎬? 
importance = db.Column(db.Integer) # 閲嶈绋嬪害 related_stock = db.Column(db.JSON) # 淇濈暀JSON瀛楁 created_at = db.Column(db.DateTime, default=beijing_now) # 鏂板鍏崇郴 stocks = db.relationship('HistoricalEventStock', backref='historical_event', lazy='dynamic', cascade='all, delete-orphan') class HistoricalEventStock(db.Model): """鍘嗗彶浜嬩欢鐩稿叧鑲$エ妯″瀷""" __tablename__ = 'historical_event_stocks' id = db.Column(db.Integer, primary_key=True) historical_event_id = db.Column(db.Integer, db.ForeignKey('historical_event.id'), nullable=False) stock_code = db.Column(db.String(20), nullable=False) stock_name = db.Column(db.String(50)) relation_desc = db.Column(db.Text) correlation = db.Column(db.Float, default=0.5) sector = db.Column(db.String(100)) created_at = db.Column(db.DateTime, default=beijing_now) __table_args__ = ( db.UniqueConstraint('historical_event_id', 'stock_code', name='unique_event_stock'), ) # === 鑲$エ鐩堝埄棰勬祴锛堣嚜鏈夎〃锛?=== class StockForecastData(db.Model): """鑲$エ鐩堝埄棰勬祴鏁版嵁 婧愪簬鏈湴琛?stock_forecast_data锛岀敱鐙珛绂荤嚎绋嬪簭鍐欏叆銆? 瀛楁涓庤〃缁撴瀯淇濇寔涓€鑷达紝浠呯敤浜庤鍙栬仛鍚堝悗杈撳嚭鍓嶇鎶ヨ〃鎵€闇€鐨勭粨鏋勩€? 
""" __tablename__ = 'stock_forecast_data' id = db.Column(db.Integer, primary_key=True) stock_code = db.Column(db.String(6), nullable=False) indicator_name = db.Column(db.String(50), nullable=False) year_2022a = db.Column(db.Numeric(15, 2)) year_2023a = db.Column(db.Numeric(15, 2)) year_2024a = db.Column(db.Numeric(15, 2)) year_2025e = db.Column(db.Numeric(15, 2)) year_2026e = db.Column(db.Numeric(15, 2)) year_2027e = db.Column(db.Numeric(15, 2)) process_time = db.Column(db.DateTime, nullable=False) __table_args__ = ( db.UniqueConstraint('stock_code', 'indicator_name', name='unique_stock_indicator'), ) def values_by_year(self): years = ['2022A', '2023A', '2024A', '2025E', '2026E', '2027E'] vals = [self.year_2022a, self.year_2023a, self.year_2024a, self.year_2025e, self.year_2026e, self.year_2027e] def _to_float(x): try: return float(x) if x is not None else None except Exception: return None return years, [_to_float(v) for v in vals] @app.route('/api/events/', methods=['GET']) def get_event_detail(event_id): """鑾峰彇浜嬩欢璇︽儏""" try: event = Event.query.get_or_404(event_id) # 澧炲姞娴忚璁℃暟 event.view_count += 1 db.session.commit() return jsonify({ 'success': True, 'data': { 'id': event.id, 'title': event.title, 'description': event.description, 'event_type': event.event_type, 'status': event.status, 'start_time': event.start_time.isoformat() if event.start_time else None, 'end_time': event.end_time.isoformat() if event.end_time else None, 'created_at': event.created_at.isoformat() if event.created_at else None, 'hot_score': event.hot_score, 'view_count': event.view_count, 'trending_score': event.trending_score, 'post_count': event.post_count, 'follower_count': event.follower_count, 'related_industries': event.related_industries, 'keywords': event.keywords_list, 'importance': event.importance, 'related_avg_chg': event.related_avg_chg, 'related_max_chg': event.related_max_chg, 'related_week_chg': event.related_week_chg, 'invest_score': event.invest_score, 
'expectation_surprise_score': event.expectation_surprise_score, 'creator_id': event.creator_id, 'has_chain_analysis': ( EventTransmissionNode.query.filter_by(event_id=event_id).first() is not None or EventSankeyFlow.query.filter_by(event_id=event_id).first() is not None ), 'is_following': False, # 闇€瑕佹牴鎹綋鍓嶇敤鎴风姸鎬佸垽鏂? } }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/events//stocks', methods=['GET']) def get_related_stocks(event_id): """鑾峰彇鐩稿叧鑲$エ鍒楄〃""" try: # 璁㈤槄鎺у埗锛氱浉鍏虫爣鐨勯渶瑕?Pro 鍙婁互涓? if not _has_required_level('pro'): return jsonify({'success': False, 'error': '闇€瑕丳ro璁㈤槄', 'required_level': 'pro'}), 403 event = Event.query.get_or_404(event_id) stocks = event.related_stocks.order_by(RelatedStock.correlation.desc()).all() stocks_data = [] for stock in stocks: # 澶勭悊 relation_desc锛氬彧鏈夊綋 retrieved_sources 鏄暟缁勬椂鎵嶄娇鐢ㄦ柊鏍煎紡 if stock.retrieved_sources is not None and isinstance(stock.retrieved_sources, list): # retrieved_sources 鏄湁鏁堟暟缁勶紝浣跨敤鏂版牸寮? relation_desc_value = {"data": stock.retrieved_sources} else: # retrieved_sources 涓嶆槸鏁扮粍锛堝彲鑳芥槸 {"raw": "..."} 绛夊紓甯告牸寮忥級锛屽洖閫€鍒板師濮嬫枃鏈? 
relation_desc_value = stock.relation_desc stocks_data.append({ 'id': stock.id, 'stock_code': stock.stock_code, 'stock_name': stock.stock_name, 'sector': stock.sector, 'relation_desc': relation_desc_value, 'retrieved_sources': stock.retrieved_sources, 'correlation': stock.correlation, 'momentum': stock.momentum, 'created_at': stock.created_at.isoformat() if stock.created_at else None, 'updated_at': stock.updated_at.isoformat() if stock.updated_at else None }) return jsonify({ 'success': True, 'data': stocks_data }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/events//stocks', methods=['POST']) def add_related_stock(event_id): """娣诲姞鐩稿叧鑲$エ""" try: event = Event.query.get_or_404(event_id) data = request.get_json() # 楠岃瘉蹇呰瀛楁 if not data.get('stock_code') or not data.get('relation_desc'): return jsonify({'success': False, 'error': '缂哄皯蹇呰瀛楁'}), 400 # 妫€鏌ユ槸鍚﹀凡瀛樺湪 existing = RelatedStock.query.filter_by( event_id=event_id, stock_code=data['stock_code'] ).first() if existing: return jsonify({'success': False, 'error': '璇ヨ偂绁ㄥ凡瀛樺湪'}), 400 # 鍒涘缓鏂扮殑鐩稿叧鑲$エ璁板綍 new_stock = RelatedStock( event_id=event_id, stock_code=data['stock_code'], stock_name=data.get('stock_name', ''), sector=data.get('sector', ''), relation_desc=data['relation_desc'], correlation=data.get('correlation', 0.5), momentum=data.get('momentum', '') ) db.session.add(new_stock) db.session.commit() return jsonify({ 'success': True, 'data': { 'id': new_stock.id, 'stock_code': new_stock.stock_code, 'relation_desc': new_stock.relation_desc } }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/stocks/', methods=['DELETE']) def delete_related_stock(stock_id): """鍒犻櫎鐩稿叧鑲$エ""" try: stock = RelatedStock.query.get_or_404(stock_id) db.session.delete(stock) db.session.commit() return jsonify({'success': True, 'message': '鍒犻櫎鎴愬姛'}) except Exception as e: db.session.rollback() return jsonify({'success': 
False, 'error': str(e)}), 500 @app.route('/api/events/by-stocks', methods=['POST']) def get_events_by_stocks(): """ 閫氳繃鑲$エ浠g爜鍒楄〃鑾峰彇鍏宠仈鐨勪簨浠讹紙鏂伴椈锛? 鐢ㄤ簬姒傚康涓績鏃堕棿杞达細鑱氬悎姒傚康涓嬫墍鏈夎偂绁ㄧ殑鐩稿叧鏂伴椈 璇锋眰浣? { "stock_codes": ["000001.SZ", "600000.SH", ...], # 鑲$エ浠g爜鍒楄〃 "start_date": "2024-01-01", # 鍙€夛紝寮€濮嬫棩鏈? "end_date": "2024-12-31", # 鍙€夛紝缁撴潫鏃ユ湡 "limit": 100 # 鍙€夛紝闄愬埗杩斿洖鏁伴噺锛岄粯璁?00 } """ try: data = request.get_json() stock_codes = data.get('stock_codes', []) start_date_str = data.get('start_date') end_date_str = data.get('end_date') limit = data.get('limit', 100) if not stock_codes: return jsonify({'success': False, 'error': '缂哄皯鑲$エ浠g爜鍒楄〃'}), 400 # 杞崲鑲$エ浠g爜鏍煎紡锛氭蹇礎PI杩斿洖鐨勬槸涓嶅甫鍚庣紑鐨勶紙濡?00000锛夛紝 # 浣唕elated_stock琛ㄤ腑瀛樺偍鐨勬槸甯﹀悗缂€鐨勶紙濡?00000.SH锛? def normalize_stock_code(code): """灏嗚偂绁ㄤ唬鐮佹爣鍑嗗寲涓哄甫鍚庣紑鐨勬牸寮?"" if not code: return code # 濡傛灉宸茬粡甯﹀悗缂€锛岀洿鎺ヨ繑鍥? if '.' in str(code): return code code = str(code).strip() # 鏍规嵁浠g爜鍓嶇紑鍒ゆ柇浜ゆ槗鎵€ if code.startswith('6'): return f"{code}.SH" # 涓婃捣 elif code.startswith('0') or code.startswith('3'): return f"{code}.SZ" # 娣卞湷 elif code.startswith('8') or code.startswith('4'): return f"{code}.BJ" # 鍖椾氦鎵€ else: return code # 鏈煡鏍煎紡锛屼繚鎸佸師鏍? # 鍚屾椂鍖呭惈甯﹀悗缂€鍜屼笉甯﹀悗缂€鐨勭増鏈紝鎻愰珮鍖归厤鐜? normalized_codes = set() for code in stock_codes: if code: normalized_codes.add(str(code)) # 鍘熷鏍煎紡 normalized_codes.add(normalize_stock_code(code)) # 甯﹀悗缂€鏍煎紡 # 濡傛灉鍘熷甯﹀悗缂€锛屼篃鍔犲叆涓嶅甫鍚庣紑鐨勭増鏈? if '.' in str(code): normalized_codes.add(str(code).split('.')[0]) # 鏋勫缓鏌ヨ锛氶€氳繃 RelatedStock 琛ㄦ壘鍒板叧鑱旂殑浜嬩欢 query = db.session.query(Event).join( RelatedStock, Event.id == RelatedStock.event_id ).filter( RelatedStock.stock_code.in_(list(normalized_codes)) ) # 鏃ユ湡杩囨护锛堜娇鐢?start_time 瀛楁锛? if start_date_str: try: start_date = datetime.strptime(start_date_str, '%Y-%m-%d') query = query.filter(Event.start_time >= start_date) except ValueError: pass if end_date_str: try: end_date = datetime.strptime(end_date_str, '%Y-%m-%d') # 璁剧疆涓哄綋澶╃粨鏉? 
end_date = end_date.replace(hour=23, minute=59, second=59) query = query.filter(Event.start_time <= end_date) except ValueError: pass # 鍘婚噸骞舵帓搴忥紙浣跨敤 start_time 瀛楁锛? query = query.distinct().order_by(Event.start_time.desc()) # 闄愬埗鏁伴噺 if limit: query = query.limit(limit) events = query.all() # 鏋勫缓杩斿洖鏁版嵁 events_data = [] for event in events: # 鑾峰彇璇ヤ簨浠跺叧鑱旂殑鑲$エ淇℃伅锛堝湪璇锋眰鐨勮偂绁ㄥ垪琛ㄤ腑鐨勶級 related_stocks_in_list = [ { 'stock_code': rs.stock_code, 'stock_name': rs.stock_name, 'sector': rs.sector } for rs in event.related_stocks if rs.stock_code in stock_codes ] events_data.append({ 'id': event.id, 'title': event.title, 'description': event.description, 'event_date': event.start_time.isoformat() if event.start_time else None, 'published_time': event.start_time.strftime('%Y-%m-%d %H:%M:%S') if event.start_time else None, 'source': 'event', # 鏍囪鏉ユ簮涓轰簨浠剁郴缁? 'importance': event.importance, 'view_count': event.view_count, 'hot_score': event.hot_score, 'related_stocks': related_stocks_in_list, 'event_type': event.event_type, 'created_at': event.created_at.isoformat() if event.created_at else None }) return jsonify({ 'success': True, 'data': events_data, 'total': len(events_data) }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/events//concepts', methods=['GET']) def get_related_concepts(event_id): """鑾峰彇鐩稿叧姒傚康鍒楄〃锛堜粠 related_concepts 琛級""" try: # 璁㈤槄鎺у埗锛氱浉鍏虫蹇甸渶瑕?Pro 鍙婁互涓? 
if not _has_required_level('pro'): return jsonify({'success': False, 'error': '闇€瑕丳ro璁㈤槄', 'required_level': 'pro'}), 403 event = Event.query.get_or_404(event_id) concepts = event.related_concepts.all() concepts_data = [] for concept in concepts: concepts_data.append({ 'id': concept.id, 'concept': concept.concept, 'reason': concept.reason, 'created_at': concept.created_at.isoformat() if concept.created_at else None }) return jsonify({ 'success': True, 'data': concepts_data }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/events//historical', methods=['GET']) def get_historical_events(event_id): """鑾峰彇鍘嗗彶浜嬩欢瀵规瘮""" try: event = Event.query.get_or_404(event_id) historical_events = event.historical_events.order_by(HistoricalEvent.event_date.desc()).all() events_data = [] for hist_event in historical_events: events_data.append({ 'id': hist_event.id, 'title': hist_event.title, 'content': hist_event.content, 'event_date': hist_event.event_date.isoformat() if hist_event.event_date else None, 'importance': hist_event.importance, 'relevance': hist_event.relevance, 'created_at': hist_event.created_at.isoformat() if hist_event.created_at else None }) # 璁㈤槄鎺у埗锛氬厤璐圭敤鎴蜂粎杩斿洖鍓?鏉★紱Pro/Max杩斿洖鍏ㄩ儴 info = _get_current_subscription_info() sub_type = (info.get('type') or 'free').lower() if sub_type == 'free': return jsonify({ 'success': True, 'data': events_data[:2], 'truncated': len(events_data) > 2, 'required_level': 'pro' }) return jsonify({'success': True, 'data': events_data}) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/historical-events//stocks', methods=['GET']) def get_historical_event_stocks(event_id): """鑾峰彇鍘嗗彶浜嬩欢鐩稿叧鑲$エ鍒楄〃""" try: # 鐩存帴鏌ヨ鍘嗗彶浜嬩欢锛屼笉闇€瑕侀€氳繃涓讳簨浠? 
hist_event = HistoricalEvent.query.get_or_404(event_id) stocks = hist_event.stocks.order_by(HistoricalEventStock.correlation.desc()).all() # 鑾峰彇浜嬩欢瀵瑰簲鐨勪氦鏄撴棩 event_trading_date = None if hist_event.event_date: event_trading_date = get_trading_day_near_date(hist_event.event_date) stocks_data = [] for stock in stocks: stock_data = { 'id': stock.id, 'stock_code': stock.stock_code, 'stock_name': stock.stock_name, 'sector': stock.sector, 'relation_desc': stock.relation_desc, 'correlation': stock.correlation, 'created_at': stock.created_at.isoformat() if stock.created_at else None } # 娣诲姞娑ㄥ箙鏁版嵁 if event_trading_date: try: # 鏌ヨ鑲$エ鍦ㄤ簨浠跺搴斾氦鏄撴棩鐨勬暟鎹? # ea_trade 琛ㄥ瓧娈碉細F007N=鏈€杩戞垚浜や环(鏀剁洏浠?, F010N=娑ㄨ穼骞? base_stock_code = stock.stock_code.split('.')[0] if stock.stock_code else '' # 鏃ユ湡鏍煎紡杞崲涓?YYYYMMDD 鏁存暟锛坋a_trade.TRADEDATE 鏄?int 绫诲瀷锛? if hasattr(event_trading_date, 'strftime'): trade_date_int = int(event_trading_date.strftime('%Y%m%d')) else: trade_date_int = int(str(event_trading_date).replace('-', '')) with engine.connect() as conn: query = text(""" SELECT F007N as close_price, F010N as change_pct FROM ea_trade WHERE SECCODE = :stock_code AND TRADEDATE = :trading_date LIMIT 1 """) result = conn.execute(query, { 'stock_code': base_stock_code, 'trading_date': trade_date_int }).fetchone() if result: stock_data['event_day_close'] = float(result[0]) if result[0] else None stock_data['event_day_change_pct'] = float(result[1]) if result[1] else None print(f"[DEBUG] 鑲$エ{base_stock_code}鍦▄trade_date_int}: close={result[0]}, change_pct={result[1]}") else: stock_data['event_day_close'] = None stock_data['event_day_change_pct'] = None except Exception as e: print(f"鏌ヨ鑲$エ{stock.stock_code}鍦▄event_trading_date}鐨勬暟鎹け璐? 
{e}") stock_data['event_day_close'] = None stock_data['event_day_change_pct'] = None else: stock_data['event_day_close'] = None stock_data['event_day_change_pct'] = None stocks_data.append(stock_data) return jsonify({ 'success': True, 'data': stocks_data, 'event_trading_date': event_trading_date.isoformat() if event_trading_date else None }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/events//expectation-score', methods=['GET']) def get_expectation_score(event_id): """鑾峰彇瓒呴鏈熷緱鍒?"" try: event = Event.query.get_or_404(event_id) # 濡傛灉浜嬩欢鏈夎秴棰勬湡寰楀垎锛岀洿鎺ヨ繑鍥? if event.expectation_surprise_score is not None: score = event.expectation_surprise_score else: # 濡傛灉娌℃湁锛屾牴鎹巻鍙蹭簨浠惰绠椾竴涓ā鎷熷緱鍒? historical_events = event.historical_events.all() if historical_events: # 鍩轰簬鍘嗗彶浜嬩欢鏁伴噺鍜岄噸瑕佹€ц绠楀緱鍒? total_importance = sum(ev.importance or 0 for ev in historical_events) avg_importance = total_importance / len(historical_events) if historical_events else 0 score = min(100, max(0, int(avg_importance * 20 + len(historical_events) * 5))) else: # 榛樿寰楀垎 score = 65 return jsonify({ 'success': True, 'data': { 'score': score, 'description': '鍩轰簬鍘嗗彶浜嬩欢鍒ゆ柇褰撳墠浜嬩欢鐨勮秴棰勬湡鎯呭喌锛屾弧鍒?00鍒? 
} }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/events//follow', methods=['POST']) def toggle_event_follow(event_id): """鍒囨崲浜嬩欢鍏虫敞鐘舵€侊紙闇€鐧诲綍锛?"" if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 try: event = Event.query.get_or_404(event_id) user_id = session['user_id'] existing = EventFollow.query.filter_by(user_id=user_id, event_id=event_id).first() if existing: # 鍙栨秷鍏虫敞 db.session.delete(existing) event.follower_count = max(0, (event.follower_count or 0) - 1) db.session.commit() return jsonify({'success': True, 'data': {'is_following': False, 'follower_count': event.follower_count}}) else: # 鍏虫敞 follow = EventFollow(user_id=user_id, event_id=event_id) db.session.add(follow) event.follower_count = (event.follower_count or 0) + 1 db.session.commit() return jsonify({'success': True, 'data': {'is_following': True, 'follower_count': event.follower_count}}) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/events//transmission', methods=['GET']) def get_transmission_chain(event_id): try: # 璁㈤槄鎺у埗锛氫紶瀵奸摼鍒嗘瀽闇€瑕?Max 鍙婁互涓? if not _has_required_level('max'): return jsonify({'success': False, 'error': '闇€瑕丮ax璁㈤槄', 'required_level': 'max'}), 403 # 纭繚鏁版嵁搴撹繛鎺ユ槸娲昏穬鐨? db.session.execute(text('SELECT 1')) event = Event.query.get_or_404(event_id) nodes = EventTransmissionNode.query.filter_by(event_id=event_id).all() edges = EventTransmissionEdge.query.filter_by(event_id=event_id).all() # 杩囨护瀛ょ珛鑺傜偣 connected_node_ids = set() for edge in edges: connected_node_ids.add(edge.from_node_id) connected_node_ids.add(edge.to_node_id) # 鍙繚鐣欐湁杩炴帴鐨勮妭鐐? 
connected_nodes = [node for node in nodes if node.id in connected_node_ids] # 濡傛灉娌℃湁涓讳簨浠惰妭鐐癸紝涔熶繚鐣欎富浜嬩欢鑺傜偣 main_event_node = next((node for node in nodes if node.is_main_event), None) if main_event_node and main_event_node not in connected_nodes: connected_nodes.append(main_event_node) if not connected_nodes: return jsonify({'success': False, 'message': '鏆傛棤浼犲閾惧垎鏋愭暟鎹?}) # 鑺傜偣绫诲瀷鍒颁腑鏂囩被鍒殑鏄犲皠 categories = { 'event': "浜嬩欢", 'industry': "琛屼笟", 'company': "鍏徃", 'policy': "鏀跨瓥", 'technology': "鎶€鏈?, 'market': "甯傚満", 'other': "鍏朵粬" } nodes_data = [] for node in connected_nodes: node_category = categories.get(node.node_type, "鍏朵粬") nodes_data.append({ 'id': str(node.id), # 杞崲涓哄瓧绗︿覆浠ヤ繚鎸佷竴鑷存€? 'name': node.node_name, 'category': node_category, 'value': node.importance_score or 20, 'extra': { 'node_type': node.node_type, 'description': node.node_description, 'importance_score': node.importance_score, 'stock_code': node.stock_code, 'is_main_event': node.is_main_event } }) edges_data = [] for edge in edges: # 纭繚杈圭殑涓ょ鑺傜偣閮藉湪杩炴帴鑺傜偣鍒楄〃涓? if edge.from_node_id in connected_node_ids and edge.to_node_id in connected_node_ids: edges_data.append({ 'source': str(edge.from_node_id), # 杞崲涓哄瓧绗︿覆浠ヤ繚鎸佷竴鑷存€? 'target': str(edge.to_node_id), # 杞崲涓哄瓧绗︿覆浠ヤ繚鎸佷竴鑷存€? 'value': edge.strength or 50, 'extra': { 'transmission_type': edge.transmission_type, 'transmission_mechanism': edge.transmission_mechanism, 'direction': edge.direction, 'strength': edge.strength, 'impact': edge.impact, 'is_circular': edge.is_circular, } }) return jsonify({ 'success': True, 'data': { 'nodes': nodes_data, 'edges': edges_data } }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 # 淇鑲$エ鎶ヤ环API - 鏀寔GET鍜孭OST鏂规硶 @app.route('/api/stock/quotes', methods=['GET', 'POST']) def get_stock_quotes(): """ 鑾峰彇鑲$エ琛屾儏鏁版嵁锛堜娇鐢ㄥ叏灞€浜ゆ槗鏃ユ暟鎹紝涓?batch-kline 淇濇寔涓€鑷达級 - 鑲$エ鍚嶇О锛氫粠 MySQL ea_stocklist 鏌ヨ - 浜ゆ槗鏃ユ暟鎹細浣跨敤鍏ㄥ眬 trading_days锛堜粠 tdays.csv 鍔犺浇锛? 
- 鍓嶄竴浜ゆ槗鏃ユ敹鐩樹环锛氫粠 MySQL ea_trade 鏌ヨ - 瀹炴椂浠锋牸锛氫粠 ClickHouse stock_minute 鏌ヨ """ try: if request.method == 'GET': codes_str = request.args.get('codes', '') codes = [code.strip() for code in codes_str.split(',') if code.strip()] event_time_str = request.args.get('event_time') else: codes = request.json.get('codes', []) event_time_str = request.json.get('event_time') if not codes: return jsonify({'success': False, 'error': '璇锋彁渚涜偂绁ㄤ唬鐮?}), 400 # 鏍囧噯鍖栬偂绁ㄤ唬鐮? def normalize_stock_code(code): if '.' in code: return code if code.startswith(('6',)): return f"{code}.SH" elif code.startswith(('8', '9', '4')): return f"{code}.BJ" else: return f"{code}.SZ" original_codes = codes normalized_codes = [normalize_stock_code(code) for code in codes] code_mapping = dict(zip(original_codes, normalized_codes)) # 澶勭悊浜嬩欢鏃堕棿 if event_time_str: try: event_time = datetime.fromisoformat(event_time_str.replace('Z', '+00:00')) except: event_time = datetime.now() else: event_time = datetime.now() current_time = datetime.now() # ==================== 鏌ヨ鑲$エ鍚嶇О锛堜娇鐢?Redis 缂撳瓨锛?==================== base_codes = list(set([code.split('.')[0] for code in codes])) stock_names = get_stock_names(base_codes) # 鏋勫缓瀹屾暣鐨勫悕绉版槧灏? full_stock_names = {} for orig_code, norm_code in code_mapping.items(): base_code = orig_code.split('.')[0] name = stock_names.get(base_code, f"鑲$エ{base_code}") full_stock_names[orig_code] = name full_stock_names[norm_code] = name # ==================== 浣跨敤鍏ㄥ眬浜ゆ槗鏃ユ暟鎹紙澶勭悊璺ㄥ懆鏈満鏅級 ==================== # 浣跨敤鏂扮殑杈呭姪鍑芥暟澶勭悊璺ㄥ懆鏈満鏅細 # - 鍛ㄤ簲15:00鍚庡埌鍛ㄤ竴15:00鍓嶏紝鍒嗘椂鍥炬樉绀哄懆涓€琛屾儏锛屾定璺屽箙鍩轰簬鍛ㄤ簲鏀剁洏浠? 
target_date, prev_trading_day = get_target_and_prev_trading_day(event_time) if not target_date: return jsonify({ 'success': True, 'data': {code: {'name': full_stock_names.get(code, f'鑲$エ{code}'), 'price': None, 'change': None} for code in original_codes} }) start_datetime = datetime.combine(target_date, dt_time(9, 30)) end_datetime = datetime.combine(target_date, dt_time(15, 0)) results = {} print(f"鎵归噺澶勭悊 {len(codes)} 鍙偂绁? {codes[:5]}{'...' if len(codes) > 5 else ''}, 鐩爣浜ゆ槗鏃? {target_date}, 娑ㄨ穼骞呭熀鍑嗘棩: {prev_trading_day}, 鏃堕棿鑼冨洿: {start_datetime} - {end_datetime}") # 鍒濆鍖?ClickHouse 瀹㈡埛绔? client = get_clickhouse_client() # ==================== 鏌ヨ鍓嶄竴浜ゆ槗鏃ユ敹鐩樹环锛堜娇鐢?Redis 缂撳瓨锛?==================== try: prev_close_map = {} if prev_trading_day: # ea_trade 琛ㄧ殑 TRADEDATE 鏍煎紡鏄?YYYYMMDD锛堟棤杩炲瓧绗︼級 prev_day_str = prev_trading_day.strftime('%Y%m%d') if hasattr(prev_trading_day, 'strftime') else str(prev_trading_day).replace('-', '') base_codes = list(set([code.split('.')[0] for code in codes])) # 浣跨敤 Redis 缂撳瓨鑾峰彇鍓嶆敹鐩樹环 base_close_map = get_prev_close(base_codes, prev_day_str) print(f"鍓嶄竴浜ゆ槗鏃?{prev_day_str})鏀剁洏浠? 鑾峰彇鍒?{len(base_close_map)} 鏉★紙Redis缂撳瓨锛?) # 涓烘瘡涓爣鍑嗗寲浠g爜鍒嗛厤鏀剁洏浠? for norm_code in normalized_codes: base_code = norm_code.split('.')[0] if base_code in base_close_map: prev_close_map[norm_code] = base_close_map[base_code] # 鎵归噺鏌ヨ褰撳墠浠锋牸鏁版嵁锛堜粠 ClickHouse锛? # 浣跨敤 argMax 鍑芥暟鑾峰彇鏈€鏂颁环鏍硷紝姣旂獥鍙e嚱鏁版晥鐜囬珮寰堝 batch_price_query = """ SELECT code, argMax(close, timestamp) as last_price FROM stock_minute WHERE code IN %(codes)s AND timestamp >= %(start)s AND timestamp <= %(end)s GROUP BY code """ batch_data = client.execute(batch_price_query, { 'codes': normalized_codes, 'start': start_datetime, 'end': end_datetime }) print(f"鎵归噺鏌ヨ杩斿洖 {len(batch_data)} 鏉′环鏍兼暟鎹?) # 瑙f瀽鎵归噺鏌ヨ缁撴灉 price_data_map = {} for row in batch_data: code = row[0] last_price = float(row[1]) if row[1] is not None else None prev_close = prev_close_map.get(code) # 璁$畻娑ㄨ穼骞? 
change_pct = None if last_price is not None and prev_close is not None and prev_close > 0: change_pct = (last_price - prev_close) / prev_close * 100 price_data_map[code] = { 'price': last_price, 'change': change_pct } # 缁勮缁撴灉 for orig_code in original_codes: norm_code = code_mapping[orig_code] price_info = price_data_map.get(norm_code) if price_info: results[orig_code] = { 'price': price_info['price'], 'change': price_info['change'], 'name': full_stock_names.get(orig_code, f'鑲$エ{orig_code.split(".")[0]}') } else: results[orig_code] = { 'price': None, 'change': None, 'name': full_stock_names.get(orig_code, f'鑲$エ{orig_code.split(".")[0]}') } except Exception as e: print(f"鎵归噺鏌ヨ澶辫触: {e}锛屽洖閫€鍒伴€愬彧鏌ヨ") # 闄嶇骇鏂规锛氶€愬彧鑲$エ鏌ヨ for orig_code in original_codes: norm_code = code_mapping[orig_code] try: # 鏌ヨ褰撳墠浠锋牸 current_data = client.execute(""" SELECT close FROM stock_minute WHERE code = %(code)s AND timestamp >= %(start)s AND timestamp <= %(end)s ORDER BY timestamp DESC LIMIT 1 """, {'code': norm_code, 'start': start_datetime, 'end': end_datetime}) last_price = float(current_data[0][0]) if current_data and current_data[0] and current_data[0][0] else None # 鏌ヨ鍓嶄竴浜ゆ槗鏃ユ敹鐩樹环 prev_close = None if prev_trading_day and last_price is not None: base_code = orig_code.split('.')[0] # ea_trade 琛ㄧ殑 TRADEDATE 鏍煎紡鏄?YYYYMMDD锛堟棤杩炲瓧绗︼級 prev_day_str = prev_trading_day.strftime('%Y%m%d') if hasattr(prev_trading_day, 'strftime') else str(prev_trading_day).replace('-', '') with engine.connect() as conn: prev_result = conn.execute(text(""" SELECT F007N as close_price FROM ea_trade WHERE SECCODE = :code AND TRADEDATE = :trade_date """), {'code': base_code, 'trade_date': prev_day_str}).fetchone() prev_close = float(prev_result[0]) if prev_result and prev_result[0] else None # 璁$畻娑ㄨ穼骞? 
change_pct = None if last_price is not None and prev_close is not None and prev_close > 0: change_pct = (last_price - prev_close) / prev_close * 100 results[orig_code] = { 'price': last_price, 'change': change_pct, 'name': full_stock_names.get(orig_code, f'鑲$エ{orig_code.split(".")[0]}') } except Exception as inner_e: print(f"Error processing stock {orig_code}: {inner_e}") results[orig_code] = {'price': None, 'change': None, 'name': full_stock_names.get(orig_code, f'鑲$エ{orig_code.split(".")[0]}')} # 杩斿洖鏍囧噯鏍煎紡 return jsonify({'success': True, 'data': results}) except Exception as e: print(f"Stock quotes API error: {e}") return jsonify({'success': False, 'error': str(e)}), 500 # ==================== ClickHouse 杩炴帴姹狅紙鍗曚緥妯″紡锛?==================== _clickhouse_client = None _clickhouse_client_lock = threading.Lock() def get_clickhouse_client(): """鑾峰彇 ClickHouse 瀹㈡埛绔紙鍗曚緥妯″紡锛岄伩鍏嶉噸澶嶅垱寤鸿繛鎺ワ級""" global _clickhouse_client if _clickhouse_client is None: with _clickhouse_client_lock: if _clickhouse_client is None: _clickhouse_client = Cclient( host='127.0.0.1', port=9000, user='default', password='Zzl33818!', database='stock' ) print("[ClickHouse] 鍒涘缓鏂拌繛鎺ワ紙鍗曚緥锛?) return _clickhouse_client @app.route('/api/account/calendar/events', methods=['GET', 'POST']) def account_calendar_events(): """杩斿洖褰撳墠鐢ㄦ埛鐨勬姇璧勮鍒掍笌鍏虫敞鐨勬湭鏉ヤ簨浠讹紙鍚堝苟锛夈€? GET: 鍙寜鏃ユ湡鑼冨洿/鏈堜唤杩囨护锛汸OST: 鏂板鎶曡祫璁″垝锛堝啓鍏?InvestmentPlan锛夈€? 
""" try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 if request.method == 'POST': data = request.get_json() or {} title = data.get('title') event_date_str = data.get('event_date') or data.get('date') plan_type = data.get('type') or 'plan' description = data.get('description') or data.get('content') or '' stocks = data.get('stocks') or [] if not title or not event_date_str: return jsonify({'success': False, 'error': '缂哄皯蹇呭~瀛楁'}), 400 try: event_date = datetime.fromisoformat(event_date_str).date() except Exception: return jsonify({'success': False, 'error': '鏃ユ湡鏍煎紡閿欒'}), 400 plan = InvestmentPlan( user_id=session['user_id'], date=event_date, title=title, content=description, type=plan_type, stocks=json.dumps(stocks), tags=json.dumps(data.get('tags', [])), status=data.get('status', 'active') ) db.session.add(plan) db.session.commit() return jsonify({'success': True, 'data': { 'id': plan.id, 'title': plan.title, 'event_date': plan.date.isoformat(), 'type': plan.type, 'description': plan.content, 'stocks': json.loads(plan.stocks) if plan.stocks else [], 'source': 'plan' }}) # GET # 瑙f瀽杩囨护鍙傛暟锛歞ate 鎴?(year, month) 鎴?(start_date, end_date) date_str = request.args.get('date') year = request.args.get('year', type=int) month = request.args.get('month', type=int) start_date_str = request.args.get('start_date') end_date_str = request.args.get('end_date') start_date = None end_date = None if date_str: try: d = datetime.fromisoformat(date_str).date() start_date = d end_date = d except Exception: pass elif year and month: # 鏈堜唤鑼冨洿 start_date = datetime(year, month, 1).date() if month == 12: end_date = datetime(year + 1, 1, 1).date() - timedelta(days=1) else: end_date = datetime(year, month + 1, 1).date() - timedelta(days=1) elif start_date_str and end_date_str: try: start_date = datetime.fromisoformat(start_date_str).date() end_date = datetime.fromisoformat(end_date_str).date() except Exception: start_date = None end_date = None # 
鏌ヨ鎶曡祫璁″垝 plans_query = InvestmentPlan.query.filter_by(user_id=session['user_id']) if start_date and end_date: plans_query = plans_query.filter(InvestmentPlan.date >= start_date, InvestmentPlan.date <= end_date) elif start_date: plans_query = plans_query.filter(InvestmentPlan.date == start_date) plans = plans_query.order_by(InvestmentPlan.date.asc()).all() plan_events = [{ 'id': p.id, 'title': p.title, 'event_date': p.date.isoformat(), 'type': p.type or 'plan', 'description': p.content, 'importance': 3, 'stocks': json.loads(p.stocks) if p.stocks else [], 'source': 'plan' } for p in plans] # 鏌ヨ鍏虫敞鐨勬湭鏉ヤ簨浠? follows = FutureEventFollow.query.filter_by(user_id=session['user_id']).all() future_event_ids = [f.future_event_id for f in follows] future_events = [] if future_event_ids: # 浣跨敤 SELECT * 浠ヤ究鑾峰彇鎵€鏈夊瓧娈碉紙鍖呮嫭鏂板瓧娈碉級 base_sql = """ SELECT * FROM future_events WHERE data_id IN :event_ids \ """ params = {'event_ids': tuple(future_event_ids)} # 鏃ユ湡杩囨护锛堟寜 calendar_time 鐨勬棩鏈燂級 if start_date and end_date: base_sql += " AND DATE(calendar_time) BETWEEN :start_date AND :end_date" params.update({'start_date': start_date, 'end_date': end_date}) elif start_date: base_sql += " AND DATE(calendar_time) = :start_date" params.update({'start_date': start_date}) base_sql += " ORDER BY calendar_time" result = db.session.execute(text(base_sql), params) for row in result: # 浣跨敤鏂板瓧娈靛洖閫€閫昏緫鑾峰彇 former former_value = get_future_event_field(row, 'second_modified_text', 'former') # 鑾峰彇 related_stocks锛屼紭鍏堜娇鐢?best_matches best_matches = getattr(row, 'best_matches', None) if hasattr(row, 'best_matches') else None if best_matches and str(best_matches).strip(): rs = parse_best_matches(best_matches) else: rs = parse_json_field(getattr(row, 'related_stocks', None)) # 鐢熸垚鑲$エ鏍囩鍒楄〃 stock_tags = [] try: for it in rs: if isinstance(it, dict): # 鏂扮粨鏋? 
stock_tags.append(f"{it.get('code', '')} {it.get('name', '')}") elif isinstance(it, (list, tuple)) and len(it) >= 2: stock_tags.append(f"{it[0]} {it[1]}") elif isinstance(it, str): stock_tags.append(it) except Exception: pass future_events.append({ 'id': row.data_id, 'title': row.title, 'event_date': (row.calendar_time.date().isoformat() if row.calendar_time else None), 'type': 'future_event', 'importance': int(row.star) if getattr(row, 'star', None) is not None else 3, 'description': former_value or '', 'stocks': stock_tags, 'is_following': True, 'source': 'future' }) return jsonify({'success': True, 'data': plan_events + future_events}) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/account/calendar/events/', methods=['DELETE']) def delete_account_calendar_event(event_id): """鍒犻櫎鐢ㄦ埛鍒涘缓鐨勬姇璧勮鍒掍簨浠讹紙涓嶅奖鍝嶅叧娉ㄧ殑鏈潵浜嬩欢锛夈€?"" try: if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 plan = InvestmentPlan.query.filter_by(id=event_id, user_id=session['user_id']).first() if not plan: return jsonify({'success': False, 'error': '鏈壘鍒拌璁板綍'}), 404 db.session.delete(plan) db.session.commit() return jsonify({'success': True}) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 # ==================== 鐏垫椿灞忓疄鏃惰鎯?API ==================== # 浠?ClickHouse 瀹炴椂琛屾儏琛ㄨ幏鍙栨渶鏂版暟鎹紙鐢ㄤ簬鐩樺悗/WebSocket 鏃犳暟鎹椂鐨勫洖閫€锛? @app.route('/api/flex-screen/quotes', methods=['POST']) def get_flex_screen_quotes(): """ 鑾峰彇鐏垫椿灞忚鎯呮暟鎹? 浼樺厛浠庡疄鏃惰鎯呰〃鏌ヨ锛屽鏋滄病鏈夊垯浠庡垎閽熺嚎琛ㄦ煡璇? 璇锋眰浣? 
{ "codes": ["000001.SZ", "399001.SZ", "600519.SH"], "include_order_book": false // 鏄惁鍖呭惈浜旀。鐩樺彛 } 杩斿洖: { "success": true, "data": { "000001.SZ": { "security_id": "000001", "name": "骞冲畨閾惰", "last_px": 10.50, "prev_close_px": 10.20, "open_px": 10.30, "high_px": 10.55, "low_px": 10.15, "total_volume_trade": 1000000, "total_value_trade": 10500000, "change": 0.30, "change_pct": 2.94, "bid_prices": [10.49, 10.48, ...], "bid_volumes": [1000, 2000, ...], "ask_prices": [10.50, 10.51, ...], "ask_volumes": [800, 1200, ...], "update_time": "2024-12-11 15:00:00" }, ... }, "source": "realtime" | "minute" // 鏁版嵁鏉ユ簮 } """ try: data = request.json or {} codes = data.get('codes', []) include_order_book = data.get('include_order_book', False) if not codes: return jsonify({'success': False, 'error': '璇锋彁渚涜偂绁ㄤ唬鐮?}), 400 client = get_clickhouse_client() results = {} source = 'realtime' # 鍒嗙涓婁氦鎵€鍜屾繁浜ゆ墍浠g爜 sse_codes = [] # 涓婁氦鎵€ szse_stock_codes = [] # 娣变氦鎵€鑲$エ szse_index_codes = [] # 娣变氦鎵€鎸囨暟 for code in codes: base_code = code.split('.')[0] if code.endswith('.SH'): sse_codes.append(base_code) elif code.endswith('.SZ'): # 399 寮€澶存槸鎸囨暟 if base_code.startswith('399'): szse_index_codes.append(base_code) else: szse_stock_codes.append(base_code) # 鑾峰彇鑲$エ鍚嶇О stock_names = {} with engine.connect() as conn: base_codes = list(set([code.split('.')[0] for code in codes])) if base_codes: placeholders = ','.join([f':code{i}' for i in range(len(base_codes))]) params = {f'code{i}': code for i, code in enumerate(base_codes)} result = conn.execute(text( f"SELECT SECCODE, SECNAME FROM ea_stocklist WHERE SECCODE IN ({placeholders})" ), params).fetchall() stock_names = {row[0]: row[1] for row in result} # 鏌ヨ娣变氦鎵€鑲$エ瀹炴椂琛屾儏 if szse_stock_codes: try: order_book_cols = "" if include_order_book: order_book_cols = """, bid_price1, bid_volume1, bid_price2, bid_volume2, bid_price3, bid_volume3, bid_price4, bid_volume4, bid_price5, bid_volume5, ask_price1, ask_volume1, ask_price2, ask_volume2, ask_price3, 
ask_volume3, ask_price4, ask_volume4, ask_price5, ask_volume5""" szse_stock_query = f""" SELECT security_id, last_price, prev_close, open_price, high_price, low_price, volume, amount, num_trades, upper_limit_price, lower_limit_price, trading_phase_code, trade_time {order_book_cols} FROM stock.szse_stock_realtime WHERE trade_date = today() AND security_id IN %(codes)s ORDER BY security_id, trade_time DESC LIMIT 1 BY security_id """ szse_stock_data = client.execute(szse_stock_query, {'codes': szse_stock_codes}) for row in szse_stock_data: security_id = row[0] full_code = f"{security_id}.SZ" last_px = float(row[1]) if row[1] else 0 prev_close = float(row[2]) if row[2] else 0 change = last_px - prev_close if last_px and prev_close else 0 change_pct = (change / prev_close * 100) if prev_close else 0 quote = { 'security_id': security_id, 'name': stock_names.get(security_id, ''), 'last_px': last_px, 'prev_close_px': prev_close, 'open_px': float(row[3]) if row[3] else 0, 'high_px': float(row[4]) if row[4] else 0, 'low_px': float(row[5]) if row[5] else 0, 'total_volume_trade': float(row[6]) if row[6] else 0, 'total_value_trade': float(row[7]) if row[7] else 0, 'num_trades': int(row[8]) if row[8] else 0, 'upper_limit_px': float(row[9]) if row[9] else None, 'lower_limit_px': float(row[10]) if row[10] else None, 'trading_phase_code': row[11], 'change': change, 'change_pct': change_pct, 'update_time': str(row[12]) if row[12] else None, } if include_order_book and len(row) > 13: quote['bid_prices'] = [float(row[i]) if row[i] else 0 for i in range(13, 23, 2)] quote['bid_volumes'] = [float(row[i]) if row[i] else 0 for i in range(14, 24, 2)] quote['ask_prices'] = [float(row[i]) if row[i] else 0 for i in range(23, 33, 2)] quote['ask_volumes'] = [float(row[i]) if row[i] else 0 for i in range(24, 34, 2)] results[full_code] = quote except Exception as e: print(f"鏌ヨ娣变氦鎵€瀹炴椂琛屾儏澶辫触: {e}") # 鏌ヨ娣变氦鎵€鎸囨暟瀹炴椂琛屾儏 if szse_index_codes: try: szse_index_query = """ SELECT security_id, 
current_index, prev_close, open_index, high_index, low_index, close_index, volume, amount, num_trades, trade_time FROM stock.szse_index_realtime WHERE trade_date = today() AND security_id IN %(codes)s ORDER BY security_id, trade_time DESC LIMIT 1 BY security_id """ szse_index_data = client.execute(szse_index_query, {'codes': szse_index_codes}) for row in szse_index_data: security_id = row[0] full_code = f"{security_id}.SZ" current_index = float(row[1]) if row[1] else 0 prev_close = float(row[2]) if row[2] else 0 change = current_index - prev_close if current_index and prev_close else 0 change_pct = (change / prev_close * 100) if prev_close else 0 results[full_code] = { 'security_id': security_id, 'name': stock_names.get(security_id, ''), 'last_px': current_index, 'prev_close_px': prev_close, 'open_px': float(row[3]) if row[3] else 0, 'high_px': float(row[4]) if row[4] else 0, 'low_px': float(row[5]) if row[5] else 0, 'close_px': float(row[6]) if row[6] else None, 'total_volume_trade': float(row[7]) if row[7] else 0, 'total_value_trade': float(row[8]) if row[8] else 0, 'num_trades': int(row[9]) if row[9] else 0, 'change': change, 'change_pct': change_pct, 'update_time': str(row[10]) if row[10] else None, 'bid_prices': [], 'bid_volumes': [], 'ask_prices': [], 'ask_volumes': [], } except Exception as e: print(f"鏌ヨ娣变氦鎵€鎸囨暟瀹炴椂琛屾儏澶辫触: {e}") # 鏌ヨ涓婁氦鎵€瀹炴椂琛屾儏锛堝鏋滄湁 sse_stock_realtime 琛級 if sse_codes: try: sse_query = """ SELECT security_id, last_price, prev_close, open_price, high_price, low_price, volume, amount, trade_time FROM stock.sse_stock_realtime WHERE trade_date = today() AND security_id IN %(codes)s ORDER BY security_id, trade_time DESC LIMIT 1 BY security_id """ sse_data = client.execute(sse_query, {'codes': sse_codes}) for row in sse_data: security_id = row[0] full_code = f"{security_id}.SH" last_px = float(row[1]) if row[1] else 0 prev_close = float(row[2]) if row[2] else 0 change = last_px - prev_close if last_px and prev_close else 0 change_pct = (change / 
prev_close * 100) if prev_close else 0 results[full_code] = { 'security_id': security_id, 'name': stock_names.get(security_id, ''), 'last_px': last_px, 'prev_close_px': prev_close, 'open_px': float(row[3]) if row[3] else 0, 'high_px': float(row[4]) if row[4] else 0, 'low_px': float(row[5]) if row[5] else 0, 'total_volume_trade': float(row[6]) if row[6] else 0, 'total_value_trade': float(row[7]) if row[7] else 0, 'change': change, 'change_pct': change_pct, 'update_time': str(row[8]) if row[8] else None, 'bid_prices': [], 'bid_volumes': [], 'ask_prices': [], 'ask_volumes': [], } except Exception as e: print(f"鏌ヨ涓婁氦鎵€瀹炴椂琛屾儏澶辫触: {e}锛屽皾璇曚粠鍒嗛挓绾胯〃鏌ヨ") # 瀵逛簬瀹炴椂琛ㄤ腑娌℃湁鏁版嵁鐨勮偂绁紝浠庡垎閽熺嚎琛ㄦ煡璇? missing_codes = [code for code in codes if code not in results] if missing_codes: source = 'minute' if not results else 'mixed' try: # 浠庡垎閽熺嚎琛ㄦ煡璇㈡渶鏂版暟鎹? minute_query = """ SELECT code, close, open, high, low, volume, amt, timestamp FROM stock.stock_minute WHERE toDate(timestamp) = today() AND code IN %(codes)s ORDER BY code, timestamp DESC LIMIT 1 BY code """ minute_data = client.execute(minute_query, {'codes': missing_codes}) # 鑾峰彇鏄ㄦ敹浠? prev_close_map = {} with engine.connect() as conn: base_codes = list(set([code.split('.')[0] for code in missing_codes])) if base_codes: # 鑾峰彇涓婁竴浜ゆ槗鏃? 
prev_day_result = conn.execute(text(""" SELECT EXCHANGE_DATE FROM trading_days WHERE EXCHANGE_DATE < CURDATE() ORDER BY EXCHANGE_DATE DESC LIMIT 1 """)).fetchone() if prev_day_result: prev_day = prev_day_result[0] placeholders = ','.join([f':code{i}' for i in range(len(base_codes))]) params = {f'code{i}': code for i, code in enumerate(base_codes)} params['trade_date'] = prev_day prev_result = conn.execute(text(f""" SELECT SECCODE, F007N as close_price FROM ea_trade WHERE SECCODE IN ({placeholders}) AND TRADEDATE = :trade_date """), params).fetchall() prev_close_map = {row[0]: float(row[1]) if row[1] else 0 for row in prev_result} for row in minute_data: code = row[0] base_code = code.split('.')[0] last_px = float(row[1]) if row[1] else 0 prev_close = prev_close_map.get(base_code, 0) change = last_px - prev_close if last_px and prev_close else 0 change_pct = (change / prev_close * 100) if prev_close else 0 results[code] = { 'security_id': base_code, 'name': stock_names.get(base_code, ''), 'last_px': last_px, 'prev_close_px': prev_close, 'open_px': float(row[2]) if row[2] else 0, 'high_px': float(row[3]) if row[3] else 0, 'low_px': float(row[4]) if row[4] else 0, 'total_volume_trade': float(row[5]) if row[5] else 0, 'total_value_trade': float(row[6]) if row[6] else 0, 'change': change, 'change_pct': change_pct, 'update_time': str(row[7]) if row[7] else None, 'bid_prices': [], 'bid_volumes': [], 'ask_prices': [], 'ask_volumes': [], } except Exception as e: print(f"鏌ヨ鍒嗛挓绾挎暟鎹け璐? {e}") return jsonify({ 'success': True, 'data': results, 'source': source }) except Exception as e: print(f"鐏垫椿灞忚鎯呮煡璇㈠け璐? {e}") import traceback traceback.print_exc() return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/stock//kline') def get_stock_kline(stock_code): chart_type = request.args.get('type', 'minute') event_time = request.args.get('event_time') # 鏄惁璺宠繃"涓嬩竴涓氦鏄撴棩"閫昏緫锛? 
# - 濡傛灉娌℃湁浼?event_time锛堢伒娲诲睆绛夊疄鏃惰鎯呭満鏅級锛岀洏鍚庡簲鏄剧ず褰撳ぉ鏁版嵁 # - 濡傛灉浼犱簡 event_time锛圕ommunity 浜嬩欢绛夊満鏅級锛屼娇鐢ㄥ師閫昏緫 skip_next_day = event_time is None try: event_datetime = datetime.fromisoformat(event_time) if event_time else datetime.now() except ValueError: return jsonify({'error': 'Invalid event_time format'}), 400 # 纭繚鑲$エ浠g爜鍖呭惈鍚庣紑锛圕lickHouse 涓暟鎹甫鍚庣紑锛? if '.' not in stock_code: if stock_code.startswith('6'): stock_code = f"{stock_code}.SH" # 涓婃捣 elif stock_code.startswith(('8', '9', '4')): stock_code = f"{stock_code}.BJ" # 鍖椾氦鎵€ else: stock_code = f"{stock_code}.SZ" # 娣卞湷 # 鑾峰彇鑲$エ鍚嶇О with engine.connect() as conn: result = conn.execute(text( "SELECT SECNAME FROM ea_stocklist WHERE SECCODE = :code" ), {"code": stock_code.split('.')[0]}).fetchone() stock_name = result[0] if result else 'Unknown' if chart_type == 'daily': return get_daily_kline(stock_code, event_datetime, stock_name) elif chart_type == 'minute': return get_minute_kline(stock_code, event_datetime, stock_name, skip_next_day=skip_next_day) elif chart_type == 'timeline': return get_timeline_data(stock_code, event_datetime, stock_name) else: # 瀵逛簬鏈煡鐨勭被鍨嬶紝杩斿洖閿欒 return jsonify({'error': f'Unsupported chart type: {chart_type}'}), 400 @app.route('/api/stock/batch-kline', methods=['POST']) def get_batch_kline_data(): """鎵归噺鑾峰彇澶氬彧鑲$エ鐨凨绾?鍒嗘椂鏁版嵁 璇锋眰浣擄細{ codes: string[], type: 'timeline'|'daily', event_time?: string, days_before?: number, # 鏌ヨ浜嬩欢鏃ユ湡鍓嶅灏戝ぉ鐨勬暟鎹紝榛樿60锛屾渶澶?65 end_date?: string # 鍒嗛〉鍔犺浇鏃舵寚瀹氱粨鏉熸棩鏈燂紙鐢ㄤ簬鍔犺浇鏇存棭鐨勬暟鎹級 } 杩斿洖锛歿 success: true, data: { [code]: { data: [], trade_date: '', ... } }, has_more: boolean } """ try: data = request.json codes = data.get('codes', []) chart_type = data.get('type', 'timeline') event_time = data.get('event_time') days_before = min(int(data.get('days_before', 60)), 365) # 榛樿60澶╋紝鏈€澶?65澶? 
custom_end_date = data.get('end_date') # 鐢ㄤ簬鍒嗛〉鍔犺浇鏇存棭鏁版嵁 if not codes: return jsonify({'success': False, 'error': '璇锋彁渚涜偂绁ㄤ唬鐮佸垪琛?}), 400 if len(codes) > 50: return jsonify({'success': False, 'error': '鍗曟鏈€澶氭煡璇?0鍙偂绁?}), 400 # 鏍囧噯鍖栬偂绁ㄤ唬鐮侊紙纭繚甯﹀悗缂€锛岀敤浜?ClickHouse 鏌ヨ锛? def normalize_stock_code(code): """灏嗚偂绁ㄤ唬鐮佹爣鍑嗗寲涓哄甫鍚庣紑鏍煎紡锛堝 300274.SZ锛?"" if '.' in code: return code # 宸茬粡甯﹀悗缂€ # 鏍规嵁浠g爜瑙勫垯娣诲姞鍚庣紑 if code.startswith('6'): return f"{code}.SH" # 涓婃捣 elif code.startswith(('8', '9', '4')): return f"{code}.BJ" # 鍖椾氦鎵€ else: return f"{code}.SZ" # 娣卞湷 # 淇濈暀鍘熷浠g爜鐢ㄤ簬杩斿洖缁撴灉锛屽悓鏃跺垱寤烘爣鍑嗗寲浠g爜鐢ㄤ簬 ClickHouse 鏌ヨ original_codes = codes normalized_codes = [normalize_stock_code(code) for code in codes] code_mapping = dict(zip(original_codes, normalized_codes)) reverse_mapping = dict(zip(normalized_codes, original_codes)) try: event_datetime = datetime.fromisoformat(event_time) if event_time else datetime.now() except ValueError: return jsonify({'success': False, 'error': 'Invalid event_time format'}), 400 client = get_clickhouse_client() # 鎵归噺鑾峰彇鑲$エ鍚嶇О锛堜娇鐢?Redis 缂撳瓨锛? base_codes = list(set([code.split('.')[0] for code in codes])) stock_names = get_stock_names(base_codes) # 纭畾鐩爣浜ゆ槗鏃ュ拰娑ㄨ穼骞呭熀鍑嗘棩锛堝鐞嗚法鍛ㄦ湯鍦烘櫙锛? # - 鍛ㄤ簲15:00鍚庡埌鍛ㄤ竴15:00鍓嶏紝鍒嗘椂鍥炬樉绀哄懆涓€琛屾儏锛屾定璺屽箙鍩轰簬鍛ㄤ簲鏀剁洏浠? target_date, prev_trading_day = get_target_and_prev_trading_day(event_datetime) if not target_date: # 杩斿洖绌烘暟鎹紙浣跨敤鍘熷浠g爜浣滀负 key锛? return jsonify({ 'success': True, 'data': {code: {'data': [], 'trade_date': event_datetime.date().strftime('%Y-%m-%d'), 'type': chart_type} for code in original_codes} }) start_time = datetime.combine(target_date, dt_time(9, 30)) end_time = datetime.combine(target_date, dt_time(15, 0)) results = {} if chart_type == 'timeline': # 鎵归噺鑾峰彇鍓嶆敹鐩樹环锛堜娇鐢?Redis 缂撳瓨锛? # 浣跨敤 prev_trading_day 浣滀负鍩哄噯鏃ユ湡锛堝鐞嗚法鍛ㄦ湯鍦烘櫙锛? 
prev_close_map = {} if prev_trading_day: prev_date_str = prev_trading_day.strftime('%Y%m%d') base_codes = list(set([code.split('.')[0] for code in codes])) prev_close_map = get_prev_close(base_codes, prev_date_str) print(f"鍒嗘椂鍥惧熀鍑嗘棩鏈? {prev_trading_day}, 鑾峰彇鍒?{len(prev_close_map)} 鏉″墠鏀剁洏浠凤紙Redis缂撳瓨锛?) # 鎵归噺鏌ヨ鍒嗘椂鏁版嵁锛堜娇鐢ㄦ爣鍑嗗寲浠g爜鏌ヨ ClickHouse锛? batch_data = client.execute(""" SELECT code, timestamp, close, volume FROM stock_minute WHERE code IN %(codes)s AND timestamp BETWEEN %(start)s AND %(end)s ORDER BY code, timestamp """, { 'codes': normalized_codes, # 浣跨敤鏍囧噯鍖栦唬鐮? 'start': start_time, 'end': end_time }) # 鎸夎偂绁ㄤ唬鐮佸垎缁勶紝鍚屾椂璁$畻鍧囦环鍜屾定璺屽箙 stock_data = {} stock_accum = {} # 鐢ㄤ簬璁$畻鍧囦环鐨勭疮璁″€? for row in batch_data: norm_code = row[0] base_code = norm_code.split('.')[0] price = float(row[2]) volume = float(row[3]) if norm_code not in stock_data: stock_data[norm_code] = [] stock_accum[norm_code] = {'total_amount': 0, 'total_volume': 0} # 绱璁$畻鍧囦环 stock_accum[norm_code]['total_amount'] += price * volume stock_accum[norm_code]['total_volume'] += volume total_vol = stock_accum[norm_code]['total_volume'] avg_price = stock_accum[norm_code]['total_amount'] / total_vol if total_vol > 0 else price # 璁$畻娑ㄨ穼骞? prev_close = prev_close_map.get(base_code) change_percent = ((price - prev_close) / prev_close * 100) if prev_close and prev_close > 0 else 0 stock_data[norm_code].append({ 'time': row[1].strftime('%H:%M'), 'price': price, 'avg_price': round(avg_price, 2), 'volume': volume, 'change_percent': round(change_percent, 2) }) # 缁勮缁撴灉锛堜娇鐢ㄥ師濮嬩唬鐮佷綔涓?key 杩斿洖锛? 
for orig_code in original_codes: norm_code = code_mapping[orig_code] base_code = orig_code.split('.')[0] stock_name = stock_names.get(base_code, f'鑲$エ{base_code}') data_list = stock_data.get(norm_code, []) prev_close = prev_close_map.get(base_code) results[orig_code] = { 'code': orig_code, 'name': stock_name, 'data': data_list, 'trade_date': target_date.strftime('%Y-%m-%d'), 'type': 'timeline', 'prev_close': prev_close } elif chart_type == 'daily': # 鎵归噺鏌ヨ鏃ョ嚎鏁版嵁锛堜粠MySQL ea_trade琛級 with engine.connect() as conn: base_codes = list(set([code.split('.')[0] for code in codes])) if base_codes: placeholders = ','.join([f':code{i}' for i in range(len(base_codes))]) params = {f'code{i}': code for i, code in enumerate(base_codes)} # 纭畾鏌ヨ鐨勬棩鏈熻寖鍥? # 濡傛灉鎸囧畾浜?custom_end_date锛岀敤浜庡垎椤靛姞杞芥洿鏃╃殑鏁版嵁 if custom_end_date: try: end_date_obj = datetime.strptime(custom_end_date, '%Y-%m-%d').date() except ValueError: end_date_obj = target_date else: end_date_obj = target_date # TRADEDATE 鏄暣鏁版牸寮?YYYYMMDD锛岄渶瑕佽浆鎹㈡棩鏈熸牸寮? start_date = end_date_obj - timedelta(days=days_before) params['start_date'] = int(start_date.strftime('%Y%m%d')) params['end_date'] = int(end_date_obj.strftime('%Y%m%d')) daily_result = conn.execute(text(f""" SELECT SECCODE, TRADEDATE, F003N as open, F005N as high, F006N as low, F007N as close, F004N as volume FROM ea_trade WHERE SECCODE IN ({placeholders}) AND TRADEDATE BETWEEN :start_date AND :end_date ORDER BY SECCODE, TRADEDATE """), params).fetchall() # 鎸夎偂绁ㄤ唬鐮佸垎缁? 
stock_data = {} for row in daily_result: code_base = row[0] if code_base not in stock_data: stock_data[code_base] = [] # 鏃ユ湡鏍煎紡澶勭悊锛歍RADEDATE 鍙兘鏄?datetime 鎴?int(YYYYMMDD) trade_date_val = row[1] if hasattr(trade_date_val, 'strftime'): date_str = trade_date_val.strftime('%Y-%m-%d') elif isinstance(trade_date_val, int): # 鏁存暟鏍煎紡 YYYYMMDD -> YYYY-MM-DD date_str = f"{str(trade_date_val)[:4]}-{str(trade_date_val)[4:6]}-{str(trade_date_val)[6:8]}" else: date_str = str(trade_date_val) stock_data[code_base].append({ 'time': date_str, # 缁熶竴浣跨敤 time 瀛楁锛屼笌鍓嶇鏈熸湜涓€鑷? 'open': float(row[2]) if row[2] else 0, 'high': float(row[3]) if row[3] else 0, 'low': float(row[4]) if row[4] else 0, 'close': float(row[5]) if row[5] else 0, 'volume': float(row[6]) if row[6] else 0 }) # 缁勮缁撴灉锛堜娇鐢ㄥ師濮嬩唬鐮佷綔涓?key 杩斿洖锛? # 鍚屾椂璁$畻鏈€鏃╂棩鏈燂紝鐢ㄤ簬鍒ゆ柇鏄惁杩樻湁鏇村鏁版嵁 earliest_dates = {} for orig_code in original_codes: base_code = orig_code.split('.')[0] stock_name = stock_names.get(base_code, f'鑲$エ{base_code}') data_list = stock_data.get(base_code, []) # 璁板綍姣忓彧鑲$エ鐨勬渶鏃╂棩鏈? if data_list: earliest_dates[orig_code] = data_list[0]['time'] results[orig_code] = { 'code': orig_code, 'name': stock_name, 'data': data_list, 'trade_date': target_date.strftime('%Y-%m-%d'), 'type': 'daily', 'earliest_date': data_list[0]['time'] if data_list else None } # 璁$畻鏄惁杩樻湁鏇村鍘嗗彶鏁版嵁锛堝熀浜庝簨浠舵棩鏈熷線鍓嶆帹365澶╋級 event_date = event_datetime.date() one_year_ago = event_date - timedelta(days=365) # 濡傛灉褰撳墠鏌ヨ鐨勮捣濮嬫棩鏈熻繕娌″埌涓€骞村墠锛屽垯杩樻湁鏇村鏁版嵁 has_more = start_date > one_year_ago if chart_type == 'daily' else False print(f"鎵归噺K绾挎煡璇㈠畬鎴? {len(codes)} 鍙偂绁? 绫诲瀷: {chart_type}, 浜ゆ槗鏃? {target_date}, days_before: {days_before}, has_more: {has_more}") return jsonify({ 'success': True, 'data': results, 'has_more': has_more, 'query_start_date': start_date.strftime('%Y-%m-%d') if chart_type == 'daily' else None, 'query_end_date': end_date_obj.strftime('%Y-%m-%d') if chart_type == 'daily' else None }) except Exception as e: print(f"鎵归噺K绾挎煡璇㈤敊璇? 
{e}") return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/stock//latest-minute', methods=['GET']) def get_latest_minute_data(stock_code): """鑾峰彇鏈€鏂颁氦鏄撴棩鐨勫垎閽熼鏁版嵁""" client = get_clickhouse_client() # 纭繚鑲$エ浠g爜鍖呭惈鍚庣紑 if '.' not in stock_code: if stock_code.startswith('6'): stock_code = f"{stock_code}.SH" # 涓婃捣 elif stock_code.startswith(('8', '9', '4')): stock_code = f"{stock_code}.BJ" # 鍖椾氦鎵€ else: stock_code = f"{stock_code}.SZ" # 娣卞湷 # 鑾峰彇鑲$エ鍚嶇О with engine.connect() as conn: result = conn.execute(text( "SELECT SECNAME FROM ea_stocklist WHERE SECCODE = :code" ), {"code": stock_code.split('.')[0]}).fetchone() stock_name = result[0] if result else 'Unknown' # 鏌ユ壘鏈€杩?0澶╁唴鏈夋暟鎹殑鏈€鏂颁氦鏄撴棩 target_date = None current_date = datetime.now().date() for i in range(30): check_date = current_date - timedelta(days=i) trading_day = get_trading_day_near_date(check_date) if trading_day and trading_day <= current_date: # 妫€鏌ヨ繖涓氦鏄撴棩鏄惁鏈夊垎閽熸暟鎹? test_data = client.execute(""" SELECT COUNT(*) FROM stock_minute WHERE code = %(code)s AND timestamp BETWEEN %(start)s AND %(end)s LIMIT 1 """, { 'code': stock_code, 'start': datetime.combine(trading_day, dt_time(9, 30)), 'end': datetime.combine(trading_day, dt_time(15, 0)) }) if test_data and test_data[0][0] > 0: target_date = trading_day break if not target_date: return jsonify({ 'error': 'No data available', 'code': stock_code, 'name': stock_name, 'data': [], 'trade_date': current_date.strftime('%Y-%m-%d'), 'type': 'minute' }) # 鑾峰彇鐩爣鏃ユ湡鐨勫畬鏁翠氦鏄撴椂娈垫暟鎹? 
data = client.execute(""" SELECT timestamp, open, high, low, close, volume, amt FROM stock_minute WHERE code = %(code)s AND timestamp BETWEEN %(start)s AND %(end)s ORDER BY timestamp """, { 'code': stock_code, 'start': datetime.combine(target_date, dt_time(9, 30)), 'end': datetime.combine(target_date, dt_time(15, 0)) }) kline_data = [{ 'time': row[0].strftime('%H:%M'), 'open': float(row[1]), 'high': float(row[2]), 'low': float(row[3]), 'close': float(row[4]), 'volume': float(row[5]), 'amount': float(row[6]) } for row in data] return jsonify({ 'code': stock_code, 'name': stock_name, 'data': kline_data, 'trade_date': target_date.strftime('%Y-%m-%d'), 'type': 'minute', 'is_latest': True }) @app.route('/api/stock//forecast-report', methods=['GET']) def get_stock_forecast_report(stock_code): """鍩轰簬 stock_forecast_data 杈撳嚭鎶ヨ〃鎵€闇€鏁版嵁缁撴瀯 杩斿洖锛? - income_profit_trend: 钀ヤ笟鏀跺叆/褰掓瘝鍑€鍒╂鼎瓒嬪娍 - growth_bars: 澧為暱鐜囨煴鐘跺浘鏁版嵁锛堝熀浜庤惀涓氭敹鍏ュ悓姣旓級 - eps_trend: EPS 鎶樼嚎 - pe_peg_axes: PE/PEG 鍙岃酱 - detail_table: 璇︾粏鏁版嵁琛ㄦ牸锛堜笌闄勪欢缁撴瀯涓€鑷达級 """ try: # 璇诲彇璇ヨ偂绁ㄦ墍鏈夋寚鏍? 
rows = StockForecastData.query.filter_by(stock_code=stock_code).all() if not rows: return jsonify({'success': False, 'error': 'no_data'}), 404 # 灏嗘寚鏍囨槧灏勪负瀛楀吀 indicators = {} for r in rows: years, vals = r.values_by_year() indicators[r.indicator_name] = dict(zip(years, vals)) def safe(x): return x if x is not None else None years = ['2022A', '2023A', '2024A', '2025E', '2026E', '2027E'] # 钀ヤ笟鏀跺叆涓庡噣鍒╂鼎瓒嬪娍 income = indicators.get('钀ヤ笟鎬绘敹鍏?鐧句竾鍏?', {}) profit = indicators.get('褰掓瘝鍑€鍒╂鼎(鐧句竾鍏?', {}) income_profit_trend = { 'years': years, 'income': [safe(income.get(y)) for y in years], 'profit': [safe(profit.get(y)) for y in years] } # 澧為暱鐜囨煴鐘讹紙鑻ヨ〃鍐呭凡鏈?澧為暱鐜?%)"锛岀洿鎺ヤ娇鐢紱鍚﹀垯鎸夎惀涓氭敹鍏ュ悓姣旇绠楋級 growth = indicators.get('澧為暱鐜?%)') if growth is None: # 璁$畻鍚屾瘮锛?(curr - prev)/prev*100 growth_vals = [] prev = None for y in years: curr = income.get(y) if prev is not None and prev not in (None, 0) and curr is not None: growth_vals.append(round((float(curr) - float(prev)) / float(prev) * 100, 2)) else: growth_vals.append(None) prev = curr else: growth_vals = [safe(growth.get(y)) for y in years] growth_bars = { 'years': years, 'revenue_growth_pct': growth_vals, 'net_profit_growth_pct': None # 濡傚悗缁渶瑕佸彲鎵╁睍 } # EPS 瓒嬪娍 eps = indicators.get('EPS(绋€閲?') or indicators.get('EPS(鍏?鑲?') or {} eps_trend = { 'years': years, 'eps': [safe(eps.get(y)) for y in years] } # PE / PEG 鍙岃酱 pe = indicators.get('PE') or {} peg = indicators.get('PEG') or {} pe_peg_axes = { 'years': years, 'pe': [safe(pe.get(y)) for y in years], 'peg': [safe(peg.get(y)) for y in years] } # 璇︾粏鏁版嵁琛ㄦ牸锛堝垪椤哄簭鍥哄畾锛? 
def fmt(val): try: return None if val is None else round(float(val), 2) except Exception: return None detail_rows = [ { '鎸囨爣': '钀ヤ笟鎬绘敹鍏?鐧句竾鍏?', **{y: fmt(income.get(y)) for y in years}, }, { '鎸囨爣': '澧為暱鐜?%)', **{y: fmt(v) for y, v in zip(years, growth_vals)}, }, { '鎸囨爣': '褰掓瘝鍑€鍒╂鼎(鐧句竾鍏?', **{y: fmt(profit.get(y)) for y in years}, }, { '鎸囨爣': 'EPS(绋€閲?', **{y: fmt(eps.get(y)) for y in years}, }, { '鎸囨爣': 'PE', **{y: fmt(pe.get(y)) for y in years}, }, { '鎸囨爣': 'PEG', **{y: fmt(peg.get(y)) for y in years}, }, ] return jsonify({ 'success': True, 'data': { 'income_profit_trend': income_profit_trend, 'growth_bars': growth_bars, 'eps_trend': eps_trend, 'pe_peg_axes': pe_peg_axes, 'detail_table': { 'years': years, 'rows': detail_rows } } }) except Exception as e: app.logger.error(f"forecast report error: {e}", exc_info=True) return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/stock//basic-info', methods=['GET']) def get_stock_basic_info(stock_code): """鑾峰彇鑲$エ鍩烘湰淇℃伅锛堟潵鑷猠a_baseinfo琛級""" try: with engine.connect() as conn: query = text(""" SELECT SECCODE, SECNAME, ORGNAME, F001V as en_name, F002V as en_short_name, F003V as legal_representative, F004V as reg_address, F005V as office_address, F006V as post_code, F007N as reg_capital, F009V as currency, F010D as establish_date, F011V as website, F012V as email, F013V as tel, F014V as fax, F015V as main_business, F016V as business_scope, F017V as company_intro, F018V as secretary, F019V as secretary_tel, F020V as secretary_fax, F021V as secretary_email, F024V as listing_status, F026V as province, F028V as city, F030V as industry_l1, F032V as industry_l2, F034V as sw_industry_l1, F036V as sw_industry_l2, F038V as sw_industry_l3, F039V as accounting_firm, F040V as law_firm, F041V as chairman, F042V as general_manager, F043V as independent_directors, F050V as credit_code, F054V as company_size, UPDATE_DATE FROM ea_baseinfo WHERE SECCODE = :stock_code LIMIT 1 """) result = conn.execute(query, {'stock_code': 
stock_code}).fetchone() if not result: return jsonify({ 'success': False, 'error': f'鏈壘鍒拌偂绁ㄤ唬鐮?{stock_code} 鐨勫熀鏈俊鎭? }), 404 # 杞崲涓哄瓧鍏? basic_info = {} result_dict = row_to_dict(result) for key, value in result_dict.items(): if isinstance(value, datetime): basic_info[key] = value.strftime('%Y-%m-%d') elif isinstance(value, Decimal): basic_info[key] = float(value) else: basic_info[key] = value return jsonify({ 'success': True, 'data': basic_info }) except Exception as e: app.logger.error(f"Error getting stock basic info: {e}", exc_info=True) return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/stock//quote-detail', methods=['GET']) def get_stock_quote_detail(stock_code): """鑾峰彇鑲$エ瀹屾暣琛屾儏鏁版嵁 - 渚?StockQuoteCard 浣跨敤 杩斿洖鏁版嵁鍖呮嫭锛? - 鍩虹淇℃伅锛氬悕绉般€佷唬鐮併€佽涓氬垎绫? - 浠锋牸淇℃伅锛氱幇浠枫€佹定璺屽箙銆佸紑鐩樸€佹敹鐩樸€佹渶楂樸€佹渶浣? - 鍏抽敭鎸囨爣锛氬競鐩堢巼銆佸競鍑€鐜囥€佹祦閫氬競鍊笺€?2鍛ㄩ珮浣? - 涓诲姏鍔ㄦ€侊細涓诲姏鍑€娴佸叆銆佹満鏋勬寔浠擄紙濡傛湁锛? """ try: # 鏍囧噯鍖栬偂绁ㄤ唬鐮侊紙鍘婚櫎鍚庣紑锛? base_code = stock_code.split('.')[0] if '.' in stock_code else stock_code result_data = { 'code': stock_code, 'name': '', 'industry': '', 'industry_l1': '', 'sw_industry_l1': '', 'sw_industry_l2': '', # 浠锋牸淇℃伅 'current_price': None, 'change_percent': None, 'today_open': None, 'yesterday_close': None, 'today_high': None, 'today_low': None, # 鍏抽敭鎸囨爣 'pe': None, 'pb': None, 'eps': None, 'market_cap': None, 'circ_mv': None, 'total_shares': None, # 鍙戣鎬昏偂鏈紙浜胯偂锛? 'float_shares': None, # 娴侀€氳偂鏈紙浜胯偂锛? 'turnover_rate': None, 'week52_high': None, 'week52_low': None, # 涓诲姏鍔ㄦ€侊紙棰勭暀瀛楁锛? 'main_net_inflow': None, 'institution_holding': None, 'buy_ratio': None, 'sell_ratio': None, 'update_time': None } with engine.connect() as conn: # 1. 鑾峰彇鏈€鏂颁氦鏄撴暟鎹紙鏉ヨ嚜 ea_trade锛? 
trade_query = text(""" SELECT t.SECCODE, t.SECNAME, t.TRADEDATE, t.F002N as pre_close, t.F003N as open_price, t.F004N as volume, t.F005N as high, t.F006N as low, t.F007N as close_price, t.F010N as change_pct, t.F011N as amount, t.F012N as turnover_rate, t.F020N as total_shares, t.F021N as float_shares, t.F026N as pe_ratio, b.F034V as sw_industry_l1, b.F036V as sw_industry_l2, b.F030V as industry_l1 FROM ea_trade t LEFT JOIN ea_baseinfo b ON t.SECCODE = b.SECCODE WHERE t.SECCODE = :stock_code ORDER BY t.TRADEDATE DESC LIMIT 1 """) trade_result = conn.execute(trade_query, {'stock_code': base_code}).fetchone() if trade_result: row = row_to_dict(trade_result) # 璋冭瘯鏃ュ織锛氭墦鍗版墍鏈夊瓧娈? app.logger.info(f"[quote-detail] stock={base_code}, row keys={list(row.keys())}") app.logger.info(f"[quote-detail] total_shares={row.get('total_shares')}, float_shares={row.get('float_shares')}, pe_ratio={row.get('pe_ratio')}") result_data['name'] = row.get('SECNAME') or '' result_data['current_price'] = float(row.get('close_price') or 0) result_data['change_percent'] = float(row.get('change_pct') or 0) result_data['today_open'] = float(row.get('open_price') or 0) result_data['yesterday_close'] = float(row.get('pre_close') or 0) result_data['today_high'] = float(row.get('high') or 0) result_data['today_low'] = float(row.get('low') or 0) pe_value = row.get('pe_ratio') or row.get('F026N') result_data['pe'] = float(pe_value) if pe_value else None result_data['turnover_rate'] = float(row.get('turnover_rate') or 0) result_data['sw_industry_l1'] = row.get('sw_industry_l1') or '' result_data['sw_industry_l2'] = row.get('sw_industry_l2') or '' result_data['industry_l1'] = row.get('industry_l1') or '' result_data['industry'] = row.get('sw_industry_l2') or row.get('sw_industry_l1') or '' # 璁$畻鑲℃湰鍜屽競鍊硷紙鍏煎鍒悕鍜屽師濮嬪瓧娈靛悕锛? 
total_shares = float(row.get('total_shares') or row.get('F020N') or 0) float_shares = float(row.get('float_shares') or row.get('F021N') or 0) close_price = float(row.get('close_price') or row.get('F007N') or 0) app.logger.info(f"[quote-detail] calculated: total_shares={total_shares}, float_shares={float_shares}") # 鍙戣鎬昏偂鏈紙浜胯偂锛? if total_shares > 0: total_shares_yi = total_shares / 100000000 # 杞负浜胯偂 result_data['total_shares'] = round(total_shares_yi, 2) # 娴侀€氳偂鏈紙浜胯偂锛? if float_shares > 0: float_shares_yi = float_shares / 100000000 # 杞负浜胯偂 result_data['float_shares'] = round(float_shares_yi, 2) # 璁$畻娴侀€氬競鍊硷紙浜垮厓锛? if float_shares > 0 and close_price > 0: circ_mv = (float_shares * close_price) / 100000000 # 杞负浜? result_data['circ_mv'] = round(circ_mv, 2) result_data['market_cap'] = f"{round(circ_mv, 2)}浜? trade_date = row.get('TRADEDATE') if trade_date: if hasattr(trade_date, 'strftime'): result_data['update_time'] = trade_date.strftime('%Y-%m-%d') else: result_data['update_time'] = str(trade_date) # 2. 
鑾峰彇52鍛ㄩ珮浣庝环 week52_query = text(""" SELECT MAX(F005N) as week52_high, MIN(F006N) as week52_low FROM ea_trade WHERE SECCODE = :stock_code AND TRADEDATE >= DATE_SUB(CURDATE(), INTERVAL 52 WEEK) AND F005N > 0 AND F006N > 0 """) week52_result = conn.execute(week52_query, {'stock_code': base_code}).fetchone() if week52_result: w52 = row_to_dict(week52_result) result_data['week52_high'] = float(w52.get('week52_high') or 0) result_data['week52_low'] = float(w52.get('week52_low') or 0) return jsonify({ 'success': True, 'data': result_data }) except Exception as e: app.logger.error(f"Error getting stock quote detail: {e}", exc_info=True) return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/stock//announcements', methods=['GET']) def get_stock_announcements(stock_code): """鑾峰彇鑲$エ鍏憡鍒楄〃""" try: limit = request.args.get('limit', 50, type=int) with engine.connect() as conn: query = text(""" SELECT F001D as announce_date, F002V as title, F003V as url, F004V as format, F005N as file_size, F006V as info_type, UPDATE_DATE FROM ea_baseinfolist WHERE SECCODE = :stock_code ORDER BY F001D DESC LIMIT :limit """) result = conn.execute(query, {'stock_code': stock_code, 'limit': limit}).fetchall() announcements = [] for row in result: announcement = {} for key, value in row_to_dict(row).items(): if value is None: announcement[key] = None elif isinstance(value, datetime): announcement[key] = value.strftime('%Y-%m-%d %H:%M:%S') elif isinstance(value, date): announcement[key] = value.strftime('%Y-%m-%d') elif isinstance(value, Decimal): announcement[key] = float(value) else: announcement[key] = value announcements.append(announcement) return jsonify({ 'success': True, 'data': announcements, 'total': len(announcements) }) except Exception as e: app.logger.error(f"Error getting stock announcements: {e}", exc_info=True) return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/stock//disclosure-schedule', methods=['GET']) def 
get_stock_disclosure_schedule(stock_code): """鑾峰彇鑲$エ璐㈡姤棰勬姭闇叉椂闂磋〃""" try: with engine.connect() as conn: query = text(""" SELECT distinct F001D as report_period, F002D as scheduled_date, F003D as change_date1, F004D as change_date2, F005D as change_date3, F006D as actual_date, F007D as change_date4, F008D as change_date5, MODTIME as mod_time FROM ea_pretime WHERE SECCODE = :stock_code ORDER BY F001D DESC LIMIT 20 """) result = conn.execute(query, {'stock_code': stock_code}).fetchall() schedules = [] for row in result: schedule = {} for key, value in row_to_dict(row).items(): if value is None: schedule[key] = None elif isinstance(value, datetime): schedule[key] = value.strftime('%Y-%m-%d %H:%M:%S') elif isinstance(value, date): schedule[key] = value.strftime('%Y-%m-%d') elif isinstance(value, Decimal): schedule[key] = float(value) else: schedule[key] = value # 璁$畻鏈€鏂扮殑棰勭害鏃ユ湡 latest_scheduled = schedule.get('scheduled_date') for change_field in ['change_date5', 'change_date4', 'change_date3', 'change_date2', 'change_date1']: if schedule.get(change_field): latest_scheduled = schedule[change_field] break schedule['latest_scheduled_date'] = latest_scheduled schedule['is_disclosed'] = bool(schedule.get('actual_date')) # 鏍煎紡鍖栨姤鍛婃湡鍚嶇О if schedule.get('report_period'): period_date = schedule['report_period'] if period_date.endswith('-03-31'): schedule['report_name'] = f"{period_date[:4]}骞翠竴瀛f姤" elif period_date.endswith('-06-30'): schedule['report_name'] = f"{period_date[:4]}骞翠腑鎶? elif period_date.endswith('-09-30'): schedule['report_name'] = f"{period_date[:4]}骞翠笁瀛f姤" elif period_date.endswith('-12-31'): schedule['report_name'] = f"{period_date[:4]}骞村勾鎶? 
else: schedule['report_name'] = period_date schedules.append(schedule) return jsonify({ 'success': True, 'data': schedules, 'total': len(schedules) }) except Exception as e: app.logger.error(f"Error getting disclosure schedule: {e}", exc_info=True) return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/stock//actual-control', methods=['GET']) def get_stock_actual_control(stock_code): """鑾峰彇鑲$エ瀹為檯鎺у埗浜轰俊鎭?"" try: with engine.connect() as conn: query = text(""" SELECT DECLAREDATE as declare_date, ENDDATE as end_date, F001V as direct_holder_id, F002V as direct_holder_name, F003V as actual_controller_id, F004V as actual_controller_name, F005N as holding_shares, F006N as holding_ratio, F007V as control_type_code, F008V as control_type, F012V as direct_controller_id, F013V as direct_controller_name, F014V as controller_type, ORGNAME as org_name, SECCODE as sec_code, SECNAME as sec_name FROM ea_actualcon WHERE SECCODE = :stock_code ORDER BY ENDDATE DESC, DECLAREDATE DESC LIMIT 20 """) result = conn.execute(query, {'stock_code': stock_code}).fetchall() control_info = [] for row in result: control_record = {} for key, value in row_to_dict(row).items(): if value is None: control_record[key] = None elif isinstance(value, datetime): control_record[key] = value.strftime('%Y-%m-%d %H:%M:%S') elif isinstance(value, date): control_record[key] = value.strftime('%Y-%m-%d') elif isinstance(value, Decimal): control_record[key] = float(value) else: control_record[key] = value control_info.append(control_record) return jsonify({ 'success': True, 'data': control_info, 'total': len(control_info) }) except Exception as e: app.logger.error(f"Error getting actual control info: {e}", exc_info=True) return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/stock//concentration', methods=['GET']) def get_stock_concentration(stock_code): """鑾峰彇鑲$エ鑲℃潈闆嗕腑搴︿俊鎭?"" try: with engine.connect() as conn: query = text(""" SELECT ENDDATE as end_date, F001V as stat_item, 
F002N as holding_shares, F003N as holding_ratio, F004N as ratio_change, ORGNAME as org_name, SECCODE as sec_code, SECNAME as sec_name FROM ea_concentration WHERE SECCODE = :stock_code ORDER BY ENDDATE DESC LIMIT 20 """) result = conn.execute(query, {'stock_code': stock_code}).fetchall() concentration_info = [] for row in result: concentration_record = {} for key, value in row_to_dict(row).items(): if value is None: concentration_record[key] = None elif isinstance(value, datetime): concentration_record[key] = value.strftime('%Y-%m-%d %H:%M:%S') elif isinstance(value, date): concentration_record[key] = value.strftime('%Y-%m-%d') elif isinstance(value, Decimal): concentration_record[key] = float(value) else: concentration_record[key] = value concentration_info.append(concentration_record) return jsonify({ 'success': True, 'data': concentration_info, 'total': len(concentration_info) }) except Exception as e: app.logger.error(f"Error getting concentration info: {e}", exc_info=True) return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/stock//management', methods=['GET']) def get_stock_management(stock_code): """鑾峰彇鑲$エ绠$悊灞備俊鎭?"" try: # 鑾峰彇鏄惁鍙樉绀哄湪鑱屼汉鍛樺弬鏁? 
active_only = request.args.get('active_only', 'true').lower() == 'true' with engine.connect() as conn: base_query = """ SELECT DECLAREDATE as declare_date, \ F001V as person_id, \ F002V as name, \ F007D as start_date, \ F008D as end_date, \ F009V as position_name, \ F010V as gender, \ F011V as education, \ F012V as birth_year, \ F013V as nationality, \ F014V as position_category_code, \ F015V as position_category, \ F016V as position_code, \ F017V as highest_degree, \ F019V as resume, \ F020C as is_active, \ ORGNAME as org_name, \ SECCODE as sec_code, \ SECNAME as sec_name FROM ea_management WHERE SECCODE = :stock_code \ """ if active_only: base_query += " AND F020C = '1'" base_query += " ORDER BY DECLAREDATE DESC, F007D DESC" query = text(base_query) result = conn.execute(query, {'stock_code': stock_code}).fetchall() management_info = [] for row in result: management_record = {} for key, value in row_to_dict(row).items(): if value is None: management_record[key] = None elif isinstance(value, datetime): management_record[key] = value.strftime('%Y-%m-%d %H:%M:%S') elif isinstance(value, date): management_record[key] = value.strftime('%Y-%m-%d') elif isinstance(value, Decimal): management_record[key] = float(value) else: management_record[key] = value management_info.append(management_record) return jsonify({ 'success': True, 'data': management_info, 'total': len(management_info) }) except Exception as e: app.logger.error(f"Error getting management info: {e}", exc_info=True) return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/stock//top-circulation-shareholders', methods=['GET']) def get_stock_top_circulation_shareholders(stock_code): """鑾峰彇鑲$エ鍗佸ぇ娴侀€氳偂涓滀俊鎭?"" try: limit = request.args.get('limit', 10, type=int) with engine.connect() as conn: query = text(""" SELECT DECLAREDATE as declare_date, ENDDATE as end_date, F001N as shareholder_rank, F002V as shareholder_id, F003V as shareholder_name, F004V as shareholder_type, F005N as holding_shares, 
F006N as total_share_ratio, F007N as circulation_share_ratio, F011V as share_nature, F012N as b_shares, F013N as h_shares, F014N as other_shares, ORGNAME as org_name, SECCODE as sec_code, SECNAME as sec_name FROM ea_tencirculation WHERE SECCODE = :stock_code ORDER BY ENDDATE DESC, F001N ASC LIMIT :limit """) result = conn.execute(query, {'stock_code': stock_code, 'limit': limit}).fetchall() shareholders_info = [] for row in result: shareholder_record = {} for key, value in row_to_dict(row).items(): if value is None: shareholder_record[key] = None elif isinstance(value, datetime): shareholder_record[key] = value.strftime('%Y-%m-%d %H:%M:%S') elif isinstance(value, date): shareholder_record[key] = value.strftime('%Y-%m-%d') elif isinstance(value, Decimal): shareholder_record[key] = float(value) else: shareholder_record[key] = value shareholders_info.append(shareholder_record) return jsonify({ 'success': True, 'data': shareholders_info, 'total': len(shareholders_info) }) except Exception as e: app.logger.error(f"Error getting top circulation shareholders: {e}", exc_info=True) return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/stock//top-shareholders', methods=['GET']) def get_stock_top_shareholders(stock_code): """鑾峰彇鑲$エ鍗佸ぇ鑲′笢淇℃伅""" try: limit = request.args.get('limit', 10, type=int) with engine.connect() as conn: query = text(""" SELECT DECLAREDATE as declare_date, ENDDATE as end_date, F001N as shareholder_rank, F002V as shareholder_name, F003V as shareholder_id, F004V as shareholder_type, F005N as holding_shares, F006N as total_share_ratio, F007N as circulation_share_ratio, F011V as share_nature, F016N as restricted_shares, F017V as concert_party_group, F018N as circulation_shares, ORGNAME as org_name, SECCODE as sec_code, SECNAME as sec_name FROM ea_tenshareholder WHERE SECCODE = :stock_code ORDER BY ENDDATE DESC, F001N ASC LIMIT :limit """) result = conn.execute(query, {'stock_code': stock_code, 'limit': limit}).fetchall() shareholders_info 
= [] for row in result: shareholder_record = {} for key, value in row_to_dict(row).items(): if value is None: shareholder_record[key] = None elif isinstance(value, datetime): shareholder_record[key] = value.strftime('%Y-%m-%d %H:%M:%S') elif isinstance(value, date): shareholder_record[key] = value.strftime('%Y-%m-%d') elif isinstance(value, Decimal): shareholder_record[key] = float(value) else: shareholder_record[key] = value shareholders_info.append(shareholder_record) return jsonify({ 'success': True, 'data': shareholders_info, 'total': len(shareholders_info) }) except Exception as e: app.logger.error(f"Error getting top shareholders: {e}", exc_info=True) return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/stock//branches', methods=['GET']) def get_stock_branches(stock_code): """鑾峰彇鑲$エ鍒嗘敮鏈烘瀯淇℃伅""" try: with engine.connect() as conn: query = text(""" SELECT CRECODE as cre_code, F001V as branch_name, F002V as register_capital, F003V as business_status, F004D as register_date, F005N as related_company_count, F006V as legal_person, ORGNAME as org_name, SECCODE as sec_code, SECNAME as sec_name FROM ea_branch WHERE SECCODE = :stock_code ORDER BY F004D DESC """) result = conn.execute(query, {'stock_code': stock_code}).fetchall() branches_info = [] for row in result: branch_record = {} for key, value in row_to_dict(row).items(): if value is None: branch_record[key] = None elif isinstance(value, datetime): branch_record[key] = value.strftime('%Y-%m-%d %H:%M:%S') elif isinstance(value, date): branch_record[key] = value.strftime('%Y-%m-%d') elif isinstance(value, Decimal): branch_record[key] = float(value) else: branch_record[key] = value branches_info.append(branch_record) return jsonify({ 'success': True, 'data': branches_info, 'total': len(branches_info) }) except Exception as e: app.logger.error(f"Error getting branches info: {e}", exc_info=True) return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/stock//patents', 
methods=['GET']) def get_stock_patents(stock_code): """鑾峰彇鑲$エ涓撳埄淇℃伅""" try: limit = request.args.get('limit', 50, type=int) patent_type = request.args.get('type', None) # 涓撳埄绫诲瀷绛涢€? with engine.connect() as conn: base_query = """ SELECT CRECODE as cre_code, \ F001V as patent_name, \ F002V as application_number, \ F003V as publication_number, \ F004V as classification_number, \ F005D as publication_date, \ F006D as application_date, \ F007V as patent_type, \ F008V as applicant, \ F009V as inventor, \ ID as id, \ ORGNAME as org_name, \ SECCODE as sec_code, \ SECNAME as sec_name FROM ea_patent WHERE SECCODE = :stock_code \ """ params = {'stock_code': stock_code, 'limit': limit} if patent_type: base_query += " AND F007V = :patent_type" params['patent_type'] = patent_type base_query += " ORDER BY F006D DESC, F005D DESC LIMIT :limit" query = text(base_query) result = conn.execute(query, params).fetchall() patents_info = [] for row in result: patent_record = {} for key, value in row_to_dict(row).items(): if value is None: patent_record[key] = None elif isinstance(value, datetime): patent_record[key] = value.strftime('%Y-%m-%d %H:%M:%S') elif isinstance(value, date): patent_record[key] = value.strftime('%Y-%m-%d') elif isinstance(value, Decimal): patent_record[key] = float(value) else: patent_record[key] = value patents_info.append(patent_record) return jsonify({ 'success': True, 'data': patents_info, 'total': len(patents_info) }) except Exception as e: app.logger.error(f"Error getting patents info: {e}", exc_info=True) return jsonify({'success': False, 'error': str(e)}), 500 def get_daily_kline(stock_code, event_datetime, stock_name): """澶勭悊鏃绾挎暟鎹?"" stock_code = stock_code.split('.')[0] with engine.connect() as conn: # 鑾峰彇浜嬩欢鏃ユ湡鍓嶅悗鐨勬暟鎹紙鍓?65澶?1骞达紝鍚?0澶╋級 kline_sql = """ WITH date_range AS (SELECT TRADEDATE \ FROM ea_trade \ WHERE SECCODE = :stock_code \ AND TRADEDATE BETWEEN DATE_SUB(:trade_date, INTERVAL 365 DAY) \ AND DATE_ADD(:trade_date, INTERVAL 30 DAY) \ GROUP BY 
TRADEDATE \ ORDER BY TRADEDATE) SELECT t.TRADEDATE, CAST(t.F003N AS FLOAT) as open, CAST(t.F007N AS FLOAT) as close, CAST(t.F005N AS FLOAT) as high, CAST(t.F006N AS FLOAT) as low, CAST(t.F004N AS FLOAT) as volume FROM ea_trade t JOIN date_range d \ ON t.TRADEDATE = d.TRADEDATE WHERE t.SECCODE = :stock_code ORDER BY t.TRADEDATE \ """ result = conn.execute(text(kline_sql), { "stock_code": stock_code, "trade_date": event_datetime.date() }).fetchall() if not result: return jsonify({ 'error': 'No data available', 'code': stock_code, 'name': stock_name, 'data': [], 'trade_date': event_datetime.date().strftime('%Y-%m-%d'), 'type': 'daily' }) kline_data = [{ 'time': row.TRADEDATE.strftime('%Y-%m-%d'), 'open': float(row.open), 'high': float(row.high), 'low': float(row.low), 'close': float(row.close), 'volume': float(row.volume) } for row in result] return jsonify({ 'code': stock_code, 'name': stock_name, 'data': kline_data, 'trade_date': event_datetime.date().strftime('%Y-%m-%d'), 'type': 'daily', 'is_history': True }) def get_minute_kline(stock_code, event_datetime, stock_name, skip_next_day=False): """澶勭悊鍒嗛挓K绾挎暟鎹? Args: stock_code: 鑲$エ浠g爜 event_datetime: 浜嬩欢鏃堕棿 stock_name: 鑲$エ鍚嶇О skip_next_day: 鏄惁璺宠繃"涓嬩竴涓氦鏄撴棩"閫昏緫锛堢敤浜庣伒娲诲睆鐩樺悗鏌ョ湅褰撳ぉ鏁版嵁锛? """ client = get_clickhouse_client() target_date = get_trading_day_near_date(event_datetime.date()) is_after_market = event_datetime.time() > dt_time(15, 0) # 鍙湁鍦ㄦ寚瀹氫簡 event_time 鍙傛暟鏃讹紙濡?Community 椤甸潰浜嬩欢锛夋墠璺宠浆鍒颁笅涓€涓氦鏄撴棩 # 鐏垫椿灞忕瓑瀹炴椂琛屾儏鍦烘櫙锛岀洏鍚庡簲鏄剧ず褰撳ぉ鏁版嵁 if target_date and is_after_market and not skip_next_day: # 濡傛灉鏄氦鏄撴棩涓斿凡鏀剁洏锛屾煡鎵句笅涓€涓氦鏄撴棩 next_trade_date = get_trading_day_near_date(target_date + timedelta(days=1)) if next_trade_date: target_date = next_trade_date if not target_date: return jsonify({ 'error': 'No data available', 'code': stock_code, 'name': stock_name, 'data': [], 'trade_date': event_datetime.date().strftime('%Y-%m-%d'), 'type': 'minute' }) # 鑾峰彇鐩爣鏃ユ湡鐨勫畬鏁翠氦鏄撴椂娈垫暟鎹? 
data = client.execute(""" SELECT timestamp, open, high, low, close, volume, amt FROM stock_minute WHERE code = %(code)s AND timestamp BETWEEN %(start)s AND %(end)s ORDER BY timestamp """, { 'code': stock_code, 'start': datetime.combine(target_date, dt_time(9, 30)), 'end': datetime.combine(target_date, dt_time(15, 0)) }) kline_data = [{ 'time': row[0].strftime('%H:%M'), 'open': float(row[1]), 'high': float(row[2]), 'low': float(row[3]), 'close': float(row[4]), 'volume': float(row[5]), 'amount': float(row[6]) } for row in data] return jsonify({ 'code': stock_code, 'name': stock_name, 'data': kline_data, 'trade_date': target_date.strftime('%Y-%m-%d'), 'type': 'minute', 'is_history': target_date < event_datetime.date() }) def get_timeline_data(stock_code, event_datetime, stock_name): """澶勭悊鍒嗘椂鍧囦环绾挎暟鎹紙timeline锛夈€? 瑙勫垯锛? - 鑻ヤ簨浠舵椂闂村湪浜ゆ槗鏃ョ殑15:00涔嬪悗锛屽垯灞曠ず涓嬩竴涓氦鏄撴棩鐨勫垎鏃舵暟鎹紱 - 鑻ヤ簨浠舵棩闈炰氦鏄撴棩锛屼紭鍏堝睍绀轰笅涓€涓氦鏄撴棩锛涘鏃狅紝鍒欏洖閫€鍒版渶杩戜竴涓氦鏄撴棩锛? - 鏁版嵁鍖洪棿鍥哄畾涓?09:30-15:00銆? """ client = get_clickhouse_client() target_date = get_trading_day_near_date(event_datetime.date()) is_after_market = event_datetime.time() > dt_time(15, 0) # 涓庡垎閽烱閫昏緫淇濇寔涓€鑷寸殑鏃ユ湡閫夋嫨瑙勫垯 if target_date and is_after_market: next_trade_date = get_trading_day_near_date(target_date + timedelta(days=1)) if next_trade_date: target_date = next_trade_date if not target_date: return jsonify({ 'error': 'No data available', 'code': stock_code, 'name': stock_name, 'data': [], 'trade_date': event_datetime.date().strftime('%Y-%m-%d'), 'type': 'timeline' }) # 鑾峰彇鏄ㄦ敹鐩樹环 - 浼樺厛浠?MySQL ea_trade 琛ㄨ幏鍙栵紙鏇村彲闈狅級 prev_close = None base_code = stock_code.split('.')[0] target_date_str = target_date.strftime('%Y%m%d') try: with engine.connect() as conn: # F007N 鏄槰鏀朵环瀛楁 result = conn.execute(text(""" SELECT F007N FROM ea_trade WHERE SECCODE = :code AND TRADEDATE = :trade_date AND F007N > 0 """), {'code': base_code, 'trade_date': target_date_str}).fetchone() if result and result[0]: prev_close = float(result[0]) except Exception as e: 
logger.warning(f"浠?ea_trade 鑾峰彇鏄ㄦ敹浠峰け璐? {e}") # 濡傛灉 MySQL 娌℃湁鏁版嵁锛屽洖閫€鍒?ClickHouse if prev_close is None: prev_close_query = """ SELECT close FROM stock_minute WHERE code = %(code)s AND timestamp < %(start)s ORDER BY timestamp DESC LIMIT 1 """ prev_close_result = client.execute(prev_close_query, { 'code': stock_code, 'start': datetime.combine(target_date, dt_time(9, 30)) }) if prev_close_result: prev_close = float(prev_close_result[0][0]) data = client.execute( """ SELECT timestamp, close, volume FROM stock_minute WHERE code = %(code)s AND timestamp BETWEEN %(start)s AND %(end)s ORDER BY timestamp """, { 'code': stock_code, 'start': datetime.combine(target_date, dt_time(9, 30)), 'end': datetime.combine(target_date, dt_time(15, 0)), } ) timeline_data = [] total_amount = 0 total_volume = 0 for row in data: price = float(row[1]) volume = float(row[2]) total_amount += price * volume total_volume += volume avg_price = total_amount / total_volume if total_volume > 0 else price # 璁$畻娑ㄨ穼骞? change_percent = ((price - prev_close) / prev_close * 100) if prev_close else 0 timeline_data.append({ 'time': row[0].strftime('%H:%M'), 'price': price, 'avg_price': avg_price, 'volume': volume, 'change_percent': change_percent, }) return jsonify({ 'code': stock_code, 'name': stock_name, 'data': timeline_data, 'trade_date': target_date.strftime('%Y-%m-%d'), 'type': 'timeline', 'is_history': target_date < event_datetime.date(), 'prev_close': prev_close, }) # ==================== 鎸囨暟琛屾儏API锛堜笌鑲$エ閫昏緫涓€鑷达紝鏁版嵁琛ㄤ负 index_minute锛?==================== @app.route('/api/index//realtime') def get_index_realtime(index_code): """ 鑾峰彇鎸囨暟瀹炴椂琛屾儏锛堢敤浜庝氦鏄撴椂闂村唴鐨勮鎯呮洿鏂帮級 浠?index_minute 琛ㄨ幏鍙栨渶鏂扮殑鍒嗛挓鏁版嵁 杩斿洖: 鏈€鏂颁环銆佹定璺屽箙銆佹定璺岄銆佸紑鐩樹环銆佹渶楂樹环銆佹渶浣庝环銆佹槰鏀朵环 """ # 纭繚鎸囨暟浠g爜鍖呭惈鍚庣紑锛圕lickHouse 涓瓨鍌ㄧ殑鏄甫鍚庣紑鐨勪唬鐮侊級 # 涓婅瘉鎸囨暟: 000xxx.SH, 娣辫瘉鎸囨暟: 399xxx.SZ if '.' not in index_code: if index_code.startswith('399'): index_code = f"{index_code}.SZ" else: # 000寮€澶寸殑涓婅瘉鎸囨暟锛屼互鍙婂叾浠栨寚鏁伴粯璁や笂娴? 
index_code = f"{index_code}.SH" client = get_clickhouse_client() today = date.today() # 鍒ゆ柇浠婂ぉ鏄惁鏄氦鏄撴棩 if today not in trading_days_set: # 闈炰氦鏄撴棩锛岃幏鍙栨渶杩戜竴涓氦鏄撴棩鐨勬敹鐩樻暟鎹? target_date = get_trading_day_near_date(today) if not target_date: return jsonify({ 'success': False, 'error': 'No trading day found', 'data': None }) is_trading = False else: target_date = today # 鍒ゆ柇鏄惁鍦ㄤ氦鏄撴椂闂村唴 now = datetime.now() current_minutes = now.hour * 60 + now.minute # 9:30-11:30 = 570-690, 13:00-15:00 = 780-900 is_trading = (570 <= current_minutes <= 690) or (780 <= current_minutes <= 900) try: # 鑾峰彇褰撳ぉ/鏈€杩戜氦鏄撴棩鐨勭涓€鏉℃暟鎹紙寮€鐩樹环锛夊拰鏈€鍚庝竴鏉℃暟鎹紙鏈€鏂颁环锛? # 鍚屾椂鑾峰彇鏈€楂樹环鍜屾渶浣庝环 data = client.execute( """ SELECT min(open) as first_open, max(high) as day_high, min(low) as day_low, argMax(close, timestamp) as latest_close, argMax(timestamp, timestamp) as latest_time FROM index_minute WHERE code = %(code)s AND toDate(timestamp) = %(date)s """, { 'code': index_code, 'date': target_date, } ) if not data or not data[0] or data[0][3] is None: return jsonify({ 'success': False, 'error': 'No data available', 'data': None }) row = data[0] first_open = float(row[0]) if row[0] else None day_high = float(row[1]) if row[1] else None day_low = float(row[2]) if row[2] else None latest_close = float(row[3]) if row[3] else None latest_time = row[4] # 鑾峰彇鏄ㄦ敹浠凤紙浠?MySQL ea_exchangetrade 琛級 code_no_suffix = index_code.split('.')[0] prev_close = None with engine.connect() as conn: # 鑾峰彇鍓嶄竴涓氦鏄撴棩鐨勬敹鐩樹环 prev_result = conn.execute(text( """ SELECT F006N FROM ea_exchangetrade WHERE INDEXCODE = :code AND TRADEDATE < :today ORDER BY TRADEDATE DESC LIMIT 1 """ ), { 'code': code_no_suffix, 'today': datetime.combine(target_date, dt_time(0, 0, 0)) }).fetchone() if prev_result and prev_result[0]: prev_close = float(prev_result[0]) # 璁$畻娑ㄨ穼棰濆拰娑ㄨ穼骞? 
change_amount = None change_pct = None if latest_close is not None and prev_close is not None and prev_close > 0: change_amount = latest_close - prev_close change_pct = (change_amount / prev_close) * 100 return jsonify({ 'success': True, 'data': { 'code': index_code, 'price': latest_close, 'open': first_open, 'high': day_high, 'low': day_low, 'prev_close': prev_close, 'change': change_amount, 'change_pct': change_pct, 'update_time': latest_time.strftime('%H:%M:%S') if latest_time else None, 'trade_date': target_date.strftime('%Y-%m-%d'), 'is_trading': is_trading, } }) except Exception as e: app.logger.error(f"鑾峰彇鎸囨暟瀹炴椂琛屾儏澶辫触: {index_code}, 閿欒: {str(e)}") return jsonify({ 'success': False, 'error': str(e), 'data': None }), 500 @app.route('/api/index//kline') def get_index_kline(index_code): chart_type = request.args.get('type', 'minute') event_time = request.args.get('event_time') try: event_datetime = datetime.fromisoformat(event_time) if event_time else datetime.now() except ValueError: return jsonify({'error': 'Invalid event_time format'}), 400 # 纭繚鎸囨暟浠g爜鍖呭惈鍚庣紑锛圕lickHouse 涓暟鎹甫鍚庣紑锛? # 399xxx -> 娣变氦鎵€, 鍏朵粬锛?00xxx绛夛級-> 涓婁氦鎵€ if '.' not in index_code: index_code = f"{index_code}.SZ" if index_code.startswith('39') else f"{index_code}.SH" # 鎸囨暟鍚嶇О锛堟殏鏃犵储寮曡〃锛屽厛杩斿洖浠g爜鏈韩锛? 
index_name = index_code.split('.')[0] if chart_type == 'minute': return get_index_minute_kline(index_code, event_datetime, index_name) elif chart_type == 'timeline': return get_index_timeline_data(index_code, event_datetime, index_name) elif chart_type == 'daily': return get_index_daily_kline(index_code, event_datetime, index_name) else: return jsonify({'error': f'Unsupported chart type: {chart_type}'}), 400 def get_index_minute_kline(index_code, event_datetime, index_name): client = get_clickhouse_client() target_date = get_trading_day_near_date(event_datetime.date()) if not target_date: return jsonify({ 'error': 'No data available', 'code': index_code, 'name': index_name, 'data': [], 'trade_date': event_datetime.date().strftime('%Y-%m-%d'), 'type': 'minute' }) data = client.execute( """ SELECT timestamp, open, high, low, close, volume, amt FROM index_minute WHERE code = %(code)s AND timestamp BETWEEN %(start)s AND %(end)s ORDER BY timestamp """, { 'code': index_code, 'start': datetime.combine(target_date, dt_time(9, 30)), 'end': datetime.combine(target_date, dt_time(15, 0)), } ) kline_data = [{ 'time': row[0].strftime('%H:%M'), 'open': float(row[1]), 'high': float(row[2]), 'low': float(row[3]), 'close': float(row[4]), 'volume': float(row[5]), 'amount': float(row[6]), } for row in data] return jsonify({ 'code': index_code, 'name': index_name, 'data': kline_data, 'trade_date': target_date.strftime('%Y-%m-%d'), 'type': 'minute', 'is_history': target_date < event_datetime.date(), }) def get_index_timeline_data(index_code, event_datetime, index_name): client = get_clickhouse_client() target_date = get_trading_day_near_date(event_datetime.date()) if not target_date: return jsonify({ 'error': 'No data available', 'code': index_code, 'name': index_name, 'data': [], 'trade_date': event_datetime.date().strftime('%Y-%m-%d'), 'type': 'timeline' }) data = client.execute( """ SELECT timestamp, close, volume FROM index_minute WHERE code = %(code)s AND timestamp BETWEEN 
%(start)s AND %(end)s ORDER BY timestamp """, { 'code': index_code, 'start': datetime.combine(target_date, dt_time(9, 30)), 'end': datetime.combine(target_date, dt_time(15, 0)), } ) timeline = [] total_amount = 0 total_volume = 0 for row in data: price = float(row[1]) volume = float(row[2]) total_amount += price * volume total_volume += volume avg_price = total_amount / total_volume if total_volume > 0 else price timeline.append({ 'time': row[0].strftime('%H:%M'), 'price': price, 'avg_price': avg_price, 'volume': volume, }) return jsonify({ 'code': index_code, 'name': index_name, 'data': timeline, 'trade_date': target_date.strftime('%Y-%m-%d'), 'type': 'timeline', 'is_history': target_date < event_datetime.date(), }) def get_index_daily_kline(index_code, event_datetime, index_name): """浠?MySQL 鐨?stock.ea_exchangetrade 鑾峰彇鎸囨暟鏃ョ嚎 娉ㄦ剰锛氳〃涓?INDEXCODE 鏃犲悗缂€锛屼緥濡?000001.SH -> 000001 瀛楁锛? F003N 寮€甯傛寚鏁?-> open F004N 鏈€楂樻寚鏁?-> high F005N 鏈€浣庢寚鏁?-> low F006N 鏈€杩戞寚鏁?-> close锛堜綔涓哄綋鏃ユ敹鐩樻垨鏈€杩戜环浣跨敤锛? F007N 鏄ㄦ棩鏀跺競鎸囨暟 -> prev_close """ # 鍘绘帀鍚庣紑 code_no_suffix = index_code.split('.')[0] # 閫夋嫨灞曠ず鐨勬渶鍚庝氦鏄撴棩 target_date = get_trading_day_near_date(event_datetime.date()) if not target_date: return jsonify({ 'error': 'No data available', 'code': index_code, 'name': index_name, 'data': [], 'trade_date': event_datetime.date().strftime('%Y-%m-%d'), 'type': 'daily' }) # 鍙栨渶杩戜竴娈垫椂闂寸殑鏃ョ嚎锛堝€掑簭鍐嶅弽杞负鍗囧簭锛? with engine.connect() as conn: rows = conn.execute(text( """ SELECT TRADEDATE, F003N, F004N, F005N, F006N, F007N FROM ea_exchangetrade WHERE INDEXCODE = :code AND TRADEDATE <= :end_dt ORDER BY TRADEDATE DESC LIMIT 180 """ ), { 'code': code_no_suffix, 'end_dt': datetime.combine(target_date, dt_time(23, 59, 59)) }).fetchall() # 鍙嶈浆涓烘椂闂村崌搴? rows = list(reversed(rows)) daily = [] for i, r in enumerate(rows): trade_dt = r[0] open_v = r[1] high_v = r[2] low_v = r[3] last_v = r[4] prev_close_v = r[5] # 姝g‘鐨勫墠鏀剁洏浠烽€昏緫锛氫娇鐢ㄥ墠涓€涓氦鏄撴棩鐨凢006N锛堟敹鐩樹环锛? 
calculated_prev_close = None if i > 0 and rows[i - 1][4] is not None: # 浣跨敤鍓嶄竴涓氦鏄撴棩鐨勬敹鐩樹环浣滀负鍓嶆敹鐩樹环 calculated_prev_close = float(rows[i - 1][4]) else: # 绗竴鏉¤褰曪紝灏濊瘯浣跨敤F007N瀛楁浣滀负澶囬€? if prev_close_v is not None and prev_close_v > 0: calculated_prev_close = float(prev_close_v) daily.append({ 'time': trade_dt.strftime('%Y-%m-%d') if hasattr(trade_dt, 'strftime') else str(trade_dt), 'open': float(open_v) if open_v is not None else None, 'high': float(high_v) if high_v is not None else None, 'low': float(low_v) if low_v is not None else None, 'close': float(last_v) if last_v is not None else None, 'prev_close': calculated_prev_close, }) return jsonify({ 'code': index_code, 'name': index_name, 'data': daily, 'trade_date': target_date.strftime('%Y-%m-%d'), 'type': 'daily', 'is_history': target_date < event_datetime.date(), }) # ==================== 鏃ュ巻API ==================== @app.route('/api/v1/calendar/event-counts', methods=['GET']) def get_event_counts(): """鑾峰彇鏃ュ巻浜嬩欢鏁伴噺缁熻""" try: # 鑾峰彇鏈堜唤鍙傛暟 year = request.args.get('year', datetime.now().year, type=int) month = request.args.get('month', datetime.now().month, type=int) # 璁$畻鏈堜唤鐨勫紑濮嬪拰缁撴潫鏃ユ湡 start_date = datetime(year, month, 1) if month == 12: end_date = datetime(year + 1, 1, 1) else: end_date = datetime(year, month + 1, 1) # 鏌ヨ浜嬩欢鏁伴噺 query = """ SELECT DATE(calendar_time) as date, COUNT(*) as count FROM future_events WHERE calendar_time BETWEEN :start_date AND :end_date AND type = 'event' GROUP BY DATE(calendar_time) """ result = db.session.execute(text(query), { 'start_date': start_date, 'end_date': end_date }) # 鏍煎紡鍖栫粨鏋? 
events = [] for day in result: events.append({ 'date': day.date.isoformat(), 'count': day.count, 'className': get_event_class(day.count) }) return jsonify({ 'success': True, 'data': events }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/v1/calendar/events', methods=['GET']) def get_calendar_events(): """鑾峰彇鎸囧畾鏃ユ湡鐨勪簨浠跺垪琛?"" date_str = request.args.get('date') event_type = request.args.get('type', 'all') if not date_str: return jsonify({ 'success': False, 'error': 'Date parameter required' }), 400 try: date = datetime.strptime(date_str, '%Y-%m-%d') except ValueError: return jsonify({ 'success': False, 'error': 'Invalid date format' }), 400 # 淇SQL璇硶锛氬幓鎺夊嚱鏁板悕鍚庣殑绌烘牸锛屽幓鎺夊弬鏁板墠鐨勭┖鏍? query = """ SELECT * FROM future_events WHERE DATE(calendar_time) = :date """ params = {'date': date} if event_type != 'all': query += " AND type = :type" params['type'] = event_type query += " ORDER BY calendar_time" result = db.session.execute(text(query), params) events = [] user_following_ids = set() if 'user_id' in session: follows = FutureEventFollow.query.filter_by(user_id=session['user_id']).all() user_following_ids = {f.future_event_id for f in follows} for row in result: # 浣跨敤缁熶竴鐨勫鐞嗗嚱鏁帮紝鏀寔鏂板瓧娈靛洖閫€鍜?best_matches 瑙f瀽 event_data = process_future_event_row(row, user_following_ids) events.append(event_data) return jsonify({ 'success': True, 'data': events }) @app.route('/api/v1/calendar/events/', methods=['GET']) def get_calendar_event_detail(event_id): """鑾峰彇鏃ュ巻浜嬩欢璇︽儏""" try: sql = """ SELECT * FROM future_events WHERE data_id = :event_id \ """ result = db.session.execute(text(sql), {'event_id': event_id}).first() if not result: return jsonify({ 'success': False, 'error': 'Event not found' }), 404 # 妫€鏌ュ綋鍓嶇敤鎴锋槸鍚﹀叧娉ㄤ簡璇ユ湭鏉ヤ簨浠? 
user_following_ids = set() if 'user_id' in session: is_following = FutureEventFollow.query.filter_by( user_id=session['user_id'], future_event_id=event_id ).first() is not None if is_following: user_following_ids.add(event_id) # 浣跨敤缁熶竴鐨勫鐞嗗嚱鏁帮紝鏀寔鏂板瓧娈靛洖閫€鍜?best_matches 瑙f瀽 event_data = process_future_event_row(result, user_following_ids) return jsonify({ 'success': True, 'data': event_data }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/v1/calendar/events//follow', methods=['POST']) def toggle_future_event_follow(event_id): """鍒囨崲鏈潵浜嬩欢鍏虫敞鐘舵€侊紙闇€鐧诲綍锛?"" if 'user_id' not in session: return jsonify({'success': False, 'error': '鏈櫥褰?}), 401 try: # 妫€鏌ユ湭鏉ヤ簨浠舵槸鍚﹀瓨鍦? sql = """ SELECT data_id \ FROM future_events \ WHERE data_id = :event_id \ """ result = db.session.execute(text(sql), {'event_id': event_id}).first() if not result: return jsonify({'success': False, 'error': '鏈潵浜嬩欢涓嶅瓨鍦?}), 404 user_id = session['user_id'] # 妫€鏌ユ槸鍚﹀凡鍏虫敞 existing = FutureEventFollow.query.filter_by( user_id=user_id, future_event_id=event_id ).first() if existing: # 鍙栨秷鍏虫敞 db.session.delete(existing) db.session.commit() return jsonify({ 'success': True, 'data': {'is_following': False} }) else: # 鍏虫敞 follow = FutureEventFollow( user_id=user_id, future_event_id=event_id ) db.session.add(follow) db.session.commit() return jsonify({ 'success': True, 'data': {'is_following': True} }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 def get_event_class(count): """鏍规嵁浜嬩欢鏁伴噺杩斿洖CSS绫诲悕""" if count >= 10: return 'event-high' elif count >= 5: return 'event-medium' elif count > 0: return 'event-low' return '' def parse_json_field(field_value): """瑙f瀽JSON瀛楁""" if not field_value: return [] try: if isinstance(field_value, str): if field_value.startswith('['): return json.loads(field_value) else: return field_value.split(',') else: return field_value except: return [] def get_future_event_field(row, 
new_field, old_field): """ 鑾峰彇 future_events 琛ㄥ瓧娈靛€硷紝鏀寔鏂版棫瀛楁鍥為€€ 濡傛灉鏂板瓧娈靛瓨鍦ㄤ笖涓嶄负绌猴紝浣跨敤鏂板瓧娈碉紱鍚﹀垯浣跨敤鏃у瓧娈? """ new_value = getattr(row, new_field, None) if hasattr(row, new_field) else None old_value = getattr(row, old_field, None) if hasattr(row, old_field) else None # 濡傛灉鏂板瓧娈垫湁鍊硷紙涓嶄负绌哄瓧绗︿覆锛夛紝浣跨敤鏂板瓧娈? if new_value is not None and str(new_value).strip(): return new_value return old_value def parse_best_matches(best_matches_value): """ 瑙f瀽鏂扮殑 best_matches 鏁版嵁缁撴瀯锛堝惈鐮旀姤寮曠敤淇℃伅锛? 鏂扮粨鏋勭ず渚? [ { "stock_code": "300451.SZ", "company_name": "鍒涗笟鎱у悍", "original_description": "鏍稿績鏍囩殑锛屽尰鐤椾俊鎭寲...", "best_report_title": "鎶ュ憡鏍囬", "best_report_author": "浣滆€?, "best_report_sentences": "鐩稿叧鍐呭", "best_report_match_score": "濂?, "best_report_match_ratio": 0.9285714285714286, "best_report_declare_date": "2023-04-25T00:00:00", "total_reports": 9, "high_score_reports": 6 }, ... ] 杩斿洖缁熶竴鏍煎紡鐨勮偂绁ㄥ垪琛紝鍏煎鏃ф牸寮? """ if not best_matches_value: return [] try: # 瑙f瀽 JSON if isinstance(best_matches_value, str): data = json.loads(best_matches_value) else: data = best_matches_value if not isinstance(data, list): return [] result = [] for item in data: if isinstance(item, dict): # 鏂扮粨鏋勶細鍖呭惈鐮旀姤淇℃伅鐨勫瓧鍏? 
stock_info = { 'code': item.get('stock_code', ''), 'name': item.get('company_name', ''), 'description': item.get('original_description', ''), 'score': item.get('best_report_match_ratio', 0), # 鐮旀姤寮曠敤淇℃伅 'report': { 'title': item.get('best_report_title', ''), 'author': item.get('best_report_author', ''), 'sentences': item.get('best_report_sentences', ''), 'match_score': item.get('best_report_match_score', ''), 'match_ratio': item.get('best_report_match_ratio', 0), 'declare_date': item.get('best_report_declare_date', ''), 'total_reports': item.get('total_reports', 0), 'high_score_reports': item.get('high_score_reports', 0) } if item.get('best_report_title') else None } result.append(stock_info) elif isinstance(item, (list, tuple)) and len(item) >= 2: # 鏃х粨鏋勶細[code, name, description, score] result.append({ 'code': item[0], 'name': item[1], 'description': item[2] if len(item) > 2 else '', 'score': item[3] if len(item) > 3 else 0, 'report': None }) return result except Exception as e: print(f"parse_best_matches error: {e}") return [] def process_future_event_row(row, user_following_ids=None): """ 缁熶竴澶勭悊 future_events 琛ㄧ殑琛屾暟鎹? 鏀寔鏂板瓧娈靛洖閫€鍜?best_matches 瑙f瀽 """ if user_following_ids is None: user_following_ids = set() # 鑾峰彇瀛楁鍊硷紝鏀寔鏂版棫鍥為€€ # second_modified_text -> former # second_modified_text.1 -> forecast (MySQL 涓敤鍙嶅紩鍙? former_value = get_future_event_field(row, 'second_modified_text', 'former') # 澶勭悊 second_modified_text.1 瀛楁锛堢壒娈婂瓧娈靛悕锛? 
forecast_new = None if hasattr(row, 'second_modified_text.1'): forecast_new = getattr(row, 'second_modified_text.1', None) # 灏濊瘯鍏朵粬鍙兘鐨勫睘鎬у悕 for attr_name in ['second_modified_text.1', 'second_modified_text_1']: if hasattr(row, attr_name): val = getattr(row, attr_name, None) if val and str(val).strip(): forecast_new = val break forecast_value = forecast_new if (forecast_new and str(forecast_new).strip()) else getattr(row, 'forecast', None) # best_matches -> related_stocks best_matches = getattr(row, 'best_matches', None) if hasattr(row, 'best_matches') else None if best_matches and str(best_matches).strip(): related_stocks = parse_best_matches(best_matches) else: related_stocks = parse_json_field(getattr(row, 'related_stocks', None)) # 鏋勫缓浜嬩欢鏁版嵁 event_data = { 'id': row.data_id, 'title': row.title, 'type': getattr(row, 'type', None), 'calendar_time': row.calendar_time.isoformat() if row.calendar_time else None, 'star': row.star, 'former': former_value, 'forecast': forecast_value, 'fact': getattr(row, 'fact', None), 'is_following': row.data_id in user_following_ids, 'related_stocks': related_stocks, 'concepts': parse_json_field(getattr(row, 'concepts', None)), 'update_time': getattr(row, 'update_time', None).isoformat() if getattr(row, 'update_time', None) else None } return event_data # ==================== 琛屼笟API ==================== @app.route('/api/classifications', methods=['GET']) def get_classifications(): """鑾峰彇鐢抽摱涓囧浗琛屼笟鍒嗙被鏍戝舰缁撴瀯""" try: # 鏌ヨ鐢抽摱涓囧浗琛屼笟鍒嗙被鐨勬墍鏈夋暟鎹? sql = """ SELECT f003v as code, f004v as level1, f005v as level2, f006v as level3,f007v as level4 FROM ea_sector WHERE f002v = '鐢抽摱涓囧浗琛屼笟鍒嗙被' AND f003v IS NOT NULL AND f004v IS NOT NULL ORDER BY f003v """ result = db.session.execute(text(sql)).all() # 鏋勫缓鏍戝舰缁撴瀯 tree_dict = {} for row in result: code = row.code level1 = row.level1 level2 = row.level2 level3 = row.level3 # 璺宠繃绌烘暟鎹? if not level1: continue # 绗竴灞? if level1 not in tree_dict: # 鑾峰彇绗竴灞傜殑code锛堝彇鍓?浣嶆垨鍓嶇紑锛? 
level1_code = code[:3] if len(code) >= 3 else code tree_dict[level1] = { 'value': level1_code, 'label': level1, 'children_dict': {} } # 绗簩灞? if level2: if level2 not in tree_dict[level1]['children_dict']: # 鑾峰彇绗簩灞傜殑code锛堝彇鍓?浣嶏級 level2_code = code[:6] if len(code) >= 6 else code tree_dict[level1]['children_dict'][level2] = { 'value': level2_code, 'label': level2, 'children_dict': {} } # 绗笁灞? if level3: if level3 not in tree_dict[level1]['children_dict'][level2]['children_dict']: tree_dict[level1]['children_dict'][level2]['children_dict'][level3] = { 'value': code, 'label': level3 } # 杞崲涓烘渶缁堟牸寮? result_list = [] for level1_name, level1_data in tree_dict.items(): level1_node = { 'value': level1_data['value'], 'label': level1_data['label'] } # 澶勭悊绗簩灞? if level1_data['children_dict']: level1_children = [] for level2_name, level2_data in level1_data['children_dict'].items(): level2_node = { 'value': level2_data['value'], 'label': level2_data['label'] } # 澶勭悊绗笁灞? if level2_data['children_dict']: level2_children = [] for level3_name, level3_data in level2_data['children_dict'].items(): level2_children.append({ 'value': level3_data['value'], 'label': level3_data['label'] }) if level2_children: level2_node['children'] = level2_children level1_children.append(level2_node) if level1_children: level1_node['children'] = level1_children result_list.append(level1_node) return jsonify({ 'success': True, 'data': result_list }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/stocklist', methods=['GET']) def get_stock_list(): """鑾峰彇鑲$エ鍒楄〃""" try: sql = """ SELECT DISTINCT SECCODE as code, SECNAME as name FROM ea_stocklist ORDER BY SECCODE """ result = db.session.execute(text(sql)).all() stocks = [{'code': row.code, 'name': row.name} for row in result] return jsonify(stocks) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/events', methods=['GET'], strict_slashes=False) def 
api_get_events(): """ 鑾峰彇浜嬩欢鍒楄〃API - 鏀寔绛涢€夈€佹帓搴忋€佸垎椤碉紝鍏煎鍓嶇璋冪敤 """ try: # 鍒嗛〉鍙傛暟 page = max(1, request.args.get('page', 1, type=int)) per_page = min(100, max(1, request.args.get('per_page', 10, type=int))) # 鍩虹绛涢€夊弬鏁? event_type = request.args.get('type', 'all') event_status = request.args.get('status', 'active') importance = request.args.get('importance', 'all') # 鏃ユ湡绛涢€夊弬鏁? start_date = request.args.get('start_date') end_date = request.args.get('end_date') date_range = request.args.get('date_range') recent_days = request.args.get('recent_days', type=int) # 琛屼笟绛涢€夊弬鏁帮紙鍙敮鎸佺敵閾朵竾鍥借涓氬垎绫伙級 industry_code = request.args.get('industry_code') # 鐢充竾琛屼笟浠g爜锛屽 "S370502" # 姒傚康/鏍囩绛涢€夊弬鏁? tag = request.args.get('tag') tags = request.args.get('tags') keywords = request.args.get('keywords') # 鎼滅储鍙傛暟 search_query = request.args.get('q') search_type = request.args.get('search_type', 'topic') search_fields = request.args.get('search_fields', 'title,description').split(',') # 鎺掑簭鍙傛暟 sort_by = request.args.get('sort', 'new') return_type = request.args.get('return_type', 'avg') order = request.args.get('order', 'desc') # 鏀剁泭鐜囩瓫閫夊弬鏁? min_avg_return = request.args.get('min_avg_return', type=float) max_avg_return = request.args.get('max_avg_return', type=float) min_max_return = request.args.get('min_max_return', type=float) max_max_return = request.args.get('max_max_return', type=float) min_week_return = request.args.get('min_week_return', type=float) max_week_return = request.args.get('max_week_return', type=float) # 鍏朵粬绛涢€夊弬鏁? 
min_hot_score = request.args.get('min_hot_score', type=float) max_hot_score = request.args.get('max_hot_score', type=float) min_view_count = request.args.get('min_view_count', type=int) creator_id = request.args.get('creator_id', type=int) # 杩斿洖鏍煎紡鍙傛暟 include_creator = request.args.get('include_creator', 'true').lower() == 'true' include_stats = request.args.get('include_stats', 'true').lower() == 'true' include_related_data = request.args.get('include_related_data', 'false').lower() == 'true' # ==================== 鏋勫缓鏌ヨ ==================== from sqlalchemy.orm import joinedload # 浣跨敤 joinedload 棰勫姞杞?creator锛岃В鍐?N+1 鏌ヨ闂 query = Event.query.options(joinedload(Event.creator)) # 鍙繑鍥炴湁鍏宠仈鑲$エ鐨勪簨浠讹紙娌℃湁鍏宠仈鑲$エ鐨勪簨浠朵笉璁″叆鍒楄〃锛? from sqlalchemy import exists query = query.filter( exists().where(RelatedStock.event_id == Event.id) ) if event_status != 'all': query = query.filter_by(status=event_status) if event_type != 'all': query = query.filter_by(event_type=event_type) # 鏀寔澶氫釜閲嶈鎬х骇鍒瓫閫夛紝鐢ㄩ€楀彿鍒嗛殧锛堝 importance=S,A锛? if importance != 'all': if ',' in importance: # 澶氫釜閲嶈鎬х骇鍒? importance_list = [imp.strip() for imp in importance.split(',') if imp.strip()] query = query.filter(Event.importance.in_(importance_list)) else: # 鍗曚釜閲嶈鎬х骇鍒? query = query.filter_by(importance=importance) if creator_id: query = query.filter_by(creator_id=creator_id) # 鏂板锛氳涓氫唬鐮佽繃婊わ紙鐢抽摱涓囧浗琛屼笟鍒嗙被锛? 鏀寔鍓嶇紑鍖归厤 # 鐢充竾琛屼笟鍒嗙被灞傜骇锛氫竴绾?Sxx, 浜岀骇 Sxxxx, 涓夌骇 Sxxxxxx # 鎼滅储 S22 浼氬尮閰嶆墍鏈?S22xxxx 鐨勪簨浠讹紙濡?S2203, S220309 绛夛級 # related_industries 鏍煎紡: varchar锛屽 "S640701" if industry_code: # 鍒ゆ柇鏄惁闇€瑕佸墠缂€鍖归厤锛氫竴绾?3瀛楃)鎴栦簩绾?5瀛楃)琛屼笟浠g爜 def is_prefix_code(code): """鍒ゆ柇鏄惁涓洪渶瑕佸墠缂€鍖归厤鐨勮涓氫唬鐮侊紙涓€绾ф垨浜岀骇锛?"" code = code.strip() # 鐢充竾琛屼笟浠g爜鏍煎紡锛歋 + 鏁板瓧 # 涓€绾? 
S + 2浣嶆暟瀛?(濡?S22) = 3瀛楃 # 浜岀骇: S + 4浣嶆暟瀛?(濡?S2203) = 5瀛楃 # 涓夌骇: S + 6浣嶆暟瀛?(濡?S220309) = 7瀛楃 return len(code) < 7 and code.startswith('S') # 濡傛灉鍖呭惈閫楀彿锛岃鏄庢槸澶氫釜琛屼笟浠g爜 if ',' in industry_code: codes = [code.strip() for code in industry_code.split(',') if code.strip()] conditions = [] for code in codes: if is_prefix_code(code): # 鍓嶇紑鍖归厤锛氫娇鐢?LIKE conditions.append(Event.related_industries.like(f"{code}%")) else: # 绮剧‘鍖归厤锛堜笁绾ц涓氫唬鐮侊級 conditions.append(Event.related_industries == code) query = query.filter(db.or_(*conditions)) else: # 鍗曚釜琛屼笟浠g爜 if is_prefix_code(industry_code): # 鍓嶇紑鍖归厤锛氫娇鐢?LIKE query = query.filter(Event.related_industries.like(f"{industry_code}%")) else: # 绮剧‘鍖归厤锛堜笁绾ц涓氫唬鐮侊級 query = query.filter(Event.related_industries == industry_code) # 鏂板锛氬叧閿瘝/鍏ㄦ枃鎼滅储杩囨护锛圡ySQL JSON锛? if search_query: like_pattern = f"%{search_query}%" # 瀛愭煡璇細鏌ユ壘鍏宠仈鑲$エ涓尮閰嶇殑浜嬩欢ID # stock_code 鏍煎紡锛?00111.SH / 000001.SZ / 830001.BJ锛屾敮鎸佷笉甯﹀悗缂€鎼滅储 stock_subquery = db.session.query(RelatedStock.event_id).filter( db.or_( RelatedStock.stock_code.ilike(like_pattern), # 鏀寔鑲$エ浠g爜鎼滅储 RelatedStock.stock_name.ilike(like_pattern), RelatedStock.relation_desc.ilike(like_pattern) ) ).distinct() # 涓绘煡璇細鎼滅储浜嬩欢鏍囬銆佹弿杩般€佸叧閿瘝鎴栧叧鑱旇偂绁? query = query.filter( db.or_( Event.title.ilike(like_pattern), Event.description.ilike(like_pattern), text(f"JSON_SEARCH(keywords, 'one', '%{search_query}%') IS NOT NULL"), Event.id.in_(stock_subquery) ) ) if recent_days: from datetime import datetime, timedelta cutoff_date = datetime.now() - timedelta(days=recent_days) query = query.filter(Event.created_at >= cutoff_date) else: if date_range and ' 鑷?' 
in date_range: try: start_date_str, end_date_str = date_range.split(' 鑷?') start_date = start_date_str.strip() end_date = end_date_str.strip() except ValueError: pass if start_date: from datetime import datetime try: if len(start_date) == 10: start_datetime = datetime.strptime(start_date, '%Y-%m-%d') else: start_datetime = datetime.strptime(start_date, '%Y-%m-%d %H:%M:%S') query = query.filter(Event.created_at >= start_datetime) except ValueError: pass if end_date: from datetime import datetime try: if len(end_date) == 10: end_datetime = datetime.strptime(end_date, '%Y-%m-%d') end_datetime = end_datetime.replace(hour=23, minute=59, second=59) else: end_datetime = datetime.strptime(end_date, '%Y-%m-%d %H:%M:%S') query = query.filter(Event.created_at <= end_datetime) except ValueError: pass if min_view_count is not None: query = query.filter(Event.view_count >= min_view_count) # 鎺掑簭 from sqlalchemy import desc, asc, case order_func = desc if order.lower() == 'desc' else asc if sort_by == 'hot': query = query.order_by(order_func(Event.hot_score)) elif sort_by == 'new': query = query.order_by(order_func(Event.created_at)) elif sort_by == 'returns' or sort_by.startswith('returns_'): # 鏀寔涓ょ鏍煎紡锛? # 1. sort=returns + return_type=avg/max/week # 2. sort=returns_avg / sort=returns_max / sort=returns_week effective_return_type = return_type if sort_by.startswith('returns_'): effective_return_type = sort_by.replace('returns_', '') if effective_return_type == 'avg': query = query.order_by(order_func(Event.related_avg_chg)) elif effective_return_type == 'max': query = query.order_by(order_func(Event.related_max_chg)) elif effective_return_type == 'week': query = query.order_by(order_func(Event.related_week_chg)) else: # 榛樿鎸夊钩鍧囨敹鐩婃帓搴? 
query = query.order_by(order_func(Event.related_avg_chg)) elif sort_by == 'importance': importance_order = case( (Event.importance == 'S', 1), (Event.importance == 'A', 2), (Event.importance == 'B', 3), (Event.importance == 'C', 4), else_=5 ) if order.lower() == 'desc': query = query.order_by(importance_order) else: query = query.order_by(desc(importance_order)) elif sort_by == 'view_count': query = query.order_by(order_func(Event.view_count)) # 鍒嗛〉 paginated = query.paginate(page=page, per_page=per_page, error_out=False) events_data = [] for event in paginated.items: event_dict = { 'id': event.id, 'title': event.title, 'description': event.description, 'event_type': event.event_type, 'importance': event.importance, 'status': event.status, 'created_at': event.created_at.isoformat() if event.created_at else None, 'updated_at': event.updated_at.isoformat() if event.updated_at else None, 'start_time': event.start_time.isoformat() if event.start_time else None, 'end_time': event.end_time.isoformat() if event.end_time else None, } if include_stats: event_dict.update({ 'hot_score': event.hot_score, 'view_count': event.view_count, 'post_count': event.post_count, 'follower_count': event.follower_count, 'related_avg_chg': event.related_avg_chg, 'related_max_chg': event.related_max_chg, 'related_week_chg': event.related_week_chg, 'invest_score': event.invest_score, 'trending_score': event.trending_score, 'expectation_surprise_score': event.expectation_surprise_score, }) if include_creator: event_dict['creator'] = { 'id': event.creator.id if event.creator else None, 'username': event.creator.username if event.creator else 'Anonymous' } event_dict['keywords'] = event.keywords_list if hasattr(event, 'keywords_list') else event.keywords event_dict['related_industries'] = event.related_industries if include_related_data: pass events_data.append(event_dict) applied_filters = {} if event_type != 'all': applied_filters['type'] = event_type if importance != 'all': 
applied_filters['importance'] = importance if start_date: applied_filters['start_date'] = start_date if end_date: applied_filters['end_date'] = end_date if industry_code: applied_filters['industry_code'] = industry_code if tag: applied_filters['tag'] = tag if tags: applied_filters['tags'] = tags if search_query: applied_filters['search_query'] = search_query applied_filters['search_type'] = search_type return jsonify({ 'success': True, 'data': { 'events': events_data, 'pagination': { 'page': paginated.page, 'per_page': paginated.per_page, 'total': paginated.total, 'pages': paginated.pages, 'has_prev': paginated.has_prev, 'has_next': paginated.has_next }, 'filters': { 'applied_filters': applied_filters, 'total_count': paginated.total } } }) except Exception as e: app.logger.error(f"鑾峰彇浜嬩欢鍒楄〃鍑洪敊: {str(e)}", exc_info=True) return jsonify({ 'success': False, 'error': str(e), 'error_type': type(e).__name__ }), 500 @app.route('/api/events/hot', methods=['GET']) def get_hot_events(): """鑾峰彇鐑偣浜嬩欢""" try: from datetime import datetime, timedelta days = request.args.get('days', 3, type=int) limit = request.args.get('limit', 4, type=int) since_date = datetime.now() - timedelta(days=days) hot_events = Event.query.filter( Event.status == 'active', Event.created_at >= since_date, Event.related_avg_chg != None, Event.related_avg_chg > 0 ).order_by(Event.related_avg_chg.desc()).limit(limit).all() if len(hot_events) < limit: additional_events = Event.query.filter( Event.status == 'active', Event.created_at >= since_date, ~Event.id.in_([event.id for event in hot_events]) ).order_by(Event.hot_score.desc()).limit(limit - len(hot_events)).all() hot_events.extend(additional_events) events_data = [] for event in hot_events: events_data.append({ 'id': event.id, 'title': event.title, 'description': event.description, 'importance': event.importance, 'created_at': event.created_at.isoformat() if event.created_at else None, 'related_avg_chg': event.related_avg_chg, 'related_max_chg': 
event.related_max_chg, 'expectation_surprise_score': event.expectation_surprise_score, 'creator': { 'username': event.creator.username if event.creator else 'Anonymous' } }) return jsonify({'success': True, 'data': events_data}) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/events/keywords/popular', methods=['GET']) def get_popular_keywords(): """鑾峰彇鐑棬鍏抽敭璇?"" try: limit = request.args.get('limit', 20, type=int) sql = ''' WITH RECURSIVE \ numbers AS (SELECT 0 as n \ UNION ALL \ SELECT n + 1 \ FROM numbers \ WHERE n < 100), \ json_array AS (SELECT JSON_UNQUOTE(JSON_EXTRACT(e.keywords, CONCAT('$[', n.n, ']'))) as keyword, \ COUNT(*) as count FROM event e CROSS JOIN numbers n WHERE e.status = 'active' AND JSON_EXTRACT(e.keywords \ , CONCAT('$[' \ , n.n \ , ']')) IS NOT NULL GROUP BY JSON_UNQUOTE(JSON_EXTRACT(e.keywords, CONCAT('$[', n.n, ']'))) HAVING keyword IS NOT NULL ) SELECT keyword, count FROM json_array ORDER BY count DESC, keyword LIMIT :limit \ ''' result = db.session.execute(text(sql), {'limit': limit}).all() keywords_data = [{'keyword': row.keyword, 'count': row.count} for row in result] return jsonify({'success': True, 'data': keywords_data}) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/events//sankey-data') def get_event_sankey_data(event_id): """ 鑾峰彇浜嬩欢妗戝熀鍥炬暟鎹?(鏈€缁堜紭鍖栫増) - 澶勭悊閲嶅悕鑺傜偣 - 妫€娴嬪苟鎵撶牬寰幆渚濊禆 """ flows = EventSankeyFlow.query.filter_by(event_id=event_id).order_by( EventSankeyFlow.source_level, EventSankeyFlow.target_level ).all() if not flows: return jsonify({'success': False, 'message': '鏆傛棤妗戝熀鍥炬暟鎹?}) nodes_map = {} links = [] type_colors = { 'event': '#ff4757', 'policy': '#10ac84', 'technology': '#ee5a6f', 'industry': '#00d2d3', 'company': '#54a0ff', 'product': '#ffd93d' } # --- 1. 璇嗗埆骞跺鐞嗛噸鍚嶈妭鐐?(涓庝笂涓€鐗堢浉鍚? 
--- all_node_keys = set() name_counts = {} for flow in flows: source_key = f"{flow.source_node}|{flow.source_level}" target_key = f"{flow.target_node}|{flow.target_level}" all_node_keys.add(source_key) all_node_keys.add(target_key) name_counts.setdefault(flow.source_node, set()).add(flow.source_level) name_counts.setdefault(flow.target_node, set()).add(flow.target_level) duplicate_names = {name for name, levels in name_counts.items() if len(levels) > 1} for flow in flows: source_key = f"{flow.source_node}|{flow.source_level}" if source_key not in nodes_map: display_name = f"{flow.source_node} (L{flow.source_level})" if flow.source_node in duplicate_names else flow.source_node nodes_map[source_key] = {'name': display_name, 'type': flow.source_type, 'level': flow.source_level, 'color': type_colors.get(flow.source_type)} target_key = f"{flow.target_node}|{flow.target_level}" if target_key not in nodes_map: display_name = f"{flow.target_node} (L{flow.target_level})" if flow.target_node in duplicate_names else flow.target_node nodes_map[target_key] = {'name': display_name, 'type': flow.target_type, 'level': flow.target_level, 'color': type_colors.get(flow.target_type)} links.append({ 'source_key': source_key, 'target_key': target_key, 'value': float(flow.flow_value), 'ratio': float(flow.flow_ratio), 'transmission_path': flow.transmission_path, 'impact_description': flow.impact_description, 'evidence_strength': flow.evidence_strength }) # --- 2. 寰幆妫€娴嬩笌澶勭悊 --- # 鏋勫缓閭绘帴琛? adj = defaultdict(list) for link in links: adj[link['source_key']].append(link['target_key']) # 娣卞害浼樺厛鎼滅储锛圖FS锛夋潵妫€娴嬪惊鐜? path = set() # 璁板綍褰撳墠閫掑綊璺緞涓婄殑鑺傜偣 visited = set() # 璁板綍鎵€鏈夎闂繃鐨勮妭鐐? back_edges = set() # 璁板綍瀵艰嚧寰幆鐨?鍥炴祦杈? 
    def detect_cycle_util(node):
        # DFS helper: records any edge pointing back into the current recursion path.
        path.add(node)
        visited.add(node)
        for neighbour in adj.get(node, []):
            if neighbour in path:
                # Cycle found; record this back edge as (target, source).
                back_edges.add((neighbour, node))
            elif neighbour not in visited:
                detect_cycle_util(neighbour)
        path.remove(node)

    # Run detection starting from every node.
    for node_key in list(adj.keys()):
        if node_key not in visited:
            detect_cycle_util(node_key)
    # Filter out the edges that cause cycles.
    if back_edges:
        print(f"妫€娴嬪埌骞剁Щ闄や簡 {len(back_edges)} 鏉″惊鐜竟: {back_edges}")
    valid_links_no_cycle = []
    for link in links:
        if (link['source_key'], link['target_key']) not in back_edges and \
                (link['target_key'], link['source_key']) not in back_edges:
            # Also removes loosely-defined bidirectional edges.
            valid_links_no_cycle.append(link)
    # --- 3. Build the final JSON response (similar to previous version) ---
    node_list = []
    node_index_map = {}
    sorted_node_keys = sorted(nodes_map.keys(), key=lambda k: (nodes_map[k]['level'], nodes_map[k]['name']))
    for i, key in enumerate(sorted_node_keys):
        node_list.append(nodes_map[key])
        node_index_map[key] = i
    final_links = []
    for link in valid_links_no_cycle:
        source_idx = node_index_map.get(link['source_key'])
        target_idx = node_index_map.get(link['target_key'])
        if source_idx is not None and target_idx is not None:
            # Drop the temporary keys; keep only the fields ECharts needs.
            link.pop('source_key', None)
            link.pop('target_key', None)
            link['source'] = source_idx
            link['target'] = target_idx
            final_links.append(link)
    # ... (statistics computation kept unchanged) ...
stats = { 'total_nodes': len(node_list), 'total_flows': len(final_links), 'total_flow_value': sum(link['value'] for link in final_links), 'max_level': max((node['level'] for node in node_list), default=0), 'node_type_counts': {ntype: sum(1 for n in node_list if n['type'] == ntype) for ntype in type_colors} } return jsonify({ 'success': True, 'data': {'nodes': node_list, 'links': final_links, 'stats': stats} }) # 浼樺寲鍚庣殑浼犲閾惧垎鏋?API @app.route('/api/events//chain-analysis') def get_event_chain_analysis(event_id): """鑾峰彇浜嬩欢浼犲閾惧垎鏋愭暟鎹?"" nodes = EventTransmissionNode.query.filter_by(event_id=event_id).all() if not nodes: return jsonify({'success': False, 'message': '鏆傛棤浼犲閾惧垎鏋愭暟鎹?}) edges = EventTransmissionEdge.query.filter_by(event_id=event_id).all() # 杩囨护瀛ょ珛鑺傜偣 connected_node_ids = set() for edge in edges: connected_node_ids.add(edge.from_node_id) connected_node_ids.add(edge.to_node_id) # 鍙繚鐣欐湁杩炴帴鐨勮妭鐐? connected_nodes = [node for node in nodes if node.id in connected_node_ids] if not connected_nodes: return jsonify({'success': False, 'message': '鎵€鏈夎妭鐐归兘鏄绔嬬殑锛屾殏鏃犱紶瀵煎叧绯?}) # 鑺傜偣鍒嗙被锛岀敤浜庡姏瀵煎悜鍥剧殑鍥句緥 categories = { 'event': "浜嬩欢", 'industry': "琛屼笟", 'company': "鍏徃", 'policy': "鏀跨瓥", 'technology': "鎶€鏈?, 'market': "甯傚満", 'other': "鍏朵粬" } # 璁$畻姣忎釜鑺傜偣鐨勮繛鎺ユ暟 node_connection_count = {} for node in connected_nodes: count = sum(1 for edge in edges if edge.from_node_id == node.id or edge.to_node_id == node.id) node_connection_count[node.id] = count nodes_data = [] for node in connected_nodes: connection_count = node_connection_count[node.id] nodes_data.append({ 'id': str(node.id), 'name': node.node_name, 'value': node.importance_score, # 鐢ㄤ簬鎺у埗鑺傜偣澶у皬鐨勫熀纭€鍊? 'category': categories.get(node.node_type), 'extra': { 'node_type': node.node_type, 'description': node.node_description, 'importance_score': node.importance_score, 'stock_code': node.stock_code, 'is_main_event': node.is_main_event, 'connection_count': connection_count, # 娣诲姞杩炴帴鏁颁俊鎭? 
} }) edges_data = [] for edge in edges: # 纭繚杈圭殑涓ょ鑺傜偣閮藉湪杩炴帴鑺傜偣鍒楄〃涓? if edge.from_node_id in connected_node_ids and edge.to_node_id in connected_node_ids: edges_data.append({ 'source': str(edge.from_node_id), 'target': str(edge.to_node_id), 'value': edge.strength, # 鐢ㄤ簬鎺у埗杈圭殑瀹藉害 'extra': { 'transmission_type': edge.transmission_type, 'transmission_mechanism': edge.transmission_mechanism, 'direction': edge.direction, 'strength': edge.strength, 'impact': edge.impact, 'is_circular': edge.is_circular, } }) # 閲嶆柊璁$畻缁熻淇℃伅锛堝熀浜庤繛鎺ョ殑鑺傜偣鍜岃竟锛? stats = { 'total_nodes': len(connected_nodes), 'total_edges': len(edges_data), 'node_types': {cat: sum(1 for n in connected_nodes if n.node_type == node_type) for node_type, cat in categories.items()}, 'edge_types': {edge.transmission_type: sum(1 for e in edges_data if e['extra']['transmission_type'] == edge.transmission_type) for edge in edges}, 'avg_importance': sum(node.importance_score for node in connected_nodes) / len( connected_nodes) if connected_nodes else 0, 'avg_strength': sum(edge.strength for edge in edges) / len(edges) if edges else 0 } return jsonify({ 'success': True, 'data': { 'nodes': nodes_data, 'edges': edges_data, 'categories': list(categories.values()), 'stats': stats } }) @app.route('/api/events//chain-node/', methods=['GET']) @cross_origin() def get_chain_node_detail(event_id, node_id): """鑾峰彇浼犲閾捐妭鐐瑰強鍏剁洿鎺ュ叧鑱旇妭鐐圭殑璇︾粏淇℃伅""" node = db.session.get(EventTransmissionNode, node_id) if not node or node.event_id != event_id: return jsonify({'success': False, 'message': '鑺傜偣涓嶅瓨鍦?}) # 楠岃瘉鑺傜偣鏄惁涓哄绔嬭妭鐐? 
    total_connections = (EventTransmissionEdge.query.filter_by(from_node_id=node_id).count() +
                         EventTransmissionEdge.query.filter_by(to_node_id=node_id).count())
    if total_connections == 0 and not node.is_main_event:
        return jsonify({'success': False, 'message': '璇ヨ妭鐐逛负瀛ょ珛鑺傜偣锛屾棤杩炴帴鍏崇郴'})
    # Find the parent nodes that influence the current node.
    parents_info = []
    incoming_edges = EventTransmissionEdge.query.filter_by(to_node_id=node_id).all()
    for edge in incoming_edges:
        parent = db.session.get(EventTransmissionNode, edge.from_node_id)
        if parent:
            parents_info.append({
                'id': parent.id,
                'name': parent.node_name,
                'type': parent.node_type,
                'direction': edge.direction,
                'strength': edge.strength,
                'transmission_type': edge.transmission_type,
                'transmission_mechanism': edge.transmission_mechanism,  # corrected field name
                'is_circular': edge.is_circular,
                'impact': edge.impact
            })
    # Find the child nodes influenced by the current node.
    children_info = []
    outgoing_edges = EventTransmissionEdge.query.filter_by(from_node_id=node_id).all()
    for edge in outgoing_edges:
        child = db.session.get(EventTransmissionNode, edge.to_node_id)
        if child:
            children_info.append({
                'id': child.id,
                'name': child.node_name,
                'type': child.node_type,
                'direction': edge.direction,
                'strength': edge.strength,
                'transmission_type': edge.transmission_type,
                'transmission_mechanism': edge.transmission_mechanism,  # corrected field name
'is_circular': edge.is_circular, 'impact': edge.impact }) node_data = { 'id': node.id, 'name': node.node_name, 'type': node.node_type, 'description': node.node_description, 'importance_score': node.importance_score, 'stock_code': node.stock_code, 'is_main_event': node.is_main_event, 'total_connections': total_connections, 'incoming_connections': len(incoming_edges), 'outgoing_connections': len(outgoing_edges) } return jsonify({ 'success': True, 'data': { 'node': node_data, 'parents': parents_info, 'children': children_info } }) @app.route('/api/events//posts', methods=['GET']) def get_event_posts(event_id): """鑾峰彇浜嬩欢涓嬬殑甯栧瓙""" try: sort_type = request.args.get('sort', 'latest') page = request.args.get('page', 1, type=int) per_page = request.args.get('per_page', 20, type=int) # 鏌ヨ浜嬩欢涓嬬殑甯栧瓙 query = Post.query.filter_by(event_id=event_id, status='active') if sort_type == 'hot': query = query.order_by(Post.likes_count.desc(), Post.created_at.desc()) else: # latest query = query.order_by(Post.created_at.desc()) # 鍒嗛〉 pagination = query.paginate(page=page, per_page=per_page, error_out=False) posts = pagination.items posts_data = [] for post in posts: post_dict = { 'id': post.id, 'event_id': post.event_id, 'user_id': post.user_id, 'title': post.title, 'content': post.content, 'content_type': post.content_type, 'created_at': post.created_at.isoformat(), 'updated_at': post.updated_at.isoformat(), 'likes_count': post.likes_count, 'comments_count': post.comments_count, 'view_count': post.view_count, 'is_top': post.is_top, 'user': { 'id': post.user.id, 'username': post.user.username, 'avatar_url': post.user.avatar_url } if post.user else None, 'liked': False # 鍚庣画鍙互鏍规嵁褰撳墠鐢ㄦ埛鍒ゆ柇 } posts_data.append(post_dict) return jsonify({ 'success': True, 'data': posts_data, 'pagination': { 'page': page, 'per_page': per_page, 'total': pagination.total, 'pages': pagination.pages } }) except Exception as e: print(f"鑾峰彇甯栧瓙澶辫触: {e}") return jsonify({ 'success': False, 'error': str(e) }), 500 
@app.route('/api/events//posts', methods=['POST']) @login_required def create_event_post(event_id): """鍦ㄤ簨浠朵笅鍒涘缓甯栧瓙""" try: data = request.get_json() content = data.get('content', '').strip() title = data.get('title', '').strip() content_type = data.get('content_type', 'text') if not content: return jsonify({ 'success': False, 'message': '甯栧瓙鍐呭涓嶈兘涓虹┖' }), 400 # 鍒涘缓鏂板笘瀛? post = Post( event_id=event_id, user_id=current_user.id, title=title, content=content, content_type=content_type ) db.session.add(post) # 鏇存柊浜嬩欢鐨勫笘瀛愭暟 event = Event.query.get(event_id) if event: event.post_count = Post.query.filter_by(event_id=event_id, status='active').count() # 鏇存柊鐢ㄦ埛鍙戝笘鏁? current_user.post_count = (current_user.post_count or 0) + 1 db.session.commit() return jsonify({ 'success': True, 'data': { 'id': post.id, 'event_id': post.event_id, 'user_id': post.user_id, 'title': post.title, 'content': post.content, 'content_type': post.content_type, 'created_at': post.created_at.isoformat(), 'user': { 'id': current_user.id, 'nickname': current_user.nickname, # 娣诲姞鏄电О锛屼笌瀵艰埅鍖轰繚鎸佷竴鑷? 
'username': current_user.username, 'avatar_url': current_user.avatar_url } }, 'message': '甯栧瓙鍙戝竷鎴愬姛' }) except Exception as e: db.session.rollback() print(f"鍒涘缓甯栧瓙澶辫触: {e}") return jsonify({ 'success': False, 'message': str(e) }), 500 @app.route('/api/posts//comments', methods=['GET']) def get_post_comments(post_id): """鑾峰彇甯栧瓙鐨勮瘎璁?"" try: sort_type = request.args.get('sort', 'latest') # 鏌ヨ甯栧瓙鐨勯《绾ц瘎璁猴紙闈炲洖澶嶏級 query = Comment.query.filter_by(post_id=post_id, parent_id=None, status='active') if sort_type == 'hot': comments = query.order_by(Comment.likes_count.desc(), Comment.created_at.desc()).all() else: # latest comments = query.order_by(Comment.created_at.desc()).all() comments_data = [] for comment in comments: comment_dict = { 'id': comment.id, 'post_id': comment.post_id, 'user_id': comment.user_id, 'content': comment.content, 'created_at': comment.created_at.isoformat(), 'updated_at': comment.updated_at.isoformat(), 'likes_count': comment.likes_count, 'user': { 'id': comment.user.id, 'username': comment.user.username, 'avatar_url': comment.user.avatar_url } if comment.user else None, 'replies': [] # 鍔犺浇鍥炲 } # 鍔犺浇鍥炲 replies = Comment.query.filter_by(parent_id=comment.id, status='active').order_by(Comment.created_at).all() for reply in replies: reply_dict = { 'id': reply.id, 'post_id': reply.post_id, 'user_id': reply.user_id, 'content': reply.content, 'parent_id': reply.parent_id, 'created_at': reply.created_at.isoformat(), 'likes_count': reply.likes_count, 'user': { 'id': reply.user.id, 'username': reply.user.username, 'avatar_url': reply.user.avatar_url } if reply.user else None } comment_dict['replies'].append(reply_dict) comments_data.append(comment_dict) return jsonify({ 'success': True, 'data': comments_data }) except Exception as e: print(f"鑾峰彇璇勮澶辫触: {e}") return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/posts//comments', methods=['POST']) @login_required def create_post_comment(post_id): """鍦ㄥ笘瀛愪笅鍒涘缓璇勮""" try: data = 
request.get_json() content = data.get('content', '').strip() parent_id = data.get('parent_id') if not content: return jsonify({ 'success': False, 'message': '璇勮鍐呭涓嶈兘涓虹┖' }), 400 # 鍒涘缓鏂拌瘎璁? comment = Comment( post_id=post_id, user_id=current_user.id, content=content, parent_id=parent_id ) db.session.add(comment) # 鏇存柊甯栧瓙璇勮鏁? post = Post.query.get(post_id) if post: post.comments_count = Comment.query.filter_by(post_id=post_id, status='active').count() # 鏇存柊鐢ㄦ埛璇勮鏁? current_user.comment_count = (current_user.comment_count or 0) + 1 db.session.commit() return jsonify({ 'success': True, 'data': { 'id': comment.id, 'post_id': comment.post_id, 'user_id': comment.user_id, 'content': comment.content, 'parent_id': comment.parent_id, 'created_at': comment.created_at.isoformat(), 'user': { 'id': current_user.id, 'username': current_user.username, 'avatar_url': current_user.avatar_url } }, 'message': '璇勮鍙戝竷鎴愬姛' }) except Exception as e: db.session.rollback() print(f"鍒涘缓璇勮澶辫触: {e}") return jsonify({ 'success': False, 'message': str(e) }), 500 # 鍏煎鏃х殑璇勮鎺ュ彛锛岃浆鎹负甯栧瓙妯″紡 @app.route('/api/events//comments', methods=['GET']) def get_event_comments(event_id): """鑾峰彇浜嬩欢璇勮锛堝吋瀹规棫鎺ュ彛锛?"" # 灏嗕簨浠惰瘎璁鸿浆鎹负鑾峰彇浜嬩欢涓嬫墍鏈夊笘瀛愮殑璇勮 return get_event_posts(event_id) @app.route('/api/events//comments', methods=['POST']) @login_required def add_event_comment(event_id): """娣诲姞浜嬩欢璇勮锛堝吋瀹规棫鎺ュ彛锛?"" try: data = request.get_json() content = data.get('content', '').strip() parent_id = data.get('parent_id') if not content: return jsonify({ 'success': False, 'message': '璇勮鍐呭涓嶈兘涓虹┖' }), 400 # 濡傛灉鏈?parent_id锛岃鏄庢槸鍥炲锛岄渶瑕佹壘鍒板搴旂殑甯栧瓙 if parent_id: # 杩欐槸涓€涓洖澶嶏紝闇€瑕佸皢鍏惰浆鎹负瀵瑰簲甯栧瓙鐨勮瘎璁? # 棣栧厛闇€瑕佹壘鍒?parent_id 瀵瑰簲鐨勫笘瀛? # 杩欓噷鍋囪鏃х殑 parent_id 鏄箣鍓嶇殑 EventComment id # 闇€瑕佸湪鏁版嵁杩佺Щ鏃跺鐞嗚繖涓槧灏勫叧绯? return jsonify({ 'success': False, 'message': '鍥炲鍔熻兘姝e湪鍗囩骇涓紝璇风◢鍚庡啀璇? }), 503 # 濡傛灉娌℃湁 parent_id锛岃鏄庢槸椤剁骇璇勮锛屽垱寤轰负鏂板笘瀛? 
        post = Post(
            event_id=event_id,
            user_id=current_user.id,
            content=content,
            content_type='text'
        )
        db.session.add(post)
        # Refresh the event's post count.
        event = Event.query.get(event_id)
        if event:
            event.post_count = Post.query.filter_by(event_id=event_id, status='active').count()
        # Update the user's post count.
        current_user.post_count = (current_user.post_count or 0) + 1
        db.session.commit()
        # Return data in the legacy response format.
        return jsonify({
            'success': True,
            'data': {
                'id': post.id,
                'event_id': post.event_id,
                'user_id': post.user_id,
                'author': current_user.username,
                'content': post.content,
                'parent_id': None,
                'likes': 0,
                'created_at': post.created_at.isoformat(),
                'status': 'active',
                'user': {
                    'id': current_user.id,
                    'username': current_user.username,
                    'avatar_url': current_user.avatar_url
                },
                'replies': []
            },
            'message': '璇勮鍙戝竷鎴愬姛'
        })
    except Exception as e:
        db.session.rollback()
        print(f"娣诲姞浜嬩欢璇勮澶辫触: {e}")
        return jsonify({
            'success': False,
            'message': str(e)
        }), 500


# ==================== WebSocket event handlers (real-time event push) ====================
@socketio.on('connect')
def handle_connect():
    """Handle a client connecting; reply with a connection_response event."""
    print(f'\n[WebSocket DEBUG] ========== 瀹㈡埛绔繛鎺?==========')
    print(f'[WebSocket DEBUG] Socket ID: {request.sid}')
    print(f'[WebSocket DEBUG] Remote Address: {request.remote_addr if hasattr(request, "remote_addr") else "N/A"}')
    print(f'[WebSocket] 瀹㈡埛绔凡杩炴帴: {request.sid}')
    emit('connection_response', {
        'status': 'connected',
        'sid': request.sid,
        'message': '宸茶繛鎺ュ埌浜嬩欢鎺ㄩ€佹湇鍔?'
    })
    print(f'[WebSocket DEBUG] 鉁?宸插彂閫?connection_response')
    print(f'[WebSocket DEBUG] ========== 杩炴帴瀹屾垚 ==========\n')


@socketio.on('subscribe_events')
def handle_subscribe(data):
    """
    Client subscribes to event pushes.
    data: {
        'event_type': 'all' | 'policy' | 'market' | 'tech' | ...,
        'importance': 'all' | 'S' | 'A' | 'B' | 'C',
        'filters': {...}  # optional extra filter conditions
} """ try: print(f'\n[WebSocket DEBUG] ========== 鏀跺埌璁㈤槄璇锋眰 ==========') print(f'[WebSocket DEBUG] Socket ID: {request.sid}') print(f'[WebSocket DEBUG] 璁㈤槄鏁版嵁: {data}') event_type = data.get('event_type', 'all') importance = data.get('importance', 'all') print(f'[WebSocket DEBUG] 浜嬩欢绫诲瀷: {event_type}') print(f'[WebSocket DEBUG] 閲嶈鎬? {importance}') # 鍔犲叆瀵瑰簲鐨勬埧闂? room_name = f"events_{event_type}" print(f'[WebSocket DEBUG] 鍑嗗鍔犲叆鎴块棿: {room_name}') join_room(room_name) print(f'[WebSocket DEBUG] 鉁?宸插姞鍏ユ埧闂? {room_name}') print(f'[WebSocket] 瀹㈡埛绔?{request.sid} 璁㈤槄浜嗘埧闂? {room_name}') response_data = { 'success': True, 'room': room_name, 'event_type': event_type, 'importance': importance, 'message': f'宸茶闃?{event_type} 绫诲瀷鐨勪簨浠舵帹閫? } print(f'[WebSocket DEBUG] 鍑嗗鍙戦€?subscription_confirmed: {response_data}') emit('subscription_confirmed', response_data) print(f'[WebSocket DEBUG] 鉁?宸插彂閫?subscription_confirmed') print(f'[WebSocket DEBUG] ========== 璁㈤槄瀹屾垚 ==========\n') except Exception as e: print(f'[WebSocket ERROR] 璁㈤槄澶辫触: {e}') import traceback traceback.print_exc() emit('subscription_error', { 'success': False, 'error': str(e) }) @socketio.on('unsubscribe_events') def handle_unsubscribe(data): """鍙栨秷璁㈤槄浜嬩欢鎺ㄩ€?"" try: print(f'\n[WebSocket DEBUG] ========== 鏀跺埌鍙栨秷璁㈤槄璇锋眰 ==========') print(f'[WebSocket DEBUG] Socket ID: {request.sid}') print(f'[WebSocket DEBUG] 鏁版嵁: {data}') event_type = data.get('event_type', 'all') room_name = f"events_{event_type}" print(f'[WebSocket DEBUG] 鍑嗗绂诲紑鎴块棿: {room_name}') leave_room(room_name) print(f'[WebSocket DEBUG] 鉁?宸茬寮€鎴块棿: {room_name}') print(f'[WebSocket] 瀹㈡埛绔?{request.sid} 鍙栨秷璁㈤槄鎴块棿: {room_name}') emit('unsubscription_confirmed', { 'success': True, 'room': room_name, 'message': f'宸插彇娑堣闃?{event_type} 绫诲瀷鐨勪簨浠舵帹閫? 
        })
        print(f'[WebSocket DEBUG] ========== 鍙栨秷璁㈤槄瀹屾垚 ==========\n')
    except Exception as e:
        print(f'[WebSocket ERROR] 鍙栨秷璁㈤槄澶辫触: {e}')
        import traceback
        traceback.print_exc()
        emit('unsubscription_error', {
            'success': False,
            'error': str(e)
        })


@socketio.on('disconnect')
def handle_disconnect():
    """Handle a client disconnecting (debug logging only)."""
    print(f'\n[WebSocket DEBUG] ========== 瀹㈡埛绔柇寮€ ==========')
    print(f'[WebSocket DEBUG] Socket ID: {request.sid}')
    print(f'[WebSocket] 瀹㈡埛绔凡鏂紑: {request.sid}')
    print(f'[WebSocket DEBUG] ========== 鏂紑瀹屾垚 ==========\n')


# ==================== WebSocket helper functions ====================
def broadcast_new_event(event):
    """
    Broadcast a new event to all subscribed clients.
    Call this function when a new event is created.

    Args:
        event: an Event model instance
    """
    try:
        print(f'\n[WebSocket DEBUG] ========== 骞挎挱鏂颁簨浠?==========')
        print(f'[WebSocket DEBUG] 浜嬩欢ID: {event.id}')
        print(f'[WebSocket DEBUG] 浜嬩欢鏍囬: {event.title}')
        print(f'[WebSocket DEBUG] 浜嬩欢绫诲瀷: {event.event_type}')
        print(f'[WebSocket DEBUG] 閲嶈鎬? {event.importance}')
        event_data = {
            'id': event.id,
            'title': event.title,
            'description': event.description,
            'event_type': event.event_type,
            'importance': event.importance,
            'status': event.status,
            'created_at': event.created_at.isoformat() if event.created_at else None,
            'hot_score': event.hot_score,
            'view_count': event.view_count,
            'related_avg_chg': event.related_avg_chg,
            'related_max_chg': event.related_max_chg,
            'keywords': event.keywords_list if hasattr(event, 'keywords_list') else event.keywords,
        }
        print(f'[WebSocket DEBUG] 鍑嗗鍙戦€佺殑鏁版嵁: {event_data}')
        # Send to every subscriber of the "all" room.
        print(f'[WebSocket DEBUG] 姝e湪鍙戦€佸埌鎴块棿: events_all')
        socketio.emit('new_event', event_data, room='events_all', namespace='/')
        print(f'[WebSocket DEBUG] 鉁?宸插彂閫佸埌 events_all')
        # Send to subscribers of the specific event type.
        if event.event_type:
            room_name = f"events_{event.event_type}"
            print(f'[WebSocket DEBUG] 姝e湪鍙戦€佸埌鎴块棿: {room_name}')
            socketio.emit('new_event', event_data, room=room_name, namespace='/')
            print(f'[WebSocket DEBUG] 鉁?宸插彂閫佸埌 {room_name}')
            print(f'[WebSocket] 宸叉帹閫佹柊浜嬩欢鍒版埧闂? events_all, {room_name}')
        else:
            print(f'[WebSocket] 宸叉帹閫佹柊浜嬩欢鍒版埧闂? events_all')
        print(f'[WebSocket DEBUG] ========== 骞挎挱瀹屾垚 ==========\n')
    except Exception as e:
        print(f'[WebSocket ERROR] 鎺ㄩ€佹柊浜嬩欢澶辫触: {e}')
        import traceback
        traceback.print_exc()


# ==================== WebSocket polling mechanism (detect new events) ====================
# Redis keys used for multi-worker coordination.
REDIS_KEY_LAST_MAX_EVENT_ID = 'vf:event_polling:last_max_id'
REDIS_KEY_POLLING_LOCK = 'vf:event_polling:lock'
# Set of events awaiting push (events without related_stocks yet).
REDIS_KEY_PENDING_EVENTS = 'vf:event_polling:pending_events'
# Local cache (reduces Redis round-trips).
_local_last_max_event_id = 0
_polling_initialized = False


def _add_pending_event(event_id):
    """Add an event to the pending-push set (best-effort; logs on failure)."""
    try:
        redis_client.sadd(REDIS_KEY_PENDING_EVENTS, str(event_id))
    except Exception as e:
        print(f'[杞 WARN] 娣诲姞寰呮帹閫佷簨浠跺け璐? {e}')


def _remove_pending_event(event_id):
    """Remove an event from the pending-push set (best-effort; logs on failure)."""
    try:
        redis_client.srem(REDIS_KEY_PENDING_EVENTS, str(event_id))
    except Exception as e:
        print(f'[杞 WARN] 绉婚櫎寰呮帹閫佷簨浠跺け璐? {e}')


def _get_pending_events():
    """Return all pending event IDs as a list of ints ([] on failure)."""
    try:
        pending = redis_client.smembers(REDIS_KEY_PENDING_EVENTS)
        return [int(eid) for eid in pending] if pending else []
    except Exception as e:
        print(f'[杞 WARN] 鑾峰彇寰呮帹閫佷簨浠跺け璐? {e}')
        return []


def _get_last_max_event_id():
    """Read the last max event ID from Redis; fall back to the local cache."""
    try:
        val = redis_client.get(REDIS_KEY_LAST_MAX_EVENT_ID)
        return int(val) if val else 0
    except Exception as e:
        print(f'[杞 WARN] 璇诲彇 Redis 澶辫触: {e}')
        return _local_last_max_event_id


def _set_last_max_event_id(new_id):
    """Write the max event ID to Redis and mirror it in the local cache."""
    global _local_last_max_event_id
    try:
        redis_client.set(REDIS_KEY_LAST_MAX_EVENT_ID, str(new_id))
        _local_last_max_event_id = new_id
    except Exception as e:
        print(f'[杞 WARN] 鍐欏叆 Redis 澶辫触: {e}')
        _local_last_max_event_id = new_id


def poll_new_events():
    """
    Poll the database periodically for new events (runs every 30 seconds).

    Multi-worker coordination:
    1. A Redis distributed lock ensures only one worker polls at a time.
    2. last_max_event_id is stored in Redis and shared by all workers.
    3.
    Broadcasts reach the clients of every worker via the Redis message queue.

    Pending-event mechanism:
    - An event first detected without related_stocks goes onto the pending list.
    - Every poll re-checks whether pending events now have related_stocks.
    - Matching events are pushed and removed; events older than 24 h are purged.
    """
    import os
    try:
        # Try to acquire the distributed lock (30 s expiry guards against deadlock).
        lock_acquired = redis_client.set(
            REDIS_KEY_POLLING_LOCK,
            os.getpid(),
            nx=True,  # set only when absent
            ex=30     # auto-expires after 30 seconds
        )
        if not lock_acquired:
            # Another worker is polling; skip this round.
            return
        with app.app_context():
            from datetime import datetime, timedelta
            current_time = datetime.now()
            last_max_event_id = _get_last_max_event_id()
            print(f'\n[杞] ========== 寮€濮嬭疆璇?(PID: {os.getpid()}) ==========')
            print(f'[杞] 褰撳墠鏃堕棿: {current_time.strftime("%Y-%m-%d %H:%M:%S")}')
            print(f'[杞] 褰撳墠鏈€澶т簨浠禝D: {last_max_event_id}')
            # Query all active events of the last 24 hours (by created_at).
            time_24h_ago = current_time - timedelta(hours=24)
            events_in_24h = Event.query.filter(
                Event.created_at >= time_24h_ago,
                Event.status == 'active'
            ).order_by(Event.id.asc()).all()
            print(f'[杞] 鏁版嵁搴撴煡璇? 鎵惧埌 {len(events_in_24h)} 涓繎24灏忔椂鍐呯殑浜嬩欢')
            # Map event ID -> event object.
            events_map = {event.id: event for event in events_in_24h}
            # === Step 1: check events on the pending-push list ===
            pending_event_ids = _get_pending_events()
            print(f'[杞] 寰呮帹閫佸垪琛? {len(pending_event_ids)} 涓簨浠?')
            pushed_from_pending = 0
            for pending_id in pending_event_ids:
                if pending_id in events_map:
                    event = events_map[pending_id]
                    related_stocks_count = event.related_stocks.count()
                    if related_stocks_count > 0:
                        # The event now has related stocks — push it.
                        broadcast_new_event(event)
                        _remove_pending_event(pending_id)
                        pushed_from_pending += 1
                        print(f'[杞] 鉁?寰呮帹閫佷簨浠?ID={pending_id} 鐜板湪鏈?{related_stocks_count} 涓叧鑱旇偂绁紝宸叉帹閫?')
                    else:
                        print(f'[杞] - 寰呮帹閫佷簨浠?ID={pending_id} 浠嶆棤鍏宠仈鑲$エ锛岀户缁瓑寰?')
                else:
                    # Older than 24 hours or deleted — drop from the pending list.
                    _remove_pending_event(pending_id)
                    print(f'[杞] 脳 寰呮帹閫佷簨浠?ID={pending_id} 宸茶繃鏈熸垨涓嶅瓨鍦紝宸茬Щ闄?')
            if pushed_from_pending > 0:
                print(f'[杞] 浠庡緟鎺ㄩ€佸垪琛ㄦ帹閫佷簡 {pushed_from_pending} 涓簨浠?')
# === 姝ラ2: 妫€鏌ユ柊浜嬩欢 === # 鎵惧嚭鏂版彃鍏ョ殑浜嬩欢锛圛D > last_max_event_id锛? new_events = [ event for event in events_in_24h if event.id > last_max_event_id ] print(f'[杞] 鏂颁簨浠舵暟閲忥紙ID > {last_max_event_id}锛? {len(new_events)} 涓?) if new_events: print(f'[杞] 鍙戠幇 {len(new_events)} 涓柊浜嬩欢') pushed_count = 0 pending_count = 0 for event in new_events: # 妫€鏌ヤ簨浠舵槸鍚︽湁鍏宠仈鑲$エ锛堝彧鎺ㄩ€佹湁鍏宠仈鑲$エ鐨勪簨浠讹級 related_stocks_count = event.related_stocks.count() print(f'[杞] 浜嬩欢 ID={event.id}: {event.title} (鍏宠仈鑲$エ: {related_stocks_count})') # 鍙帹閫佹湁鍏宠仈鑲$エ鐨勪簨浠? if related_stocks_count > 0: broadcast_new_event(event) pushed_count += 1 print(f'[杞] 鉁?宸叉帹閫佷簨浠?ID={event.id}') else: # 娌℃湁鍏宠仈鑲$エ锛屽姞鍏ュ緟鎺ㄩ€佸垪琛? _add_pending_event(event.id) pending_count += 1 print(f'[杞] 鈫?鍔犲叆寰呮帹閫佸垪琛紙鏆傛棤鍏宠仈鑲$エ锛?) print(f'[杞] 鏈疆: 鎺ㄩ€?{pushed_count} 涓? 鍔犲叆寰呮帹閫?{pending_count} 涓?) # 鏇存柊鏈€澶т簨浠禝D new_max_id = max(event.id for event in events_in_24h) _set_last_max_event_id(new_max_id) print(f'[杞] 鏇存柊鏈€澶т簨浠禝D: {last_max_event_id} -> {new_max_id}') else: # 鍗充娇娌℃湁鏂颁簨浠讹紝涔熻鏇存柊鏈€澶D锛堥槻姝㈢姸鎬佷笉鍚屾锛? if events_in_24h: current_max_id = max(event.id for event in events_in_24h) if current_max_id != last_max_event_id: _set_last_max_event_id(current_max_id) print(f'[杞] ========== 杞缁撴潫 ==========\n') except Exception as e: print(f'[杞 ERROR] 妫€鏌ユ柊浜嬩欢鏃跺嚭閿? {e}') import traceback traceback.print_exc() finally: # 閲婃斁閿? try: redis_client.delete(REDIS_KEY_POLLING_LOCK) except: pass def initialize_event_polling(): """ 鍒濆鍖栦簨浠惰疆璇㈡満鍒? 鍦ㄥ簲鐢ㄥ惎鍔ㄦ椂璋冪敤锛堟敮鎸?gunicorn 澶?Worker锛? """ global _polling_initialized # 闃叉閲嶅鍒濆鍖? if _polling_initialized: print('[杞] 宸茬粡鍒濆鍖栬繃锛岃烦杩?) return try: from datetime import datetime, timedelta import os with app.app_context(): current_time = datetime.now() time_24h_ago = current_time - timedelta(hours=24) print(f'\n[杞] ========== 鍒濆鍖栦簨浠惰疆璇?(PID: {os.getpid()}) ==========') print(f'[杞] 褰撳墠鏃堕棿: {current_time.strftime("%Y-%m-%d %H:%M:%S")}') # 鏌ヨ鏁版嵁搴撲腑鏈€澶х殑浜嬩欢 ID锛堜笉闄愪簬 24 灏忔椂锛? 
            max_event = Event.query.filter_by(status='active').order_by(Event.id.desc()).first()
            db_max_id = max_event.id if max_event else 0
            # Current max ID saved in Redis.
            current_redis_max = _get_last_max_event_id()
            print(f'[杞] 鏁版嵁搴撴渶澶т簨浠禝D: {db_max_id}')
            print(f'[杞] Redis 涓殑鏈€澶т簨浠禝D: {current_redis_max}')
            # Prefer the database max ID to avoid pushing historical events;
            # only update when Redis is 0 or behind the database.
            if current_redis_max == 0 or db_max_id > current_redis_max:
                _set_last_max_event_id(db_max_id)
                print(f'[杞] 鏇存柊鏈€澶т簨浠禝D涓? {db_max_id}锛堥伩鍏嶆帹閫佸巻鍙蹭簨浠讹級')
            else:
                print(f'[杞] 淇濇寔 Redis 涓殑鏈€澶т簨浠禝D: {current_redis_max}')
            # Stats: total active events in the database.
            total_events = Event.query.filter_by(status='active').count()
            events_in_24h_count = Event.query.filter(
                Event.created_at >= time_24h_ago,
                Event.status == 'active'
            ).count()
            print(f'[杞] 鏁版嵁搴撲腑鍏辨湁 {total_events} 涓椿璺冧簨浠讹紙鍏朵腑杩?4灏忔椂: {events_in_24h_count} 涓級')
            print(f'[杞] 鍙細鎺ㄩ€?ID > {max(current_redis_max, db_max_id)} 鐨勬柊浜嬩欢')
            print(f'[杞] ========== 鍒濆鍖栧畬鎴?==========\n')
        # Detect whether we are running under eventlet.
        is_eventlet = False
        try:
            import eventlet
            # Check whether eventlet has monkey-patched sockets.
            if hasattr(eventlet, 'patcher') and eventlet.patcher.is_monkey_patched('socket'):
                is_eventlet = True
        except ImportError:
            pass
        if is_eventlet:
            # Eventlet environment: use an eventlet green-thread timer loop.
            print(f'[杞] 妫€娴嬪埌 Eventlet 鐜锛屼娇鐢?eventlet 鍗忕▼璋冨害鍣?')

            def eventlet_polling_loop():
                """Eventlet-compatible polling loop."""
                import eventlet
                while True:
                    try:
                        eventlet.sleep(30)  # wait 30 seconds
                        poll_new_events()
                    except Exception as e:
                        print(f'[杞 ERROR] Eventlet 杞寰幆鍑洪敊: {e}')
                        import traceback
                        traceback.print_exc()
                        eventlet.sleep(30)  # wait 30 s after an error, then continue

            # Spawn the eventlet green thread.
            eventlet.spawn(eventlet_polling_loop)
            print(f'[杞] Eventlet 鍗忕▼璋冨害鍣ㄥ凡鍚姩 (PID: {os.getpid()})锛屾瘡 30 绉掓鏌ヤ竴娆℃柊浜嬩欢')
        else:
            # Non-eventlet environment: use APScheduler.
            print(f'[杞] 浣跨敤 APScheduler BackgroundScheduler')
            scheduler = BackgroundScheduler()
            # Poll every 30 seconds.
scheduler.add_job( func=poll_new_events, trigger='interval', seconds=30, id='poll_new_events', name='妫€鏌ユ柊浜嬩欢骞舵帹閫?, replace_existing=True ) scheduler.start() print(f'[杞] APScheduler 璋冨害鍣ㄥ凡鍚姩 (PID: {os.getpid()})锛屾瘡 30 绉掓鏌ヤ竴娆℃柊浜嬩欢') _polling_initialized = True except Exception as e: print(f'[杞] 鍒濆鍖栧け璐? {e}') import traceback traceback.print_exc() # ==================== Gunicorn 鍏煎锛氳嚜鍔ㄥ垵濮嬪寲杞 ==================== # Redis key 鐢ㄤ簬纭繚鍙湁涓€涓?Worker 鍚姩璋冨害鍣? REDIS_KEY_SCHEDULER_LOCK = 'vf:event_polling:scheduler_lock' def _auto_init_polling(): """ 鑷姩鍒濆鍖栦簨浠惰疆璇紙鍏煎 gunicorn锛? 浣跨敤 Redis 閿佺‘淇濇暣涓泦缇ゅ彧鏈変竴涓?Worker 鍚姩璋冨害鍣? """ global _polling_initialized import os if _polling_initialized: return try: # 灏濊瘯鑾峰彇璋冨害鍣ㄩ攣锛?0鍒嗛挓杩囨湡锛岄槻姝㈡閿侊級 lock_acquired = redis_client.set( REDIS_KEY_SCHEDULER_LOCK, str(os.getpid()), nx=True, # 鍙湪涓嶅瓨鍦ㄦ椂璁剧疆 ex=600 # 10鍒嗛挓杩囨湡 ) if lock_acquired: print(f'[杞] Worker {os.getpid()} 鑾峰緱璋冨害鍣ㄩ攣锛屽惎鍔ㄨ疆璇㈣皟搴﹀櫒') initialize_event_polling() else: # 鍏朵粬 Worker 宸茬粡鍚姩浜嗚皟搴﹀櫒 _polling_initialized = True # 鏍囪涓哄凡鍒濆鍖栵紝閬垮厤閲嶅灏濊瘯 print(f'[杞] Worker {os.getpid()} 璺宠繃璋冨害鍣ㄥ垵濮嬪寲锛堝凡鐢卞叾浠?Worker 鍚姩锛?) except Exception as e: print(f'[杞] 鑷姩鍒濆鍖栧け璐? {e}') # 娉ㄥ唽 before_request 閽╁瓙锛岀‘淇?gunicorn 鍚姩鍚庝篃鑳藉垵濮嬪寲杞 @app.before_request def ensure_polling_initialized(): """纭繚杞鏈哄埗宸插垵濮嬪寲锛堝彧鎵ц涓€娆★級""" global _polling_initialized if not _polling_initialized: _auto_init_polling() # ==================== 缁撴潫 WebSocket 閮ㄥ垎 ==================== @app.route('/api/posts//like', methods=['POST']) @login_required def like_post(post_id): """鐐硅禐/鍙栨秷鐐硅禐甯栧瓙""" try: post = Post.query.get_or_404(post_id) # 妫€鏌ユ槸鍚﹀凡缁忕偣璧? 
existing_like = PostLike.query.filter_by( post_id=post_id, user_id=current_user.id ).first() if existing_like: # 鍙栨秷鐐硅禐 db.session.delete(existing_like) post.likes_count = max(0, post.likes_count - 1) message = '鍙栨秷鐐硅禐鎴愬姛' liked = False else: # 娣诲姞鐐硅禐 new_like = PostLike(post_id=post_id, user_id=current_user.id) db.session.add(new_like) post.likes_count += 1 message = '鐐硅禐鎴愬姛' liked = True db.session.commit() return jsonify({ 'success': True, 'message': message, 'likes_count': post.likes_count, 'liked': liked }) except Exception as e: db.session.rollback() print(f"鐐硅禐澶辫触: {e}") return jsonify({ 'success': False, 'message': str(e) }), 500 @app.route('/api/comments//like', methods=['POST']) @login_required def like_comment(comment_id): """鐐硅禐/鍙栨秷鐐硅禐璇勮""" try: comment = Comment.query.get_or_404(comment_id) # 妫€鏌ユ槸鍚﹀凡缁忕偣璧烇紙闇€瑕佸垱寤?CommentLike 鍏宠仈鍒版柊鐨?Comment 妯″瀷锛? # 鏆傛椂浣跨敤绠€鍗曠殑璁℃暟鍣? comment.likes_count += 1 db.session.commit() return jsonify({ 'success': True, 'message': '鐐硅禐鎴愬姛', 'likes_count': comment.likes_count }) except Exception as e: db.session.rollback() print(f"鐐硅禐澶辫触: {e}") return jsonify({ 'success': False, 'message': str(e) }), 500 @app.route('/api/posts/', methods=['DELETE']) @login_required def delete_post(post_id): """鍒犻櫎甯栧瓙""" try: post = Post.query.get_or_404(post_id) # 妫€鏌ユ潈闄愶細鍙兘鍒犻櫎鑷繁鐨勫笘瀛? if post.user_id != current_user.id: return jsonify({ 'success': False, 'message': '鎮ㄥ彧鑳藉垹闄よ嚜宸辩殑甯栧瓙' }), 403 # 杞垹闄? post.status = 'deleted' # 鏇存柊浜嬩欢鐨勫笘瀛愭暟 event = Event.query.get(post.event_id) if event: event.post_count = Post.query.filter_by(event_id=post.event_id, status='active').count() # 鏇存柊鐢ㄦ埛鍙戝笘鏁? 
if current_user.post_count > 0: current_user.post_count -= 1 db.session.commit() return jsonify({ 'success': True, 'message': '甯栧瓙鍒犻櫎鎴愬姛' }) except Exception as e: db.session.rollback() print(f"鍒犻櫎甯栧瓙澶辫触: {e}") return jsonify({ 'success': False, 'message': str(e) }), 500 @app.route('/api/comments/', methods=['DELETE']) @login_required def delete_comment(comment_id): """鍒犻櫎璇勮""" try: comment = Comment.query.get_or_404(comment_id) # 妫€鏌ユ潈闄愶細鍙兘鍒犻櫎鑷繁鐨勮瘎璁? if comment.user_id != current_user.id: return jsonify({ 'success': False, 'message': '鎮ㄥ彧鑳藉垹闄よ嚜宸辩殑璇勮' }), 403 # 杞垹闄? comment.status = 'deleted' comment.content = '[璇ヨ瘎璁哄凡琚垹闄' # 鏇存柊甯栧瓙璇勮鏁? post = Post.query.get(comment.post_id) if post: post.comments_count = Comment.query.filter_by(post_id=comment.post_id, status='active').count() # 鏇存柊鐢ㄦ埛璇勮鏁? if current_user.comment_count > 0: current_user.comment_count -= 1 db.session.commit() return jsonify({ 'success': True, 'message': '璇勮鍒犻櫎鎴愬姛' }) except Exception as e: db.session.rollback() print(f"鍒犻櫎璇勮澶辫触: {e}") return jsonify({ 'success': False, 'message': str(e) }), 500 def format_decimal(value): """鏍煎紡鍖杁ecimal绫诲瀷鏁版嵁""" if value is None: return None if isinstance(value, Decimal): return float(value) return float(value) def format_date(date_obj): """鏍煎紡鍖栨棩鏈?"" if date_obj is None: return None if isinstance(date_obj, datetime): return date_obj.strftime('%Y-%m-%d') return str(date_obj) def remove_cycles_from_sankey_flows(flows_data): """ 绉婚櫎Sankey鍥炬暟鎹腑鐨勫惊鐜竟锛岀‘淇濇暟鎹槸DAG锛堟湁鍚戞棤鐜浘锛? 浣跨敤鎷撴墤鎺掑簭绠楁硶妫€娴嬪惊鐜紝浼樺厛淇濈暀flow_ratio楂樼殑杈? Args: flows_data: list of flow objects with 'source', 'target', 'flow_metrics' keys Returns: list of flows without cycles """ if not flows_data: return flows_data # 鎸塮low_ratio闄嶅簭鎺掑簭锛屼紭鍏堜繚鐣欓噸瑕佺殑杈? sorted_flows = sorted( flows_data, key=lambda x: x.get('flow_metrics', {}).get('flow_ratio', 0) or 0, reverse=True ) # 鏋勫缓鍥剧殑閭绘帴琛ㄥ拰鍏ュ害琛? 
def build_graph(flows): graph = {} # node -> list of successors in_degree = {} # node -> in-degree count all_nodes = set() for flow in flows: source = flow['source']['node_name'] target = flow['target']['node_name'] all_nodes.add(source) all_nodes.add(target) if source not in graph: graph[source] = [] graph[source].append(target) if target not in in_degree: in_degree[target] = 0 in_degree[target] += 1 if source not in in_degree: in_degree[source] = 0 return graph, in_degree, all_nodes # 浣跨敤Kahn绠楁硶妫€娴嬫槸鍚︽湁鐜? def has_cycle(graph, in_degree, all_nodes): # 鎵惧埌鎵€鏈夊叆搴︿负0鐨勮妭鐐? queue = [node for node in all_nodes if in_degree.get(node, 0) == 0] visited_count = 0 while queue: node = queue.pop(0) visited_count += 1 # 璁块棶鎵€鏈夐偦灞? for neighbor in graph.get(node, []): in_degree[neighbor] -= 1 if in_degree[neighbor] == 0: queue.append(neighbor) # 濡傛灉璁块棶鐨勮妭鐐规暟绛変簬鎬昏妭鐐规暟锛岃鏄庢病鏈夌幆 return visited_count < len(all_nodes) # 閫愪釜娣诲姞杈癸紝濡傛灉娣诲姞鍚庝骇鐢熺幆鍒欒烦杩? result_flows = [] for flow in sorted_flows: # 灏濊瘯娣诲姞杩欐潯杈? temp_flows = result_flows + [flow] # 妫€鏌ユ槸鍚︿骇鐢熺幆 graph, in_degree, all_nodes = build_graph(temp_flows) # 澶嶅埗in_degree鐢ㄤ簬妫€娴嬶紙鍥犱负妫€娴嬭繃绋嬩細淇敼瀹冿級 in_degree_copy = in_degree.copy() if not has_cycle(graph, in_degree_copy, all_nodes): # 娌℃湁浜х敓鐜紝鍙互娣诲姞 result_flows.append(flow) else: # 浜х敓鐜紝璺宠繃杩欐潯杈? print(f"Skipping edge that creates cycle: {flow['source']['node_name']} -> {flow['target']['node_name']}") removed_count = len(flows_data) - len(result_flows) if removed_count > 0: print(f"Removed {removed_count} edges to eliminate cycles in Sankey diagram") return result_flows def get_report_type(date_str): """鑾峰彇鎶ュ憡鏈熺被鍨?"" if not date_str: return '' if isinstance(date_str, str): date = datetime.strptime(date_str, '%Y-%m-%d') else: date = date_str month = date.month year = date.year if month == 3: return f"{year}骞翠竴瀛f姤" elif month == 6: return f"{year}骞翠腑鎶? elif month == 9: return f"{year}骞翠笁瀛f姤" elif month == 12: return f"{year}骞村勾鎶? 
else: return str(date_str) @app.route('/api/financial/stock-info/', methods=['GET']) def get_stock_info(seccode): """鑾峰彇鑲$エ鍩烘湰淇℃伅鍜屾渶鏂拌储鍔℃憳瑕?"" try: # 鑾峰彇鏈€鏂扮殑璐㈠姟鏁版嵁 query = text(""" SELECT distinct a.SECCODE, a.SECNAME, a.ENDDATE, a.F003N as eps, a.F004N as basic_eps, a.F005N as diluted_eps, a.F006N as deducted_eps, a.F007N as undistributed_profit_ps, a.F008N as bvps, a.F010N as capital_reserve_ps, a.F014N as roe, a.F067N as roe_weighted, a.F016N as roa, a.F078N as gross_margin, a.F017N as net_margin, a.F089N as revenue, a.F101N as net_profit, a.F102N as parent_net_profit, a.F118N as total_assets, a.F121N as total_liabilities, a.F128N as total_equity, a.F052N as revenue_growth, a.F053N as profit_growth, a.F054N as equity_growth, a.F056N as asset_growth, a.F122N as share_capital FROM ea_financialindex a WHERE a.SECCODE = :seccode ORDER BY a.ENDDATE DESC LIMIT 1 """) with engine.connect() as conn: result = conn.execute(query, {'seccode': seccode}).fetchone() if not result: return jsonify({ 'success': False, 'message': f'鏈壘鍒拌偂绁ㄤ唬鐮?{seccode} 鐨勮储鍔℃暟鎹? 
}), 404 # 鑾峰彇鏈€杩戠殑涓氱哗棰勫憡 forecast_query = text(""" SELECT distinct F001D as report_date, F003V as forecast_type, F004V as content, F007N as profit_lower, F008N as profit_upper, F009N as change_lower, F010N as change_upper FROM ea_forecast WHERE SECCODE = :seccode AND F006C = 'T' ORDER BY F001D DESC LIMIT 1 """) with engine.connect() as conn: forecast_result = conn.execute(forecast_query, {'seccode': seccode}).fetchone() data = { 'stock_code': result.SECCODE, 'stock_name': result.SECNAME, 'latest_period': format_date(result.ENDDATE), 'report_type': get_report_type(result.ENDDATE), 'key_metrics': { 'eps': format_decimal(result.eps), 'basic_eps': format_decimal(result.basic_eps), 'diluted_eps': format_decimal(result.diluted_eps), 'deducted_eps': format_decimal(result.deducted_eps), 'bvps': format_decimal(result.bvps), 'roe': format_decimal(result.roe), 'roe_weighted': format_decimal(result.roe_weighted), 'roa': format_decimal(result.roa), 'gross_margin': format_decimal(result.gross_margin), 'net_margin': format_decimal(result.net_margin), }, 'financial_summary': { 'revenue': format_decimal(result.revenue), 'net_profit': format_decimal(result.net_profit), 'parent_net_profit': format_decimal(result.parent_net_profit), 'total_assets': format_decimal(result.total_assets), 'total_liabilities': format_decimal(result.total_liabilities), 'total_equity': format_decimal(result.total_equity), 'share_capital': format_decimal(result.share_capital), }, 'growth_rates': { 'revenue_growth': format_decimal(result.revenue_growth), 'profit_growth': format_decimal(result.profit_growth), 'equity_growth': format_decimal(result.equity_growth), 'asset_growth': format_decimal(result.asset_growth), } } # 娣诲姞涓氱哗棰勫憡淇℃伅 if forecast_result: data['latest_forecast'] = { 'report_date': format_date(forecast_result.report_date), 'forecast_type': forecast_result.forecast_type, 'content': forecast_result.content, 'profit_range': { 'lower': format_decimal(forecast_result.profit_lower), 'upper': 
format_decimal(forecast_result.profit_upper), }, 'change_range': { 'lower': format_decimal(forecast_result.change_lower), 'upper': format_decimal(forecast_result.change_upper), } } return jsonify({ 'success': True, 'data': data }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/financial/balance-sheet/', methods=['GET']) def get_balance_sheet(seccode): """鑾峰彇瀹屾暣鐨勮祫浜ц礋鍊鸿〃鏁版嵁""" try: limit = request.args.get('limit', 12, type=int) query = text(""" SELECT distinct ENDDATE, DECLAREDATE, -- 娴佸姩璧勪骇 F006N as cash, -- 璐у竵璧勯噾 F007N as trading_financial_assets, -- 浜ゆ槗鎬ч噾铻嶈祫浜? F008N as notes_receivable, -- 搴旀敹绁ㄦ嵁 F009N as accounts_receivable, -- 搴旀敹璐︽ F010N as prepayments, -- 棰勪粯娆鹃」 F011N as other_receivables, -- 鍏朵粬搴旀敹娆? F013N as interest_receivable, -- 搴旀敹鍒╂伅 F014N as dividends_receivable, -- 搴旀敹鑲″埄 F015N as inventory, -- 瀛樿揣 F016N as consumable_biological_assets, -- 娑堣€楁€х敓鐗╄祫浜? F017N as non_current_assets_due_within_one_year, -- 涓€骞村唴鍒版湡鐨勯潪娴佸姩璧勪骇 F018N as other_current_assets, -- 鍏朵粬娴佸姩璧勪骇 F019N as total_current_assets, -- 娴佸姩璧勪骇鍚堣 -- 闈炴祦鍔ㄨ祫浜? F020N as available_for_sale_financial_assets, -- 鍙緵鍑哄敭閲戣瀺璧勪骇 F021N as held_to_maturity_investments, -- 鎸佹湁鑷冲埌鏈熸姇璧? F022N as long_term_receivables, -- 闀挎湡搴旀敹娆? F023N as long_term_equity_investments, -- 闀挎湡鑲℃潈鎶曡祫 F024N as investment_property, -- 鎶曡祫鎬ф埧鍦颁骇 F025N as fixed_assets, -- 鍥哄畾璧勪骇 F026N as construction_in_progress, -- 鍦ㄥ缓宸ョ▼ F027N as engineering_materials, -- 宸ョ▼鐗╄祫 F029N as productive_biological_assets, -- 鐢熶骇鎬х敓鐗╄祫浜? F030N as oil_and_gas_assets, -- 娌规皵璧勪骇 F031N as intangible_assets, -- 鏃犲舰璧勪骇 F032N as development_expenditure, -- 寮€鍙戞敮鍑? F033N as goodwill, -- 鍟嗚獕 F034N as long_term_deferred_expenses, -- 闀挎湡寰呮憡璐圭敤 F035N as deferred_tax_assets, -- 閫掑欢鎵€寰楃◣璧勪骇 F036N as other_non_current_assets, -- 鍏朵粬闈炴祦鍔ㄨ祫浜? F037N as total_non_current_assets, -- 闈炴祦鍔ㄨ祫浜у悎璁? F038N as total_assets, -- 璧勪骇鎬昏 -- 娴佸姩璐熷€? 
F039N as short_term_borrowings, -- 鐭湡鍊熸 F040N as trading_financial_liabilities, -- 浜ゆ槗鎬ч噾铻嶈礋鍊? F041N as notes_payable, -- 搴斾粯绁ㄦ嵁 F042N as accounts_payable, -- 搴斾粯璐︽ F043N as advance_receipts, -- 棰勬敹娆鹃」 F044N as employee_compensation_payable, -- 搴斾粯鑱屽伐钖叕 F045N as taxes_payable, -- 搴斾氦绋庤垂 F046N as interest_payable, -- 搴斾粯鍒╂伅 F047N as dividends_payable, -- 搴斾粯鑲″埄 F048N as other_payables, -- 鍏朵粬搴斾粯娆? F050N as non_current_liabilities_due_within_one_year, -- 涓€骞村唴鍒版湡鐨勯潪娴佸姩璐熷€? F051N as other_current_liabilities, -- 鍏朵粬娴佸姩璐熷€? F052N as total_current_liabilities, -- 娴佸姩璐熷€哄悎璁? -- 闈炴祦鍔ㄨ礋鍊? F053N as long_term_borrowings, -- 闀挎湡鍊熸 F054N as bonds_payable, -- 搴斾粯鍊哄埜 F055N as long_term_payables, -- 闀挎湡搴斾粯娆? F056N as special_payables, -- 涓撻」搴斾粯娆? F057N as estimated_liabilities, -- 棰勮璐熷€? F058N as deferred_tax_liabilities, -- 閫掑欢鎵€寰楃◣璐熷€? F059N as other_non_current_liabilities, -- 鍏朵粬闈炴祦鍔ㄨ礋鍊? F060N as total_non_current_liabilities, -- 闈炴祦鍔ㄨ礋鍊哄悎璁? F061N as total_liabilities, -- 璐熷€哄悎璁? -- 鎵€鏈夎€呮潈鐩? F062N as share_capital, -- 鑲℃湰 F063N as capital_reserve, -- 璧勬湰鍏Н F064N as surplus_reserve, -- 鐩堜綑鍏Н F065N as undistributed_profit, -- 鏈垎閰嶅埄娑? F066N as treasury_stock, -- 搴撳瓨鑲? F067N as minority_interests, -- 灏戞暟鑲′笢鏉冪泭 F070N as total_equity, -- 鎵€鏈夎€呮潈鐩婂悎璁? F071N as total_liabilities_and_equity, -- 璐熷€哄拰鎵€鏈夎€呮潈鐩婂悎璁? F073N as parent_company_equity, -- 褰掑睘浜庢瘝鍏徃鎵€鏈夎€呮潈鐩? F074N as other_comprehensive_income, -- 鍏朵粬缁煎悎鏀剁泭 -- 鏂颁細璁″噯鍒欑鐩? F110N as other_debt_investments, -- 鍏朵粬鍊烘潈鎶曡祫 F111N as other_equity_investments, -- 鍏朵粬鏉冪泭宸ュ叿鎶曡祫 F112N as other_non_current_financial_assets, -- 鍏朵粬闈炴祦鍔ㄩ噾铻嶈祫浜? F115N as contract_liabilities, -- 鍚堝悓璐熷€? F119N as contract_assets, -- 鍚堝悓璧勪骇 F120N as receivables_financing, -- 搴旀敹娆鹃」铻嶈祫 F121N as right_of_use_assets, -- 浣跨敤鏉冭祫浜? F122N as lease_liabilities -- 绉熻祦璐熷€? 
FROM ea_asset WHERE SECCODE = :seccode and F002V = '071001' ORDER BY ENDDATE DESC LIMIT :limit """) with engine.connect() as conn: result = conn.execute(query, {'seccode': seccode, 'limit': limit}) data = [] for row in result: # 瀹夊叏璁$畻鍏抽敭姣旂巼锛岄伩鍏?Decimal 涓?None 杩愮畻閿欒 def to_float(v): try: return float(v) if v is not None else None except Exception: return None ta = to_float(row.total_assets) tl = to_float(row.total_liabilities) tca = to_float(row.total_current_assets) tcl = to_float(row.total_current_liabilities) inv = to_float(row.inventory) or 0.0 asset_liability_ratio_val = None if ta is not None and ta != 0 and tl is not None: asset_liability_ratio_val = (tl / ta) * 100 current_ratio_val = None if tcl is not None and tcl != 0 and tca is not None: current_ratio_val = tca / tcl quick_ratio_val = None if tcl is not None and tcl != 0 and tca is not None: quick_ratio_val = (tca - inv) / tcl period_data = { 'period': format_date(row.ENDDATE), 'declare_date': format_date(row.DECLAREDATE), 'report_type': get_report_type(row.ENDDATE), # 璧勪骇閮ㄥ垎 'assets': { 'current_assets': { 'cash': format_decimal(row.cash), 'trading_financial_assets': format_decimal(row.trading_financial_assets), 'notes_receivable': format_decimal(row.notes_receivable), 'accounts_receivable': format_decimal(row.accounts_receivable), 'prepayments': format_decimal(row.prepayments), 'other_receivables': format_decimal(row.other_receivables), 'inventory': format_decimal(row.inventory), 'contract_assets': format_decimal(row.contract_assets), 'other_current_assets': format_decimal(row.other_current_assets), 'total': format_decimal(row.total_current_assets), }, 'non_current_assets': { 'long_term_equity_investments': format_decimal(row.long_term_equity_investments), 'investment_property': format_decimal(row.investment_property), 'fixed_assets': format_decimal(row.fixed_assets), 'construction_in_progress': format_decimal(row.construction_in_progress), 'intangible_assets': format_decimal(row.intangible_assets), 
'goodwill': format_decimal(row.goodwill), 'right_of_use_assets': format_decimal(row.right_of_use_assets), 'deferred_tax_assets': format_decimal(row.deferred_tax_assets), 'other_non_current_assets': format_decimal(row.other_non_current_assets), 'total': format_decimal(row.total_non_current_assets), }, 'total': format_decimal(row.total_assets), }, # 璐熷€洪儴鍒? 'liabilities': { 'current_liabilities': { 'short_term_borrowings': format_decimal(row.short_term_borrowings), 'notes_payable': format_decimal(row.notes_payable), 'accounts_payable': format_decimal(row.accounts_payable), 'advance_receipts': format_decimal(row.advance_receipts), 'contract_liabilities': format_decimal(row.contract_liabilities), 'employee_compensation_payable': format_decimal(row.employee_compensation_payable), 'taxes_payable': format_decimal(row.taxes_payable), 'other_payables': format_decimal(row.other_payables), 'non_current_liabilities_due_within_one_year': format_decimal( row.non_current_liabilities_due_within_one_year), 'total': format_decimal(row.total_current_liabilities), }, 'non_current_liabilities': { 'long_term_borrowings': format_decimal(row.long_term_borrowings), 'bonds_payable': format_decimal(row.bonds_payable), 'lease_liabilities': format_decimal(row.lease_liabilities), 'deferred_tax_liabilities': format_decimal(row.deferred_tax_liabilities), 'other_non_current_liabilities': format_decimal(row.other_non_current_liabilities), 'total': format_decimal(row.total_non_current_liabilities), }, 'total': format_decimal(row.total_liabilities), }, # 鑲′笢鏉冪泭閮ㄥ垎 'equity': { 'share_capital': format_decimal(row.share_capital), 'capital_reserve': format_decimal(row.capital_reserve), 'surplus_reserve': format_decimal(row.surplus_reserve), 'undistributed_profit': format_decimal(row.undistributed_profit), 'treasury_stock': format_decimal(row.treasury_stock), 'other_comprehensive_income': format_decimal(row.other_comprehensive_income), 'parent_company_equity': format_decimal(row.parent_company_equity), 
'minority_interests': format_decimal(row.minority_interests), 'total': format_decimal(row.total_equity), }, # 鍏抽敭姣旂巼 'key_ratios': { 'asset_liability_ratio': format_decimal(asset_liability_ratio_val), 'current_ratio': format_decimal(current_ratio_val), 'quick_ratio': format_decimal(quick_ratio_val), } } data.append(period_data) return jsonify({ 'success': True, 'data': data }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/financial/income-statement/', methods=['GET']) def get_income_statement(seccode): """鑾峰彇瀹屾暣鐨勫埄娑﹁〃鏁版嵁""" try: limit = request.args.get('limit', 12, type=int) query = text(""" SELECT distinct ENDDATE, STARTDATE, DECLAREDATE, -- 钀ヤ笟鏀跺叆閮ㄥ垎 F006N as revenue, -- 钀ヤ笟鏀跺叆 F035N as total_operating_revenue, -- 钀ヤ笟鎬绘敹鍏? F051N as other_income, -- 鍏朵粬鏀跺叆 -- 钀ヤ笟鎴愭湰閮ㄥ垎 F007N as cost, -- 钀ヤ笟鎴愭湰 F008N as taxes_and_surcharges, -- 绋庨噾鍙婇檮鍔? F009N as selling_expenses, -- 閿€鍞垂鐢? F010N as admin_expenses, -- 绠$悊璐圭敤 F056N as rd_expenses, -- 鐮斿彂璐圭敤 F012N as financial_expenses, -- 璐㈠姟璐圭敤 F062N as interest_expense, -- 鍒╂伅璐圭敤 F063N as interest_income, -- 鍒╂伅鏀跺叆 F013N as asset_impairment_loss, -- 璧勪骇鍑忓€兼崯澶憋紙钀ヤ笟鎬绘垚鏈級 F057N as credit_impairment_loss, -- 淇$敤鍑忓€兼崯澶憋紙钀ヤ笟鎬绘垚鏈級 F036N as total_operating_cost, -- 钀ヤ笟鎬绘垚鏈? -- 鍏朵粬鏀剁泭 F014N as fair_value_change_income, -- 鍏厑浠峰€煎彉鍔ㄥ噣鏀剁泭 F015N as investment_income, -- 鎶曡祫鏀剁泭 F016N as investment_income_from_associates, -- 瀵硅仈钀ヤ紒涓氬拰鍚堣惀浼佷笟鐨勬姇璧勬敹鐩? F037N as exchange_income, -- 姹囧厬鏀剁泭 F058N as net_exposure_hedging_income, -- 鍑€鏁炲彛濂楁湡鏀剁泭 F059N as asset_disposal_income, -- 璧勪骇澶勭疆鏀剁泭 -- 鍒╂鼎閮ㄥ垎 F018N as operating_profit, -- 钀ヤ笟鍒╂鼎 F019N as subsidy_income, -- 琛ヨ创鏀跺叆 F020N as non_operating_income, -- 钀ヤ笟澶栨敹鍏? F021N as non_operating_expenses, -- 钀ヤ笟澶栨敮鍑? F022N as non_current_asset_disposal_loss, -- 闈炴祦鍔ㄨ祫浜у缃崯澶? 
F024N as total_profit, -- 鍒╂鼎鎬婚 F025N as income_tax_expense, -- 鎵€寰楃◣ F027N as net_profit, -- 鍑€鍒╂鼎 F028N as parent_net_profit, -- 褰掑睘浜庢瘝鍏徃鎵€鏈夎€呯殑鍑€鍒╂鼎 F029N as minority_profit, -- 灏戞暟鑲′笢鎹熺泭 -- 鎸佺画缁忚惀 F060N as continuing_operations_net_profit, -- 鎸佺画缁忚惀鍑€鍒╂鼎 F061N as discontinued_operations_net_profit, -- 缁堟缁忚惀鍑€鍒╂鼎 -- 姣忚偂鏀剁泭 F031N as basic_eps, -- 鍩烘湰姣忚偂鏀剁泭 F032N as diluted_eps, -- 绋€閲婃瘡鑲℃敹鐩? -- 缁煎悎鏀剁泭 F038N as other_comprehensive_income_after_tax, -- 鍏朵粬缁煎悎鏀剁泭鐨勭◣鍚庡噣棰? F039N as total_comprehensive_income, -- 缁煎悎鏀剁泭鎬婚 F040N as parent_company_comprehensive_income, -- 褰掑睘浜庢瘝鍏徃鐨勭患鍚堟敹鐩? F041N as minority_comprehensive_income -- 褰掑睘浜庡皯鏁拌偂涓滅殑缁煎悎鏀剁泭 FROM ea_profit WHERE SECCODE = :seccode and F002V = '071001' ORDER BY ENDDATE DESC LIMIT :limit """) with engine.connect() as conn: result = conn.execute(query, {'seccode': seccode, 'limit': limit}) data = [] for row in result: # 璁$畻涓€浜涜鐢熸寚鏍? gross_profit = (row.revenue - row.cost) if row.revenue and row.cost else None gross_margin = (gross_profit / row.revenue * 100) if row.revenue and gross_profit else None operating_margin = ( row.operating_profit / row.revenue * 100) if row.revenue and row.operating_profit else None net_margin = (row.net_profit / row.revenue * 100) if row.revenue and row.net_profit else None # 涓夎垂鍚堣 three_expenses = 0 if row.selling_expenses: three_expenses += row.selling_expenses if row.admin_expenses: three_expenses += row.admin_expenses if row.financial_expenses: three_expenses += row.financial_expenses # 鍥涜垂鍚堣锛堝姞鐮斿彂锛? 
four_expenses = three_expenses if row.rd_expenses: four_expenses += row.rd_expenses period_data = { 'period': format_date(row.ENDDATE), 'start_date': format_date(row.STARTDATE), 'declare_date': format_date(row.DECLAREDATE), 'report_type': get_report_type(row.ENDDATE), # 鏀跺叆閮ㄥ垎 'revenue': { 'operating_revenue': format_decimal(row.revenue), 'total_operating_revenue': format_decimal(row.total_operating_revenue), 'other_income': format_decimal(row.other_income), }, # 鎴愭湰璐圭敤閮ㄥ垎 'costs': { 'operating_cost': format_decimal(row.cost), 'taxes_and_surcharges': format_decimal(row.taxes_and_surcharges), 'selling_expenses': format_decimal(row.selling_expenses), 'admin_expenses': format_decimal(row.admin_expenses), 'rd_expenses': format_decimal(row.rd_expenses), 'financial_expenses': format_decimal(row.financial_expenses), 'interest_expense': format_decimal(row.interest_expense), 'interest_income': format_decimal(row.interest_income), 'asset_impairment_loss': format_decimal(row.asset_impairment_loss), 'credit_impairment_loss': format_decimal(row.credit_impairment_loss), 'total_operating_cost': format_decimal(row.total_operating_cost), 'three_expenses_total': format_decimal(three_expenses), 'four_expenses_total': format_decimal(four_expenses), }, # 鍏朵粬鏀剁泭 'other_gains': { 'fair_value_change': format_decimal(row.fair_value_change_income), 'investment_income': format_decimal(row.investment_income), 'investment_income_from_associates': format_decimal(row.investment_income_from_associates), 'exchange_income': format_decimal(row.exchange_income), 'asset_disposal_income': format_decimal(row.asset_disposal_income), }, # 鍒╂鼎 'profit': { 'gross_profit': format_decimal(gross_profit), 'operating_profit': format_decimal(row.operating_profit), 'total_profit': format_decimal(row.total_profit), 'net_profit': format_decimal(row.net_profit), 'parent_net_profit': format_decimal(row.parent_net_profit), 'minority_profit': format_decimal(row.minority_profit), 'continuing_operations_net_profit': 
format_decimal(row.continuing_operations_net_profit), 'discontinued_operations_net_profit': format_decimal(row.discontinued_operations_net_profit), }, # 闈炵粡钀ラ」鐩? 'non_operating': { 'subsidy_income': format_decimal(row.subsidy_income), 'non_operating_income': format_decimal(row.non_operating_income), 'non_operating_expenses': format_decimal(row.non_operating_expenses), }, # 姣忚偂鏀剁泭 'per_share': { 'basic_eps': format_decimal(row.basic_eps), 'diluted_eps': format_decimal(row.diluted_eps), }, # 缁煎悎鏀剁泭 'comprehensive_income': { 'other_comprehensive_income': format_decimal(row.other_comprehensive_income_after_tax), 'total_comprehensive_income': format_decimal(row.total_comprehensive_income), 'parent_comprehensive_income': format_decimal(row.parent_company_comprehensive_income), 'minority_comprehensive_income': format_decimal(row.minority_comprehensive_income), }, # 鍏抽敭姣旂巼 'margins': { 'gross_margin': format_decimal(gross_margin), 'operating_margin': format_decimal(operating_margin), 'net_margin': format_decimal(net_margin), 'expense_ratio': format_decimal(four_expenses / row.revenue * 100) if row.revenue else None, 'rd_ratio': format_decimal( row.rd_expenses / row.revenue * 100) if row.revenue and row.rd_expenses else None, } } data.append(period_data) return jsonify({ 'success': True, 'data': data }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/financial/cashflow/', methods=['GET']) def get_cashflow(seccode): """鑾峰彇瀹屾暣鐨勭幇閲戞祦閲忚〃鏁版嵁""" try: limit = request.args.get('limit', 12, type=int) query = text(""" SELECT distinct ENDDATE, STARTDATE, DECLAREDATE, -- 缁忚惀娲诲姩鐜伴噾娴? F006N as cash_from_sales, -- 閿€鍞晢鍝併€佹彁渚涘姵鍔℃敹鍒扮殑鐜伴噾 F007N as tax_refunds, -- 鏀跺埌鐨勭◣璐硅繑杩? F008N as other_operating_cash_received, -- 鏀跺埌鍏朵粬涓庣粡钀ユ椿鍔ㄦ湁鍏崇殑鐜伴噾 F009N as total_operating_cash_inflow, -- 缁忚惀娲诲姩鐜伴噾娴佸叆灏忚 F010N as cash_paid_for_goods, -- 璐拱鍟嗗搧銆佹帴鍙楀姵鍔℃敮浠樼殑鐜伴噾 F011N as cash_paid_to_employees, -- 鏀粯缁欒亴宸ヤ互鍙婁负鑱屽伐鏀粯鐨勭幇閲? 
F012N as taxes_paid, -- 鏀粯鐨勫悇椤圭◣璐? F013N as other_operating_cash_paid, -- 鏀粯鍏朵粬涓庣粡钀ユ椿鍔ㄦ湁鍏崇殑鐜伴噾 F014N as total_operating_cash_outflow, -- 缁忚惀娲诲姩鐜伴噾娴佸嚭灏忚 F015N as net_operating_cash_flow, -- 缁忚惀娲诲姩浜х敓鐨勭幇閲戞祦閲忓噣棰? -- 鎶曡祫娲诲姩鐜伴噾娴? F016N as cash_from_investment_recovery, -- 鏀跺洖鎶曡祫鏀跺埌鐨勭幇閲? F017N as cash_from_investment_income, -- 鍙栧緱鎶曡祫鏀剁泭鏀跺埌鐨勭幇閲? F018N as cash_from_asset_disposal, -- 澶勭疆鍥哄畾璧勪骇銆佹棤褰㈣祫浜у拰鍏朵粬闀挎湡璧勪骇鏀跺洖鐨勭幇閲戝噣棰? F019N as cash_from_subsidiary_disposal, -- 澶勭疆瀛愬叕鍙稿強鍏朵粬钀ヤ笟鍗曚綅鏀跺埌鐨勭幇閲戝噣棰? F020N as other_investment_cash_received, -- 鏀跺埌鍏朵粬涓庢姇璧勬椿鍔ㄦ湁鍏崇殑鐜伴噾 F021N as total_investment_cash_inflow, -- 鎶曡祫娲诲姩鐜伴噾娴佸叆灏忚 F022N as cash_paid_for_assets, -- 璐缓鍥哄畾璧勪骇銆佹棤褰㈣祫浜у拰鍏朵粬闀挎湡璧勪骇鏀粯鐨勭幇閲? F023N as cash_paid_for_investments, -- 鎶曡祫鏀粯鐨勭幇閲? F024N as cash_paid_for_subsidiaries, -- 鍙栧緱瀛愬叕鍙稿強鍏朵粬钀ヤ笟鍗曚綅鏀粯鐨勭幇閲戝噣棰? F025N as other_investment_cash_paid, -- 鏀粯鍏朵粬涓庢姇璧勬椿鍔ㄦ湁鍏崇殑鐜伴噾 F026N as total_investment_cash_outflow, -- 鎶曡祫娲诲姩鐜伴噾娴佸嚭灏忚 F027N as net_investment_cash_flow, -- 鎶曡祫娲诲姩浜х敓鐨勭幇閲戞祦閲忓噣棰? -- 绛硅祫娲诲姩鐜伴噾娴? F028N as cash_from_capital, -- 鍚告敹鎶曡祫鏀跺埌鐨勭幇閲? F029N as cash_from_borrowings, -- 鍙栧緱鍊熸鏀跺埌鐨勭幇閲? F030N as other_financing_cash_received, -- 鏀跺埌鍏朵粬涓庣璧勬椿鍔ㄦ湁鍏崇殑鐜伴噾 F031N as total_financing_cash_inflow, -- 绛硅祫娲诲姩鐜伴噾娴佸叆灏忚 F032N as cash_paid_for_debt, -- 鍋胯繕鍊哄姟鏀粯鐨勭幇閲? F033N as cash_paid_for_distribution, -- 鍒嗛厤鑲″埄銆佸埄娑︽垨鍋夸粯鍒╂伅鏀粯鐨勭幇閲? F034N as other_financing_cash_paid, -- 鏀粯鍏朵粬涓庣璧勬椿鍔ㄦ湁鍏崇殑鐜伴噾 F035N as total_financing_cash_outflow, -- 绛硅祫娲诲姩鐜伴噾娴佸嚭灏忚 F036N as net_financing_cash_flow, -- 绛硅祫娲诲姩浜х敓鐨勭幇閲戞祦閲忓噣棰? -- 姹囩巼鍙樺姩褰卞搷 F037N as exchange_rate_effect, -- 姹囩巼鍙樺姩瀵圭幇閲戝強鐜伴噾绛変环鐗╃殑褰卞搷 F038N as other_cash_effect, -- 鍏朵粬鍘熷洜瀵圭幇閲戠殑褰卞搷 -- 鐜伴噾鍑€澧炲姞棰? F039N as net_cash_increase, -- 鐜伴噾鍙婄幇閲戠瓑浠风墿鍑€澧炲姞棰? F040N as beginning_cash_balance, -- 鏈熷垵鐜伴噾鍙婄幇閲戠瓑浠风墿浣欓 F041N as ending_cash_balance, -- 鏈熸湯鐜伴噾鍙婄幇閲戠瓑浠风墿浣欓 -- 琛ュ厖璧勬枡閮ㄥ垎 F044N as net_profit, -- 鍑€鍒╂鼎 F045N as asset_impairment, -- 璧勪骇鍑忓€煎噯澶? F096N as credit_impairment, -- 淇$敤鍑忓€兼崯澶? F046N as depreciation, -- 鍥哄畾璧勪骇鎶樻棫銆佹补姘旇祫浜ф姌鑰椼€佺敓浜ф€х敓鐗╄祫浜ф姌鏃? 
F097N as right_of_use_asset_depreciation, -- 浣跨敤鏉冭祫浜ф姌鏃?鎽婇攢 F047N as intangible_amortization, -- 鏃犲舰璧勪骇鎽婇攢 F048N as long_term_expense_amortization, -- 闀挎湡寰呮憡璐圭敤鎽婇攢 F049N as loss_on_disposal, -- 澶勭疆鍥哄畾璧勪骇銆佹棤褰㈣祫浜у拰鍏朵粬闀挎湡璧勪骇鐨勬崯澶? F050N as fixed_asset_scrap_loss, -- 鍥哄畾璧勪骇鎶ュ簾鎹熷け F051N as fair_value_change_loss, -- 鍏厑浠峰€煎彉鍔ㄦ崯澶? F052N as financial_expenses, -- 璐㈠姟璐圭敤 F053N as investment_loss, -- 鎶曡祫鎹熷け F054N as deferred_tax_asset_decrease, -- 閫掑欢鎵€寰楃◣璧勪骇鍑忓皯 F055N as deferred_tax_liability_increase, -- 閫掑欢鎵€寰楃◣璐熷€哄鍔? F056N as inventory_decrease, -- 瀛樿揣鐨勫噺灏? F057N as operating_receivables_decrease, -- 缁忚惀鎬у簲鏀堕」鐩殑鍑忓皯 F058N as operating_payables_increase, -- 缁忚惀鎬у簲浠橀」鐩殑澧炲姞 F059N as other, -- 鍏朵粬 F060N as net_operating_cash_flow_indirect, -- 缁忚惀娲诲姩浜х敓鐨勭幇閲戞祦閲忓噣棰濓紙闂存帴娉曪級 -- 鐗规畩琛屼笟绉戠洰锛堥噾铻嶏級 F072N as customer_deposit_increase, -- 瀹㈡埛瀛樻鍜屽悓涓氬瓨鏀炬椤瑰噣澧炲姞棰? F073N as central_bank_borrowing_increase, -- 鍚戜腑澶摱琛屽€熸鍑€澧炲姞棰? F081N as interest_and_commission_received, -- 鏀跺彇鍒╂伅銆佹墜缁垂鍙婁剑閲戠殑鐜伴噾 F087N as interest_and_commission_paid -- 鏀粯鍒╂伅銆佹墜缁垂鍙婁剑閲戠殑鐜伴噾 FROM ea_cashflow WHERE SECCODE = :seccode and F002V = '071001' ORDER BY ENDDATE DESC LIMIT :limit """) with engine.connect() as conn: result = conn.execute(query, {'seccode': seccode, 'limit': limit}) data = [] for row in result: # 璁$畻涓€浜涜鐢熸寚鏍? free_cash_flow = None if row.net_operating_cash_flow and row.cash_paid_for_assets: free_cash_flow = row.net_operating_cash_flow - row.cash_paid_for_assets period_data = { 'period': format_date(row.ENDDATE), 'start_date': format_date(row.STARTDATE), 'declare_date': format_date(row.DECLAREDATE), 'report_type': get_report_type(row.ENDDATE), # 缁忚惀娲诲姩鐜伴噾娴? 
'operating_activities': { 'inflow': { 'cash_from_sales': format_decimal(row.cash_from_sales), 'tax_refunds': format_decimal(row.tax_refunds), 'other': format_decimal(row.other_operating_cash_received), 'total': format_decimal(row.total_operating_cash_inflow), }, 'outflow': { 'cash_for_goods': format_decimal(row.cash_paid_for_goods), 'cash_for_employees': format_decimal(row.cash_paid_to_employees), 'taxes_paid': format_decimal(row.taxes_paid), 'other': format_decimal(row.other_operating_cash_paid), 'total': format_decimal(row.total_operating_cash_outflow), }, 'net_flow': format_decimal(row.net_operating_cash_flow), }, # 鎶曡祫娲诲姩鐜伴噾娴? 'investment_activities': { 'inflow': { 'investment_recovery': format_decimal(row.cash_from_investment_recovery), 'investment_income': format_decimal(row.cash_from_investment_income), 'asset_disposal': format_decimal(row.cash_from_asset_disposal), 'subsidiary_disposal': format_decimal(row.cash_from_subsidiary_disposal), 'other': format_decimal(row.other_investment_cash_received), 'total': format_decimal(row.total_investment_cash_inflow), }, 'outflow': { 'asset_purchase': format_decimal(row.cash_paid_for_assets), 'investments': format_decimal(row.cash_paid_for_investments), 'subsidiaries': format_decimal(row.cash_paid_for_subsidiaries), 'other': format_decimal(row.other_investment_cash_paid), 'total': format_decimal(row.total_investment_cash_outflow), }, 'net_flow': format_decimal(row.net_investment_cash_flow), }, # 绛硅祫娲诲姩鐜伴噾娴? 
'financing_activities': { 'inflow': { 'capital': format_decimal(row.cash_from_capital), 'borrowings': format_decimal(row.cash_from_borrowings), 'other': format_decimal(row.other_financing_cash_received), 'total': format_decimal(row.total_financing_cash_inflow), }, 'outflow': { 'debt_repayment': format_decimal(row.cash_paid_for_debt), 'distribution': format_decimal(row.cash_paid_for_distribution), 'other': format_decimal(row.other_financing_cash_paid), 'total': format_decimal(row.total_financing_cash_outflow), }, 'net_flow': format_decimal(row.net_financing_cash_flow), }, # 鐜伴噾鍙樺姩 'cash_changes': { 'exchange_rate_effect': format_decimal(row.exchange_rate_effect), 'other_effect': format_decimal(row.other_cash_effect), 'net_increase': format_decimal(row.net_cash_increase), 'beginning_balance': format_decimal(row.beginning_cash_balance), 'ending_balance': format_decimal(row.ending_cash_balance), }, # 琛ュ厖璧勬枡锛堥棿鎺ユ硶锛? 'indirect_method': { 'net_profit': format_decimal(row.net_profit), 'adjustments': { 'asset_impairment': format_decimal(row.asset_impairment), 'credit_impairment': format_decimal(row.credit_impairment), 'depreciation': format_decimal(row.depreciation), 'intangible_amortization': format_decimal(row.intangible_amortization), 'financial_expenses': format_decimal(row.financial_expenses), 'investment_loss': format_decimal(row.investment_loss), 'inventory_decrease': format_decimal(row.inventory_decrease), 'receivables_decrease': format_decimal(row.operating_receivables_decrease), 'payables_increase': format_decimal(row.operating_payables_increase), }, 'net_operating_cash_flow': format_decimal(row.net_operating_cash_flow_indirect), }, # 鍏抽敭鎸囨爣 'key_metrics': { 'free_cash_flow': format_decimal(free_cash_flow), 'cash_flow_to_profit_ratio': format_decimal( row.net_operating_cash_flow / row.net_profit) if row.net_profit and row.net_operating_cash_flow else None, 'capex': format_decimal(row.cash_paid_for_assets), } } data.append(period_data) return jsonify({ 'success': 
True, 'data': data }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/financial/financial-metrics/', methods=['GET']) def get_financial_metrics(seccode): """鑾峰彇瀹屾暣鐨勮储鍔℃寚鏍囨暟鎹?"" try: limit = request.args.get('limit', 12, type=int) query = text(""" SELECT distinct ENDDATE, STARTDATE, -- 姣忚偂鎸囨爣 F003N as eps, -- 姣忚偂鏀剁泭 F004N as basic_eps, -- 鍩烘湰姣忚偂鏀剁泭 F005N as diluted_eps, -- 绋€閲婃瘡鑲℃敹鐩? F006N as deducted_eps, -- 鎵i櫎闈炵粡甯告€ф崯鐩婃瘡鑲℃敹鐩? F007N as undistributed_profit_ps, -- 姣忚偂鏈垎閰嶅埄娑? F008N as bvps, -- 姣忚偂鍑€璧勪骇 F009N as adjusted_bvps, -- 璋冩暣鍚庢瘡鑲″噣璧勪骇 F010N as capital_reserve_ps, -- 姣忚偂璧勬湰鍏Н閲? F059N as cash_flow_ps, -- 姣忚偂鐜伴噾娴侀噺 F060N as operating_cash_flow_ps, -- 姣忚偂缁忚惀鐜伴噾娴侀噺 -- 鐩堝埄鑳藉姏鎸囨爣 F011N as operating_profit_margin, -- 钀ヤ笟鍒╂鼎鐜? F012N as tax_rate, -- 钀ヤ笟绋庨噾鐜? F013N as cost_ratio, -- 钀ヤ笟鎴愭湰鐜? F014N as roe, -- 鍑€璧勪骇鏀剁泭鐜? F066N as roe_deducted, -- 鍑€璧勪骇鏀剁泭鐜?鎵i櫎闈炵粡甯告€ф崯鐩? F067N as roe_weighted, -- 鍑€璧勪骇鏀剁泭鐜?鍔犳潈 F068N as roe_weighted_deducted, -- 鍑€璧勪骇鏀剁泭鐜?鍔犳潈(鎵i櫎闈炵粡甯告€ф崯鐩? F015N as investment_return, -- 鎶曡祫鏀剁泭鐜? F016N as roa, -- 鎬昏祫浜ф姤閰巼 F017N as net_profit_margin, -- 鍑€鍒╂鼎鐜? F078N as gross_margin, -- 姣涘埄鐜? F020N as cost_profit_ratio, -- 鎴愭湰璐圭敤鍒╂鼎鐜? -- 璐圭敤鐜囨寚鏍? F018N as admin_expense_ratio, -- 绠$悊璐圭敤鐜? F019N as financial_expense_ratio, -- 璐㈠姟璐圭敤鐜? F021N as three_expense_ratio, -- 涓夎垂姣旈噸 F091N as selling_expense, -- 閿€鍞垂鐢? F092N as admin_expense, -- 绠$悊璐圭敤 F093N as financial_expense, -- 璐㈠姟璐圭敤 F094N as three_expense_total, -- 涓夎垂鍚堣 F130N as rd_expense, -- 鐮斿彂璐圭敤 F131N as rd_expense_ratio, -- 鐮斿彂璐圭敤鐜? F132N as selling_expense_ratio, -- 閿€鍞垂鐢ㄧ巼 F133N as four_expense_ratio, -- 鍥涜垂璐圭敤鐜? -- 杩愯惀鑳藉姏鎸囨爣 F022N as receivable_turnover, -- 搴旀敹璐︽鍛ㄨ浆鐜? F023N as inventory_turnover, -- 瀛樿揣鍛ㄨ浆鐜? F024N as working_capital_turnover, -- 杩愯惀璧勯噾鍛ㄨ浆鐜? F025N as total_asset_turnover, -- 鎬昏祫浜у懆杞巼 F026N as fixed_asset_turnover, -- 鍥哄畾璧勪骇鍛ㄨ浆鐜? 
F027N as receivable_days, -- 搴旀敹璐︽鍛ㄨ浆澶╂暟 F028N as inventory_days, -- 瀛樿揣鍛ㄨ浆澶╂暟 F029N as current_asset_turnover, -- 娴佸姩璧勪骇鍛ㄨ浆鐜? F030N as current_asset_days, -- 娴佸姩璧勪骇鍛ㄨ浆澶╂暟 F031N as total_asset_days, -- 鎬昏祫浜у懆杞ぉ鏁? F032N as equity_turnover, -- 鑲′笢鏉冪泭鍛ㄨ浆鐜? -- 鍋垮€鸿兘鍔涙寚鏍? F041N as asset_liability_ratio, -- 璧勪骇璐熷€虹巼 F042N as current_ratio, -- 娴佸姩姣旂巼 F043N as quick_ratio, -- 閫熷姩姣旂巼 F044N as cash_ratio, -- 鐜伴噾姣旂巼 F045N as interest_coverage, -- 鍒╂伅淇濋殰鍊嶆暟 F049N as conservative_quick_ratio, -- 淇濆畧閫熷姩姣旂巼 F050N as cash_to_maturity_debt_ratio, -- 鐜伴噾鍒版湡鍊哄姟姣旂巼 F051N as tangible_asset_debt_ratio, -- 鏈夊舰璧勪骇鍑€鍊煎€哄姟鐜? -- 鎴愰暱鑳藉姏鎸囨爣 F052N as revenue_growth, -- 钀ヤ笟鏀跺叆澧為暱鐜? F053N as net_profit_growth, -- 鍑€鍒╂鼎澧為暱鐜? F054N as equity_growth, -- 鍑€璧勪骇澧為暱鐜? F055N as fixed_asset_growth, -- 鍥哄畾璧勪骇澧為暱鐜? F056N as total_asset_growth, -- 鎬昏祫浜у闀跨巼 F057N as investment_income_growth, -- 鎶曡祫鏀剁泭澧為暱鐜? F058N as operating_profit_growth, -- 钀ヤ笟鍒╂鼎澧為暱鐜? F141N as deducted_profit_growth, -- 鎵i櫎闈炵粡甯告€ф崯鐩婂悗鐨勫噣鍒╂鼎鍚屾瘮鍙樺寲鐜? F142N as parent_profit_growth, -- 褰掑睘浜庢瘝鍏徃鎵€鏈夎€呯殑鍑€鍒╂鼎鍚屾瘮鍙樺寲鐜? F143N as operating_cash_flow_growth, -- 缁忚惀娲诲姩浜х敓鐨勭幇閲戞祦鍑€棰濆悓姣斿彉鍖栫巼 -- 鐜伴噾娴侀噺鎸囨爣 F061N as operating_cash_to_short_debt, -- 缁忚惀鍑€鐜伴噾姣旂巼锛堢煭鏈熷€哄姟锛? F062N as operating_cash_to_total_debt, -- 缁忚惀鍑€鐜伴噾姣旂巼锛堝叏閮ㄥ€哄姟锛? F063N as operating_cash_to_profit_ratio, -- 缁忚惀娲诲姩鐜伴噾鍑€娴侀噺涓庡噣鍒╂鼎姣旂巼 F064N as cash_revenue_ratio, -- 钀ヤ笟鏀跺叆鐜伴噾鍚噺 F065N as cash_recovery_rate, -- 鍏ㄩ儴璧勪骇鐜伴噾鍥炴敹鐜? F082N as cash_to_profit_ratio, -- 鍑€鍒╁惈閲戦噺 -- 璐㈠姟缁撴瀯鎸囨爣 F033N as current_asset_ratio, -- 娴佸姩璧勪骇姣旂巼 F034N as cash_ratio_structure, -- 璐у竵璧勯噾姣旂巼 F036N as inventory_ratio, -- 瀛樿揣姣旂巼 F037N as fixed_asset_ratio, -- 鍥哄畾璧勪骇姣旂巼 F038N as liability_structure_ratio, -- 璐熷€虹粨鏋勬瘮 F039N as equity_ratio, -- 浜ф潈姣旂巼 F040N as net_asset_ratio, -- 鍑€璧勪骇姣旂巼 F046N as working_capital, -- 钀ヨ繍璧勯噾 F047N as non_current_liability_ratio, -- 闈炴祦鍔ㄨ礋鍊烘瘮鐜? F048N as current_liability_ratio, -- 娴佸姩璐熷€烘瘮鐜? -- 闈炵粡甯告€ф崯鐩? 
F076N as deducted_net_profit, -- 鎵i櫎闈炵粡甯告€ф崯鐩婂悗鐨勫噣鍒╂鼎 F077N as non_recurring_items, -- 闈炵粡甯告€ф崯鐩婂悎璁? F083N as non_recurring_ratio, -- 闈炵粡甯告€ф崯鐩婂崰姣? -- 缁煎悎鎸囨爣 F085N as ebit, -- 鍩烘湰鑾峰埄鑳藉姏(EBIT) F086N as receivable_to_asset_ratio, -- 搴旀敹璐︽鍗犳瘮 F087N as inventory_to_asset_ratio -- 瀛樿揣鍗犳瘮 FROM ea_financialindex WHERE SECCODE = :seccode ORDER BY ENDDATE DESC LIMIT :limit """) with engine.connect() as conn: result = conn.execute(query, {'seccode': seccode, 'limit': limit}) data = [] for row in result: period_data = { 'period': format_date(row.ENDDATE), 'start_date': format_date(row.STARTDATE), 'report_type': get_report_type(row.ENDDATE), # 姣忚偂鎸囨爣 'per_share_metrics': { 'eps': format_decimal(row.eps), 'basic_eps': format_decimal(row.basic_eps), 'diluted_eps': format_decimal(row.diluted_eps), 'deducted_eps': format_decimal(row.deducted_eps), 'bvps': format_decimal(row.bvps), 'adjusted_bvps': format_decimal(row.adjusted_bvps), 'undistributed_profit_ps': format_decimal(row.undistributed_profit_ps), 'capital_reserve_ps': format_decimal(row.capital_reserve_ps), 'cash_flow_ps': format_decimal(row.cash_flow_ps), 'operating_cash_flow_ps': format_decimal(row.operating_cash_flow_ps), }, # 鐩堝埄鑳藉姏 'profitability': { 'roe': format_decimal(row.roe), 'roe_deducted': format_decimal(row.roe_deducted), 'roe_weighted': format_decimal(row.roe_weighted), 'roa': format_decimal(row.roa), 'gross_margin': format_decimal(row.gross_margin), 'net_profit_margin': format_decimal(row.net_profit_margin), 'operating_profit_margin': format_decimal(row.operating_profit_margin), 'cost_profit_ratio': format_decimal(row.cost_profit_ratio), 'ebit': format_decimal(row.ebit), }, # 璐圭敤鐜? 
'expense_ratios': { 'selling_expense_ratio': format_decimal(row.selling_expense_ratio), 'admin_expense_ratio': format_decimal(row.admin_expense_ratio), 'financial_expense_ratio': format_decimal(row.financial_expense_ratio), 'rd_expense_ratio': format_decimal(row.rd_expense_ratio), 'three_expense_ratio': format_decimal(row.three_expense_ratio), 'four_expense_ratio': format_decimal(row.four_expense_ratio), }, # 杩愯惀鑳藉姏 'operational_efficiency': { 'receivable_turnover': format_decimal(row.receivable_turnover), 'receivable_days': format_decimal(row.receivable_days), 'inventory_turnover': format_decimal(row.inventory_turnover), 'inventory_days': format_decimal(row.inventory_days), 'total_asset_turnover': format_decimal(row.total_asset_turnover), 'total_asset_days': format_decimal(row.total_asset_days), 'fixed_asset_turnover': format_decimal(row.fixed_asset_turnover), 'current_asset_turnover': format_decimal(row.current_asset_turnover), 'working_capital_turnover': format_decimal(row.working_capital_turnover), }, # 鍋垮€鸿兘鍔? 
'solvency': { 'current_ratio': format_decimal(row.current_ratio), 'quick_ratio': format_decimal(row.quick_ratio), 'cash_ratio': format_decimal(row.cash_ratio), 'conservative_quick_ratio': format_decimal(row.conservative_quick_ratio), 'asset_liability_ratio': format_decimal(row.asset_liability_ratio), 'interest_coverage': format_decimal(row.interest_coverage), 'cash_to_maturity_debt_ratio': format_decimal(row.cash_to_maturity_debt_ratio), 'tangible_asset_debt_ratio': format_decimal(row.tangible_asset_debt_ratio), }, # 鎴愰暱鑳藉姏 'growth': { 'revenue_growth': format_decimal(row.revenue_growth), 'net_profit_growth': format_decimal(row.net_profit_growth), 'deducted_profit_growth': format_decimal(row.deducted_profit_growth), 'parent_profit_growth': format_decimal(row.parent_profit_growth), 'equity_growth': format_decimal(row.equity_growth), 'total_asset_growth': format_decimal(row.total_asset_growth), 'fixed_asset_growth': format_decimal(row.fixed_asset_growth), 'operating_profit_growth': format_decimal(row.operating_profit_growth), 'operating_cash_flow_growth': format_decimal(row.operating_cash_flow_growth), }, # 鐜伴噾娴侀噺 'cash_flow_quality': { 'operating_cash_to_profit_ratio': format_decimal(row.operating_cash_to_profit_ratio), 'cash_to_profit_ratio': format_decimal(row.cash_to_profit_ratio), 'cash_revenue_ratio': format_decimal(row.cash_revenue_ratio), 'cash_recovery_rate': format_decimal(row.cash_recovery_rate), 'operating_cash_to_short_debt': format_decimal(row.operating_cash_to_short_debt), 'operating_cash_to_total_debt': format_decimal(row.operating_cash_to_total_debt), }, # 璐㈠姟缁撴瀯 'financial_structure': { 'current_asset_ratio': format_decimal(row.current_asset_ratio), 'fixed_asset_ratio': format_decimal(row.fixed_asset_ratio), 'inventory_ratio': format_decimal(row.inventory_ratio), 'receivable_to_asset_ratio': format_decimal(row.receivable_to_asset_ratio), 'current_liability_ratio': format_decimal(row.current_liability_ratio), 'non_current_liability_ratio': 
format_decimal(row.non_current_liability_ratio), 'equity_ratio': format_decimal(row.equity_ratio), }, # 闈炵粡甯告€ф崯鐩? 'non_recurring': { 'deducted_net_profit': format_decimal(row.deducted_net_profit), 'non_recurring_items': format_decimal(row.non_recurring_items), 'non_recurring_ratio': format_decimal(row.non_recurring_ratio), } } data.append(period_data) return jsonify({ 'success': True, 'data': data }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/financial/main-business/', methods=['GET']) def get_main_business(seccode): """鑾峰彇涓昏惀涓氬姟鏋勬垚鏁版嵁锛堝寘鎷骇鍝佸拰琛屼笟鍒嗙被锛?"" try: limit = request.args.get('periods', 4, type=int) # 鑾峰彇鏈€杩戝嚑鏈熺殑鏁版嵁 # 鑾峰彇鏈€杩戠殑鎶ュ憡鏈? period_query = text(""" SELECT DISTINCT ENDDATE FROM ea_mainproduct WHERE SECCODE = :seccode ORDER BY ENDDATE DESC LIMIT :limit """) with engine.connect() as conn: periods = conn.execute(period_query, {'seccode': seccode, 'limit': limit}).fetchall() # 浜у搧鍒嗙被鏁版嵁 product_data = [] for period in periods: query = text(""" SELECT distinct ENDDATE, F002V as category, F003V as content, F005N as revenue, F006N as cost, F007N as profit FROM ea_mainproduct WHERE SECCODE = :seccode AND ENDDATE = :enddate ORDER BY F005N DESC """) with engine.connect() as conn: result = conn.execute(query, {'seccode': seccode, 'enddate': period[0]}) # Convert result to list to allow multiple iterations rows = list(result) period_products = [] total_revenue = 0 for row in rows: if row.revenue: total_revenue += row.revenue for row in rows: product = { 'category': row.category, 'content': row.content, 'revenue': format_decimal(row.revenue), 'cost': format_decimal(row.cost), 'profit': format_decimal(row.profit), 'profit_margin': format_decimal( (row.profit / row.revenue * 100) if row.revenue and row.profit else None), 'revenue_ratio': format_decimal( (row.revenue / total_revenue * 100) if total_revenue and row.revenue else None) } period_products.append(product) if period_products: product_data.append({ 
'period': format_date(period[0]), 'report_type': get_report_type(period[0]), 'total_revenue': format_decimal(total_revenue), 'products': period_products }) # 琛屼笟鍒嗙被鏁版嵁锛堜粠ea_mainind琛級 industry_data = [] for period in periods: query = text(""" SELECT distinct ENDDATE, F002V as business_content, F007N as main_revenue, F008N as main_cost, F009N as main_profit, F010N as gross_margin, F012N as revenue_ratio FROM ea_mainind WHERE SECCODE = :seccode AND ENDDATE = :enddate ORDER BY F007N DESC """) with engine.connect() as conn: result = conn.execute(query, {'seccode': seccode, 'enddate': period[0]}) # Convert result to list to allow multiple iterations rows = list(result) period_industries = [] for row in rows: industry = { 'content': row.business_content, 'revenue': format_decimal(row.main_revenue), 'cost': format_decimal(row.main_cost), 'profit': format_decimal(row.main_profit), 'gross_margin': format_decimal(row.gross_margin), 'revenue_ratio': format_decimal(row.revenue_ratio) } period_industries.append(industry) if period_industries: industry_data.append({ 'period': format_date(period[0]), 'report_type': get_report_type(period[0]), 'industries': period_industries }) return jsonify({ 'success': True, 'data': { 'product_classification': product_data, 'industry_classification': industry_data } }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/financial/forecast/', methods=['GET']) def get_forecast(seccode): """鑾峰彇涓氱哗棰勫憡鍜岄鎶湶鏃堕棿""" try: # 鑾峰彇涓氱哗棰勫憡 forecast_query = text(""" SELECT distinct DECLAREDATE, F001D as report_date, F002V as forecast_type_code, F003V as forecast_type, F004V as content, F005V as reason, F006C as latest_flag, F007N as profit_lower, F008N as profit_upper, F009N as change_lower, F010N as change_upper, UPDATE_DATE FROM ea_forecast WHERE SECCODE = :seccode ORDER BY F001D DESC, UPDATE_DATE DESC LIMIT 10 """) with engine.connect() as conn: forecast_result = conn.execute(forecast_query, {'seccode': 
seccode}) forecast_data = [] for row in forecast_result: forecast = { 'declare_date': format_date(row.DECLAREDATE), 'report_date': format_date(row.report_date), 'report_type': get_report_type(row.report_date), 'forecast_type': row.forecast_type, 'forecast_type_code': row.forecast_type_code, 'content': row.content, 'reason': row.reason, 'is_latest': row.latest_flag == 'T', 'profit_range': { 'lower': format_decimal(row.profit_lower), 'upper': format_decimal(row.profit_upper), }, 'change_range': { 'lower': format_decimal(row.change_lower), 'upper': format_decimal(row.change_upper), }, 'update_date': format_date(row.UPDATE_DATE) } forecast_data.append(forecast) # 鑾峰彇棰勬姭闇叉椂闂? pretime_query = text(""" SELECT distinct F001D as report_period, F002D as scheduled_date, F003D as change_date_1, F004D as change_date_2, F005D as change_date_3, F006D as actual_date, F007D as change_date_4, F008D as change_date_5, UPDATE_DATE FROM ea_pretime WHERE SECCODE = :seccode ORDER BY F001D DESC LIMIT 8 """) with engine.connect() as conn: pretime_result = conn.execute(pretime_query, {'seccode': seccode}) pretime_data = [] for row in pretime_result: # 鏀堕泦鎵€鏈夊彉鏇存棩鏈? 
change_dates = [] for date in [row.change_date_1, row.change_date_2, row.change_date_3, row.change_date_4, row.change_date_5]: if date: change_dates.append(format_date(date)) pretime = { 'report_period': format_date(row.report_period), 'report_type': get_report_type(row.report_period), 'scheduled_date': format_date(row.scheduled_date), 'actual_date': format_date(row.actual_date), 'change_dates': change_dates, 'update_date': format_date(row.UPDATE_DATE), 'status': 'completed' if row.actual_date else 'pending' } pretime_data.append(pretime) return jsonify({ 'success': True, 'data': { 'forecasts': forecast_data, 'disclosure_schedule': pretime_data } }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/financial/industry-rank/', methods=['GET']) def get_industry_rank(seccode): """鑾峰彇琛屼笟鎺掑悕鏁版嵁""" try: limit = request.args.get('limit', 4, type=int) query = text(""" SELECT distinct F001V as industry_level, F002V as level_description, F003D as report_date, INDNAME as industry_name, -- 姣忚偂鏀剁泭 F004N as eps, F005N as eps_industry_avg, F006N as eps_rank, -- 鎵i櫎鍚庢瘡鑲℃敹鐩? F007N as deducted_eps, F008N as deducted_eps_industry_avg, F009N as deducted_eps_rank, -- 姣忚偂鍑€璧勪骇 F010N as bvps, F011N as bvps_industry_avg, F012N as bvps_rank, -- 鍑€璧勪骇鏀剁泭鐜? F013N as roe, F014N as roe_industry_avg, F015N as roe_rank, -- 姣忚偂鏈垎閰嶅埄娑? F016N as undistributed_profit_ps, F017N as undistributed_profit_ps_industry_avg, F018N as undistributed_profit_ps_rank, -- 姣忚偂缁忚惀鐜伴噾娴侀噺 F019N as operating_cash_flow_ps, F020N as operating_cash_flow_ps_industry_avg, F021N as operating_cash_flow_ps_rank, -- 钀ヤ笟鏀跺叆澧為暱鐜? F022N as revenue_growth, F023N as revenue_growth_industry_avg, F024N as revenue_growth_rank, -- 鍑€鍒╂鼎澧為暱鐜? F025N as profit_growth, F026N as profit_growth_industry_avg, F027N as profit_growth_rank, -- 钀ヤ笟鍒╂鼎鐜? 
F028N as operating_margin, F029N as operating_margin_industry_avg, F030N as operating_margin_rank, -- 璧勪骇璐熷€虹巼 F031N as debt_ratio, F032N as debt_ratio_industry_avg, F033N as debt_ratio_rank, -- 搴旀敹璐︽鍛ㄨ浆鐜? F034N as receivable_turnover, F035N as receivable_turnover_industry_avg, F036N as receivable_turnover_rank, UPDATE_DATE FROM ea_finindexrank WHERE SECCODE = :seccode ORDER BY F003D DESC, F001V ASC LIMIT :limit_total """) # 鑾峰彇澶氫釜鎶ュ憡鏈熺殑鏁版嵁 with engine.connect() as conn: result = conn.execute(query, {'seccode': seccode, 'limit_total': limit * 4}) # 鎸夋姤鍛婃湡鍜岃涓氱骇鍒粍缁囨暟鎹? data_by_period = {} for row in result: period = format_date(row.report_date) if period not in data_by_period: data_by_period[period] = [] rank_data = { 'industry_level': row.industry_level, 'level_description': row.level_description, 'industry_name': row.industry_name, 'metrics': { 'eps': { 'value': format_decimal(row.eps), 'industry_avg': format_decimal(row.eps_industry_avg), 'rank': int(row.eps_rank) if row.eps_rank else None }, 'deducted_eps': { 'value': format_decimal(row.deducted_eps), 'industry_avg': format_decimal(row.deducted_eps_industry_avg), 'rank': int(row.deducted_eps_rank) if row.deducted_eps_rank else None }, 'bvps': { 'value': format_decimal(row.bvps), 'industry_avg': format_decimal(row.bvps_industry_avg), 'rank': int(row.bvps_rank) if row.bvps_rank else None }, 'roe': { 'value': format_decimal(row.roe), 'industry_avg': format_decimal(row.roe_industry_avg), 'rank': int(row.roe_rank) if row.roe_rank else None }, 'operating_cash_flow_ps': { 'value': format_decimal(row.operating_cash_flow_ps), 'industry_avg': format_decimal(row.operating_cash_flow_ps_industry_avg), 'rank': int(row.operating_cash_flow_ps_rank) if row.operating_cash_flow_ps_rank else None }, 'revenue_growth': { 'value': format_decimal(row.revenue_growth), 'industry_avg': format_decimal(row.revenue_growth_industry_avg), 'rank': int(row.revenue_growth_rank) if row.revenue_growth_rank else None }, 'profit_growth': { 'value': 
format_decimal(row.profit_growth), 'industry_avg': format_decimal(row.profit_growth_industry_avg), 'rank': int(row.profit_growth_rank) if row.profit_growth_rank else None }, 'operating_margin': { 'value': format_decimal(row.operating_margin), 'industry_avg': format_decimal(row.operating_margin_industry_avg), 'rank': int(row.operating_margin_rank) if row.operating_margin_rank else None }, 'debt_ratio': { 'value': format_decimal(row.debt_ratio), 'industry_avg': format_decimal(row.debt_ratio_industry_avg), 'rank': int(row.debt_ratio_rank) if row.debt_ratio_rank else None }, 'receivable_turnover': { 'value': format_decimal(row.receivable_turnover), 'industry_avg': format_decimal(row.receivable_turnover_industry_avg), 'rank': int(row.receivable_turnover_rank) if row.receivable_turnover_rank else None } } } data_by_period[period].append(rank_data) # 杞崲涓哄垪琛ㄦ牸寮? data = [] for period, ranks in data_by_period.items(): data.append({ 'period': period, 'report_type': get_report_type(period), 'rankings': ranks }) return jsonify({ 'success': True, 'data': data }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/financial/comparison/', methods=['GET']) def get_period_comparison(seccode): """鑾峰彇涓嶅悓鎶ュ憡鏈熺殑瀵规瘮鏁版嵁""" try: periods = request.args.get('periods', 8, type=int) # 鑾峰彇澶氭湡璐㈠姟鏁版嵁杩涜瀵规瘮 query = text(""" SELECT distinct fi.ENDDATE, fi.F089N as revenue, fi.F101N as net_profit, fi.F102N as parent_net_profit, fi.F078N as gross_margin, fi.F017N as net_margin, fi.F014N as roe, fi.F016N as roa, fi.F052N as revenue_growth, fi.F053N as profit_growth, fi.F003N as eps, fi.F060N as operating_cash_flow_ps, fi.F042N as current_ratio, fi.F041N as debt_ratio, fi.F105N as operating_cash_flow, fi.F118N as total_assets, fi.F121N as total_liabilities, fi.F128N as total_equity FROM ea_financialindex fi WHERE fi.SECCODE = :seccode ORDER BY fi.ENDDATE DESC LIMIT :periods """) with engine.connect() as conn: result = conn.execute(query, {'seccode': 
seccode, 'periods': periods}) data = [] for row in result: period_data = { 'period': format_date(row.ENDDATE), 'report_type': get_report_type(row.ENDDATE), 'performance': { 'revenue': format_decimal(row.revenue), 'net_profit': format_decimal(row.net_profit), 'parent_net_profit': format_decimal(row.parent_net_profit), 'operating_cash_flow': format_decimal(row.operating_cash_flow), }, 'profitability': { 'gross_margin': format_decimal(row.gross_margin), 'net_margin': format_decimal(row.net_margin), 'roe': format_decimal(row.roe), 'roa': format_decimal(row.roa), }, 'growth': { 'revenue_growth': format_decimal(row.revenue_growth), 'profit_growth': format_decimal(row.profit_growth), }, 'per_share': { 'eps': format_decimal(row.eps), 'operating_cash_flow_ps': format_decimal(row.operating_cash_flow_ps), }, 'financial_health': { 'current_ratio': format_decimal(row.current_ratio), 'debt_ratio': format_decimal(row.debt_ratio), 'total_assets': format_decimal(row.total_assets), 'total_liabilities': format_decimal(row.total_liabilities), 'total_equity': format_decimal(row.total_equity), } } data.append(period_data) # 璁$畻鍚屾瘮鍜岀幆姣斿彉鍖? 
for i in range(len(data)): if i > 0: # 鐜瘮 data[i]['qoq_changes'] = { 'revenue': calculate_change(data[i]['performance']['revenue'], data[i - 1]['performance']['revenue']), 'net_profit': calculate_change(data[i]['performance']['net_profit'], data[i - 1]['performance']['net_profit']), } # 鍚屾瘮锛堟壘鍒板幓骞村悓鏈燂級 current_period = data[i]['period'] yoy_period = get_yoy_period(current_period) for j in range(len(data)): if data[j]['period'] == yoy_period: data[i]['yoy_changes'] = { 'revenue': calculate_change(data[i]['performance']['revenue'], data[j]['performance']['revenue']), 'net_profit': calculate_change(data[i]['performance']['net_profit'], data[j]['performance']['net_profit']), } break return jsonify({ 'success': True, 'data': data }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 # 杈呭姪鍑芥暟 def calculate_change(current, previous): """璁$畻鍙樺寲鐜?"" if previous and current: return format_decimal((current - previous) / abs(previous) * 100) return None def get_yoy_period(date_str): """鑾峰彇鍘诲勾鍚屾湡""" if not date_str: return None try: date = datetime.strptime(date_str, '%Y-%m-%d') yoy_date = date.replace(year=date.year - 1) return yoy_date.strftime('%Y-%m-%d') except: return None @app.route('/api/market/trade/', methods=['GET']) def get_trade_data(seccode): """鑾峰彇鑲$エ浜ゆ槗鏁版嵁锛堟棩K绾匡級""" try: days = request.args.get('days', 60, type=int) end_date = request.args.get('end_date', datetime.now().strftime('%Y-%m-%d')) query = text(""" SELECT TRADEDATE, SECNAME, F002N as pre_close, F003N as open, F004N as volume, F005N as high, F006N as low, F007N as close, F008N as trades_count, F009N as change_amount, F010N as change_percent, F011N as amount, F012N as turnover_rate, F013N as amplitude, F020N as total_shares, F021N as float_shares, F026N as pe_ratio FROM ea_trade WHERE SECCODE = :seccode AND TRADEDATE <= :end_date ORDER BY TRADEDATE DESC LIMIT :days """) with engine.connect() as conn: result = conn.execute(query, {'seccode': seccode, 'end_date': end_date, 
'days': days}) data = [] for row in result: data.append({ 'date': format_date(row.TRADEDATE), 'stock_name': row.SECNAME, 'open': format_decimal(row.open), 'high': format_decimal(row.high), 'low': format_decimal(row.low), 'close': format_decimal(row.close), 'pre_close': format_decimal(row.pre_close), 'volume': format_decimal(row.volume), 'amount': format_decimal(row.amount), 'change_amount': format_decimal(row.change_amount), 'change_percent': format_decimal(row.change_percent), 'turnover_rate': format_decimal(row.turnover_rate), 'amplitude': format_decimal(row.amplitude), 'trades_count': format_decimal(row.trades_count), 'pe_ratio': format_decimal(row.pe_ratio), 'total_shares': format_decimal(row.total_shares), 'float_shares': format_decimal(row.float_shares), }) # 鍊掑簭锛岃鏈€鏃╃殑鏃ユ湡鍦ㄥ墠 data.reverse() # 璁$畻缁熻鏁版嵁 if data: prices = [d['close'] for d in data if d['close']] stats = { 'highest': max(prices) if prices else None, 'lowest': min(prices) if prices else None, 'average': sum(prices) / len(prices) if prices else None, 'latest_price': data[-1]['close'] if data else None, 'total_volume': sum([d['volume'] for d in data if d['volume']]) if data else None, 'total_amount': sum([d['amount'] for d in data if d['amount']]) if data else None, } else: stats = {} return jsonify({ 'success': True, 'data': data, 'stats': stats }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/market/trade/batch', methods=['POST']) def get_batch_trade_data(): """鎵归噺鑾峰彇澶氬彧鑲$エ鐨勪氦鏄撴暟鎹紙鏃绾匡級 璇锋眰浣擄細{ codes: string[], // 鑲$エ浠g爜鍒楄〃锛?浣嶄唬鐮侊級 days: number // 鑾峰彇澶╂暟锛岄粯璁? 
} 杩斿洖锛歿 success: true, data: { [seccode]: { data: [], stats: {} } } } """ try: data = request.json codes = data.get('codes', []) days = data.get('days', 1) end_date = data.get('end_date', datetime.now().strftime('%Y-%m-%d')) if not codes: return jsonify({'success': False, 'error': '璇锋彁渚涜偂绁ㄤ唬鐮佸垪琛?}), 400 if len(codes) > 100: return jsonify({'success': False, 'error': '鍗曟鏈€澶氭煡璇?00鍙偂绁?}), 400 # 鏋勫缓鎵归噺鏌ヨ placeholders = ','.join([f':code{i}' for i in range(len(codes))]) params = {f'code{i}': code for i, code in enumerate(codes)} params['end_date'] = end_date params['days'] = days query = text(f""" SELECT SECCODE, TRADEDATE, SECNAME, F002N as pre_close, F003N as open, F004N as volume, F005N as high, F006N as low, F007N as close, F008N as trades_count, F009N as change_amount, F010N as change_percent, F011N as amount, F012N as turnover_rate, F013N as amplitude FROM ea_trade WHERE SECCODE IN ({placeholders}) AND TRADEDATE <= :end_date ORDER BY SECCODE, TRADEDATE DESC """) with engine.connect() as conn: result = conn.execute(query, params) rows = result.fetchall() # 鎸夎偂绁ㄤ唬鐮佸垎缁勶紝姣忓彧鑲$エ鍙彇鏈€杩慛澶? stock_data = {} stock_counts = {} for row in rows: seccode = row.SECCODE if seccode not in stock_data: stock_data[seccode] = [] stock_counts[seccode] = 0 # 鍙彇鎸囧畾澶╂暟鐨勬暟鎹? if stock_counts[seccode] < days: stock_data[seccode].append({ 'date': format_date(row.TRADEDATE), 'stock_name': row.SECNAME, 'open': format_decimal(row.open), 'high': format_decimal(row.high), 'low': format_decimal(row.low), 'close': format_decimal(row.close), 'pre_close': format_decimal(row.pre_close), 'volume': format_decimal(row.volume), 'amount': format_decimal(row.amount), 'change_amount': format_decimal(row.change_amount), 'change_percent': format_decimal(row.change_percent), 'turnover_rate': format_decimal(row.turnover_rate), 'amplitude': format_decimal(row.amplitude), 'trades_count': format_decimal(row.trades_count), }) stock_counts[seccode] += 1 # 鍊掑簭姣忓彧鑲$エ鐨勬暟鎹紙璁╂渶鏃╃殑鏃ユ湡鍦ㄥ墠锛? 
results = {} for seccode, data_list in stock_data.items(): data_list.reverse() results[seccode] = { 'data': data_list, 'stats': { 'latest_price': data_list[-1]['close'] if data_list else None, 'change_percent': data_list[-1]['change_percent'] if data_list else None, } if data_list else {} } # 涓烘病鏈夋暟鎹殑鑲$エ杩斿洖绌虹粨鏋? for code in codes: if code not in results: results[code] = {'data': [], 'stats': {}} return jsonify({ 'success': True, 'data': results }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/market/funding/', methods=['GET']) def get_funding_data(seccode): """鑾峰彇铻嶈祫铻嶅埜鏁版嵁""" try: days = request.args.get('days', 30, type=int) query = text(""" SELECT TRADEDATE, SECNAME, F001N as financing_balance, F002N as financing_buy, F003N as financing_repay, F004N as securities_balance, F006N as securities_sell, F007N as securities_repay, F008N as securities_balance_amount, F009N as total_balance FROM ea_funding WHERE SECCODE = :seccode ORDER BY TRADEDATE DESC LIMIT :days """) with engine.connect() as conn: result = conn.execute(query, {'seccode': seccode, 'days': days}) data = [] for row in result: data.append({ 'date': format_date(row.TRADEDATE), 'stock_name': row.SECNAME, 'financing': { 'balance': format_decimal(row.financing_balance), 'buy': format_decimal(row.financing_buy), 'repay': format_decimal(row.financing_repay), 'net': format_decimal( row.financing_buy - row.financing_repay) if row.financing_buy and row.financing_repay else None }, 'securities': { 'balance': format_decimal(row.securities_balance), 'sell': format_decimal(row.securities_sell), 'repay': format_decimal(row.securities_repay), 'balance_amount': format_decimal(row.securities_balance_amount) }, 'total_balance': format_decimal(row.total_balance) }) data.reverse() return jsonify({ 'success': True, 'data': data }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/market/bigdeal/', methods=['GET']) def 
get_bigdeal_data(seccode): """鑾峰彇澶у畻浜ゆ槗鏁版嵁""" try: days = request.args.get('days', 30, type=int) query = text(""" SELECT TRADEDATE, SECNAME, F001V as exchange, F002V as buyer_dept, F003V as seller_dept, F004N as price, F005N as volume, F006N as amount, F007N as seq_no FROM ea_bigdeal WHERE SECCODE = :seccode ORDER BY TRADEDATE DESC, F007N LIMIT :days """) with engine.connect() as conn: result = conn.execute(query, {'seccode': seccode, 'days': days}) data = [] for row in result: data.append({ 'date': format_date(row.TRADEDATE), 'stock_name': row.SECNAME, 'exchange': row.exchange, 'buyer_dept': row.buyer_dept, 'seller_dept': row.seller_dept, 'price': format_decimal(row.price), 'volume': format_decimal(row.volume), 'amount': format_decimal(row.amount), 'seq_no': int(row.seq_no) if row.seq_no else None }) # 鎸夋棩鏈熷垎缁勭粺璁? daily_stats = {} for item in data: date = item['date'] if date not in daily_stats: daily_stats[date] = { 'date': date, 'count': 0, 'total_volume': 0, 'total_amount': 0, 'avg_price': 0, 'deals': [] } daily_stats[date]['count'] += 1 daily_stats[date]['total_volume'] += item['volume'] or 0 daily_stats[date]['total_amount'] += item['amount'] or 0 daily_stats[date]['deals'].append(item) # 璁$畻骞冲潎浠锋牸 for date in daily_stats: if daily_stats[date]['total_volume'] > 0: daily_stats[date]['avg_price'] = daily_stats[date]['total_amount'] / daily_stats[date]['total_volume'] return jsonify({ 'success': True, 'data': data, 'daily_stats': list(daily_stats.values()) }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/market/unusual/', methods=['GET']) def get_unusual_data(seccode): """鑾峰彇榫欒檸姒滄暟鎹?"" try: days = request.args.get('days', 30, type=int) query = text(""" SELECT TRADEDATE, SECNAME, F001V as info_type_code, F002V as info_type, F003C as trade_type, F004N as rank_no, F005V as dept_name, F006N as buy_amount, F007N as sell_amount, F008N as net_amount FROM ea_unusual WHERE SECCODE = :seccode ORDER BY TRADEDATE DESC, 
F004N LIMIT 100 """) with engine.connect() as conn: result = conn.execute(query, {'seccode': seccode}) data = [] for row in result: data.append({ 'date': format_date(row.TRADEDATE), 'stock_name': row.SECNAME, 'info_type': row.info_type, 'info_type_code': row.info_type_code, 'trade_type': 'buy' if row.trade_type == 'B' else 'sell' if row.trade_type == 'S' else 'unknown', 'rank': int(row.rank_no) if row.rank_no else None, 'dept_name': row.dept_name, 'buy_amount': format_decimal(row.buy_amount), 'sell_amount': format_decimal(row.sell_amount), 'net_amount': format_decimal(row.net_amount) }) # 鎸夋棩鏈熷垎缁? grouped_data = {} for item in data: date = item['date'] if date not in grouped_data: grouped_data[date] = { 'date': date, 'info_types': set(), 'buyers': [], 'sellers': [], 'total_buy': 0, 'total_sell': 0, 'net_amount': 0 } grouped_data[date]['info_types'].add(item['info_type']) if item['trade_type'] == 'buy': grouped_data[date]['buyers'].append(item) grouped_data[date]['total_buy'] += item['buy_amount'] or 0 elif item['trade_type'] == 'sell': grouped_data[date]['sellers'].append(item) grouped_data[date]['total_sell'] += item['sell_amount'] or 0 grouped_data[date]['net_amount'] = grouped_data[date]['total_buy'] - grouped_data[date]['total_sell'] # 杞崲set涓簂ist for date in grouped_data: grouped_data[date]['info_types'] = list(grouped_data[date]['info_types']) return jsonify({ 'success': True, 'data': data, 'grouped_data': list(grouped_data.values()) }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/market/pledge/', methods=['GET']) def get_pledge_data(seccode): """鑾峰彇鑲℃潈璐ㄦ娂鏁版嵁""" try: query = text(""" SELECT ENDDATE, STARTDATE, SECNAME, F001N as unrestricted_pledge, F002N as restricted_pledge, F003N as total_shares_a, F004N as pledge_count, F005N as pledge_ratio FROM ea_pledgeratio WHERE SECCODE = :seccode ORDER BY ENDDATE DESC LIMIT 12 """) with engine.connect() as conn: result = conn.execute(query, {'seccode': seccode}) 
data = [] for row in result: total_pledge = (row.unrestricted_pledge or 0) + (row.restricted_pledge or 0) data.append({ 'end_date': format_date(row.ENDDATE), 'start_date': format_date(row.STARTDATE), 'stock_name': row.SECNAME, 'unrestricted_pledge': format_decimal(row.unrestricted_pledge), 'restricted_pledge': format_decimal(row.restricted_pledge), 'total_pledge': format_decimal(total_pledge), 'total_shares': format_decimal(row.total_shares_a), 'pledge_count': int(row.pledge_count) if row.pledge_count else None, 'pledge_ratio': format_decimal(row.pledge_ratio) }) return jsonify({ 'success': True, 'data': data }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/market/summary/', methods=['GET']) def get_market_summary(seccode): """鑾峰彇甯傚満鏁版嵁姹囨€?"" try: # 鑾峰彇鏈€鏂颁氦鏄撴暟鎹? trade_query = text(""" SELECT * FROM ea_trade WHERE SECCODE = :seccode ORDER BY TRADEDATE DESC LIMIT 1 """) # 鑾峰彇鏈€鏂拌瀺璧勮瀺鍒告暟鎹? funding_query = text(""" SELECT * FROM ea_funding WHERE SECCODE = :seccode ORDER BY TRADEDATE DESC LIMIT 1 """) # 鑾峰彇鏈€鏂拌川鎶兼暟鎹? 
pledge_query = text(""" SELECT * FROM ea_pledgeratio WHERE SECCODE = :seccode ORDER BY ENDDATE DESC LIMIT 1 """) with engine.connect() as conn: trade_result = conn.execute(trade_query, {'seccode': seccode}).fetchone() with engine.connect() as conn: funding_result = conn.execute(funding_query, {'seccode': seccode}).fetchone() with engine.connect() as conn: pledge_result = conn.execute(pledge_query, {'seccode': seccode}).fetchone() summary = { 'stock_code': seccode, 'stock_name': trade_result.SECNAME if trade_result else None, 'latest_trade': { 'date': format_date(trade_result.TRADEDATE) if trade_result else None, 'close': format_decimal(trade_result.F007N) if trade_result else None, 'change_percent': format_decimal(trade_result.F010N) if trade_result else None, 'volume': format_decimal(trade_result.F004N) if trade_result else None, 'amount': format_decimal(trade_result.F011N) if trade_result else None, 'pe_ratio': format_decimal(trade_result.F026N) if trade_result else None, 'turnover_rate': format_decimal(trade_result.F012N) if trade_result else None, } if trade_result else None, 'latest_funding': { 'date': format_date(funding_result.TRADEDATE) if funding_result else None, 'financing_balance': format_decimal(funding_result.F001N) if funding_result else None, 'securities_balance': format_decimal(funding_result.F004N) if funding_result else None, 'total_balance': format_decimal(funding_result.F009N) if funding_result else None, } if funding_result else None, 'latest_pledge': { 'date': format_date(pledge_result.ENDDATE) if pledge_result else None, 'pledge_ratio': format_decimal(pledge_result.F005N) if pledge_result else None, 'pledge_count': int(pledge_result.F004N) if pledge_result and pledge_result.F004N else None, } if pledge_result else None } return jsonify({ 'success': True, 'data': summary }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/stocks/search', methods=['GET']) def search_stocks(): 
"""鎼滅储鑲$エ鍜屾寚鏁帮紙鏀寔浠g爜銆佸悕绉版悳绱級""" try: query = request.args.get('q', '').strip() limit = request.args.get('limit', 20, type=int) search_type = request.args.get('type', 'all') # all, stock, index if not query: return jsonify({ 'success': False, 'error': '璇疯緭鍏ユ悳绱㈠叧閿瘝' }), 400 results = [] with engine.connect() as conn: # 鎼滅储鎸囨暟锛堜紭鍏堟樉绀烘寚鏁帮紝鍥犱负閫氬父鐢ㄦ埛鎼滅储浠g爜鏃舵寚鏁版洿甯哥敤锛? if search_type in ('all', 'index'): index_sql = text(""" SELECT DISTINCT INDEXCODE as stock_code, SECNAME as stock_name, INDEXNAME as full_name, F018V as exchange FROM ea_exchangeindex WHERE ( UPPER(INDEXCODE) LIKE UPPER(:query_pattern) OR UPPER(SECNAME) LIKE UPPER(:query_pattern) OR UPPER(INDEXNAME) LIKE UPPER(:query_pattern) ) ORDER BY CASE WHEN UPPER(INDEXCODE) = UPPER(:exact_query) THEN 1 WHEN UPPER(SECNAME) = UPPER(:exact_query) THEN 2 WHEN UPPER(INDEXCODE) LIKE UPPER(:prefix_pattern) THEN 3 WHEN UPPER(SECNAME) LIKE UPPER(:prefix_pattern) THEN 4 ELSE 5 END, INDEXCODE LIMIT :limit """) index_result = conn.execute(index_sql, { 'query_pattern': f'%{query}%', 'exact_query': query, 'prefix_pattern': f'{query}%', 'limit': limit }).fetchall() for row in index_result: results.append({ 'stock_code': row.stock_code, 'stock_name': row.stock_name, 'full_name': row.full_name, 'exchange': row.exchange, 'isIndex': True, 'security_type': '鎸囨暟' }) # 鎼滅储鑲$エ if search_type in ('all', 'stock'): stock_sql = text(""" SELECT DISTINCT SECCODE as stock_code, SECNAME as stock_name, F001V as pinyin_abbr, F003V as security_type, F005V as exchange, F011V as listing_status FROM ea_stocklist WHERE ( UPPER(SECCODE) LIKE UPPER(:query_pattern) OR UPPER(SECNAME) LIKE UPPER(:query_pattern) OR UPPER(F001V) LIKE UPPER(:query_pattern) ) AND (F011V = '姝e父涓婂競' OR F010V = '013001') AND F003V IN ('A鑲?, 'B鑲?) 
# NOTE(review): this file region is doubly corrupted in the repo copy — GBK comments render as
# mojibake and original line breaks have been collapsed. Code bytes below are left untouched;
# only English review comments are added between the physical lines.
#
# (1) Tail of the preceding search endpoint: runs the stock search SQL (relevance-ordered:
#     exact code > exact name > exact pinyin > code prefix > name prefix), appends stock rows
#     with isIndex=False, then for type=all re-sorts the merged index+stock results with
#     sort_key (exact/prefix match first, indexes before stocks at equal rank), truncates to
#     `limit`, and returns {success, data, count}. On error it logs and returns str(e) with 500
#     — NOTE(review): returning str(e) to the client leaks internals; prefer a generic message.
# (2) GET /api/market/heatmap (get_market_heatmap): market heatmap for one trade date.
#     Reads ?date= and ?limit= (display cap, default 500 — statistics are computed over ALL rows).
ORDER BY CASE WHEN UPPER(SECCODE) = UPPER(:exact_query) THEN 1 WHEN UPPER(SECNAME) = UPPER(:exact_query) THEN 2 WHEN UPPER(F001V) = UPPER(:exact_query) THEN 3 WHEN UPPER(SECCODE) LIKE UPPER(:prefix_pattern) THEN 4 WHEN UPPER(SECNAME) LIKE UPPER(:prefix_pattern) THEN 5 WHEN UPPER(F001V) LIKE UPPER(:prefix_pattern) THEN 6 ELSE 7 END, SECCODE LIMIT :limit """) stock_result = conn.execute(stock_sql, { 'query_pattern': f'%{query}%', 'exact_query': query, 'prefix_pattern': f'{query}%', 'limit': limit }).fetchall() for row in stock_result: results.append({ 'stock_code': row.stock_code, 'stock_name': row.stock_name, 'pinyin_abbr': row.pinyin_abbr, 'security_type': row.security_type, 'exchange': row.exchange, 'listing_status': row.listing_status, 'isIndex': False }) # 濡傛灉鎼滅储鍏ㄩ儴锛屾寜鐩稿叧鎬ч噸鏂版帓搴忥紙绮剧‘鍖归厤浼樺厛锛? if search_type == 'all': def sort_key(item): code = item['stock_code'].upper() name = item['stock_name'].upper() q = query.upper() # 绮剧‘鍖归厤浠g爜浼樺厛 if code == q: return (0, not item['isIndex'], code) # 鎸囨暟浼樺厛 # 绮剧‘鍖归厤鍚嶇О if name == q: return (1, not item['isIndex'], code) # 鍓嶇紑鍖归厤浠g爜 if code.startswith(q): return (2, not item['isIndex'], code) # 鍓嶇紑鍖归厤鍚嶇О if name.startswith(q): return (3, not item['isIndex'], code) return (4, not item['isIndex'], code) results.sort(key=sort_key) # 闄愬埗鎬绘暟 results = results[:limit] return jsonify({ 'success': True, 'data': results, 'count': len(results) }) except Exception as e: app.logger.error(f"鎼滅储鑲$エ/鎸囨暟閿欒: {e}") return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/market/heatmap', methods=['GET']) def get_market_heatmap(): """鑾峰彇甯傚満鐑姏鍥炬暟鎹紙鍩轰簬甯傚€煎拰娑ㄨ穼骞咃級""" try: # 鑾峰彇浜ゆ槗鏃ユ湡鍙傛暟 trade_date = request.args.get('date') # 鍓嶇鏄剧ず鐢ㄧ殑limit锛屼絾缁熻鏁版嵁浼氬熀浜庡叏閮ㄨ偂绁?
# Heatmap continued: when no ?date= is given, falls back to MAX(TRADEDATE) from ea_trade
# (404 if none). Then pulls every stock for that date — change% (F010N, guaranteed non-NULL by
# the WHERE), close (F007N), market cap (F021N*F007N/1e8), amount (F011N/1e8), turnover (F012N),
# plus industry/province via LEFT JOIN ea_baseinfo — ordered by market cap descending.
display_limit = request.args.get('limit', 500, type=int) with engine.connect() as conn: # 濡傛灉娌℃湁鎸囧畾鏃ユ湡锛岃幏鍙栨渶鏂颁氦鏄撴棩 if not trade_date: latest_date_result = conn.execute(text(""" SELECT MAX(TRADEDATE) as latest_date FROM ea_trade """)).fetchone() trade_date = latest_date_result.latest_date if latest_date_result else None if not trade_date: return jsonify({ 'success': False, 'error': '鏃犳硶鑾峰彇浜ゆ槗鏁版嵁' }), 404 # 鑾峰彇鍏ㄩ儴鑲$エ鏁版嵁鐢ㄤ簬缁熻 all_stocks_sql = text(""" SELECT t.SECCODE as stock_code, t.SECNAME as stock_name, t.F010N as change_percent, -- 娑ㄨ穼骞? t.F007N as close_price, -- 鏀剁洏浠? t.F021N * t.F007N / 100000000 as market_cap, -- 甯傚€?浜垮厓) t.F011N / 100000000 as amount, -- 鎴愪氦棰?浜垮厓) t.F012N as turnover_rate, -- 鎹㈡墜鐜? b.F034V as industry, -- 鐢充竾琛屼笟鍒嗙被涓€绾у悕绉? b.F026V as province -- 鎵€灞炵渷浠? FROM ea_trade t LEFT JOIN ea_baseinfo b ON t.SECCODE = b.SECCODE WHERE t.TRADEDATE = :trade_date AND t.F010N IS NOT NULL -- 浠呯粺璁″綋鏃ユ湁娑ㄨ穼骞呮暟鎹殑鑲$エ ORDER BY market_cap DESC """) all_result = conn.execute(all_stocks_sql, { 'trade_date': trade_date }).fetchall() # 璁$畻缁熻鏁版嵁锛堝熀浜庡叏閮ㄨ偂绁級 total_market_cap = 0 total_amount = 0 rising_count = 0 falling_count = 0 flat_count = 0 all_data = [] for row in all_result: # F010N 宸插湪 SQL 涓‘淇濋潪绌?
# Aggregation pass over ALL rows (totals + rising/falling/flat counts); only the first
# display_limit rows are returned as heatmap data. NOTE(review): `if row.market_cap else 0`
# style guards map both NULL and 0 to 0 — harmless here, but the same idiom elsewhere in this
# file maps a legitimate 0 to None. The response then starts GET /api/market/statistics
# (get_market_statistics), which reads exchange-level indicators from ea_blocktrading
# (latest TRADEDATE fallback, 404 when absent).
change_percent = float(row.change_percent) market_cap = float(row.market_cap) if row.market_cap else 0 amount = float(row.amount) if row.amount else 0 total_market_cap += market_cap total_amount += amount if change_percent > 0: rising_count += 1 elif change_percent < 0: falling_count += 1 else: flat_count += 1 all_data.append({ 'stock_code': row.stock_code, 'stock_name': row.stock_name, 'change_percent': change_percent, 'close_price': float(row.close_price) if row.close_price else 0, 'market_cap': market_cap, 'amount': amount, 'turnover_rate': float(row.turnover_rate) if row.turnover_rate else 0, 'industry': row.industry, 'province': row.province }) # 鍙繑鍥炲墠display_limit鏉$敤浜庣儹鍔涘浘鏄剧ず heatmap_data = all_data[:display_limit] return jsonify({ 'success': True, 'data': heatmap_data, 'trade_date': trade_date.strftime('%Y-%m-%d') if hasattr(trade_date, 'strftime') else str(trade_date), 'count': len(all_data), # 鍏ㄩ儴鑲$エ鏁伴噺 'display_count': len(heatmap_data), # 鏄剧ず鐨勮偂绁ㄦ暟閲? 'statistics': { 'total_market_cap': round(total_market_cap, 2), # 鎬诲競鍊硷紙浜垮厓锛? 'total_amount': round(total_amount, 2), # 鎬绘垚浜ら锛堜嚎鍏冿級 'rising_count': rising_count, # 涓婃定瀹舵暟 'falling_count': falling_count, # 涓嬭穼瀹舵暟 'flat_count': flat_count # 骞崇洏瀹舵暟 } }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/market/statistics', methods=['GET']) def get_market_statistics(): """鑾峰彇甯傚満缁熻鏁版嵁锛堜粠ea_blocktrading琛級""" try: # 鑾峰彇浜ゆ槗鏃ユ湡鍙傛暟 trade_date = request.args.get('date') with engine.connect() as conn: # 濡傛灉娌℃湁鎸囧畾鏃ユ湡锛岃幏鍙栨渶鏂颁氦鏄撴棩 if not trade_date: latest_date_result = conn.execute(text(""" SELECT MAX(TRADEDATE) as latest_date FROM ea_blocktrading """)).fetchone() trade_date = latest_date_result.latest_date if latest_date_result else None if not trade_date: return jsonify({ 'success': False, 'error': '鏃犳硶鑾峰彇缁熻鏁版嵁' }), 404 # 鑾峰彇娌繁涓ゅ競鐨勭粺璁℃暟鎹?
# get_market_statistics continued: fetches indicator rows for SSE ('012001') and SZSE
# ('012002') from ea_blocktrading for the chosen date, restricted to a whitelist of F001V
# indicator codes (market cap, float cap, turnover, avg P/E, listed-company counts).
# Rows are keyed as "<EXCHANGECODE>_<indicator_code>" in `statistics`, then summed into a
# `summary` dict. NOTE(review): mojibake comments prevent verifying which code maps to which
# exchange label — confirm the 250xxx indicator-code mapping against the data dictionary.
stats_sql = text(""" SELECT EXCHANGECODE, EXCHANGENAME, F001V as indicator_code, F002V as indicator_name, F003N as indicator_value, F004V as unit, TRADEDATE FROM ea_blocktrading WHERE TRADEDATE = :trade_date AND EXCHANGECODE IN ('012001', '012002') -- 鍙幏鍙栦笂浜ゆ墍鍜屾繁浜ゆ墍鐨勬暟鎹? AND F001V IN ( '250006', '250014', -- 娣变氦鎵€鑲$エ鎬诲競鍊笺€佷笂浜ゆ墍甯備环鎬诲€? '250007', '250015', -- 娣变氦鎵€鑲$エ娴侀€氬競鍊笺€佷笂浜ゆ墍娴侀€氬競鍊? '250008', -- 娣变氦鎵€鑲$エ鎴愪氦閲戦 '250010', '250019', -- 娣变氦鎵€鑲$エ骞冲潎甯傜泩鐜囥€佷笂浜ゆ墍骞冲潎甯傜泩鐜? '250050', '250001' -- 涓婁氦鎵€涓婂競鍏徃瀹舵暟銆佹繁浜ゆ墍涓婂競鍏徃鏁? ) """) result = conn.execute(stats_sql, { 'trade_date': trade_date }).fetchall() # 鏁寸悊鏁版嵁 statistics = {} for row in result: key = f"{row.EXCHANGECODE}_{row.indicator_code}" statistics[key] = { 'exchange_code': row.EXCHANGECODE, 'exchange_name': row.EXCHANGENAME, 'indicator_code': row.indicator_code, 'indicator_name': row.indicator_name, 'value': float(row.indicator_value) if row.indicator_value else 0, 'unit': row.unit } # 姹囨€绘暟鎹? summary = { 'total_market_cap': 0, # 鎬诲競鍊? 'total_float_cap': 0, # 娴侀€氬競鍊? 'total_amount': 0, # 鎴愪氦棰? 'sh_pe_ratio': 0, # 涓婁氦鎵€甯傜泩鐜? 'sz_pe_ratio': 0, # 娣变氦鎵€甯傜泩鐜? 'sh_companies': 0, # 涓婁氦鎵€涓婂競鍏徃鏁? 'sz_companies': 0 # 娣变氦鎵€涓婂競鍏徃鏁? } # 璁$畻姹囨€诲€? if '012001_250014' in statistics: # 涓婁氦鎵€甯備环鎬诲€? summary['total_market_cap'] += statistics['012001_250014']['value'] if '012002_250006' in statistics: # 娣变氦鎵€鑲$エ鎬诲競鍊? summary['total_market_cap'] += statistics['012002_250006']['value'] if '012001_250015' in statistics: # 涓婁氦鎵€娴侀€氬競鍊? summary['total_float_cap'] += statistics['012001_250015']['value'] if '012002_250007' in statistics: # 娣变氦鎵€鑲$エ娴侀€氬競鍊? summary['total_float_cap'] += statistics['012002_250007']['value'] # 鎴愪氦棰濋渶瑕佽幏鍙栦笂浜ゆ墍鐨勬暟鎹?
# SSE turnover is fetched by a fuzzy match on the indicator NAME (F002V LIKE '%...%' LIMIT 1)
# because its mojibake'd code is not in the whitelist above — NOTE(review): name-based matching
# is fragile; prefer the fixed F001V code once confirmed. Also returns the last 30 distinct
# trade dates, all dates normalized to 'YYYY-MM-DD'.
# 鑾峰彇涓婁氦鎵€鎴愪氦閲戦 sh_amount_result = conn.execute(text(""" SELECT F003N FROM ea_blocktrading WHERE TRADEDATE = :trade_date AND EXCHANGECODE = '012001' AND F002V LIKE '%鎴愪氦閲戦%' LIMIT 1 """), {'trade_date': trade_date}).fetchone() sh_amount = float(sh_amount_result.F003N) if sh_amount_result and sh_amount_result.F003N else 0 sz_amount = statistics['012002_250008']['value'] if '012002_250008' in statistics else 0 summary['total_amount'] = sh_amount + sz_amount if '012001_250019' in statistics: # 涓婁氦鎵€骞冲潎甯傜泩鐜? summary['sh_pe_ratio'] = statistics['012001_250019']['value'] if '012002_250010' in statistics: # 娣变氦鎵€鑲$エ骞冲潎甯傜泩鐜? summary['sz_pe_ratio'] = statistics['012002_250010']['value'] if '012001_250050' in statistics: # 涓婁氦鎵€涓婂競鍏徃瀹舵暟 summary['sh_companies'] = int(statistics['012001_250050']['value']) if '012002_250001' in statistics: # 娣变氦鎵€涓婂競鍏徃鏁? summary['sz_companies'] = int(statistics['012002_250001']['value']) # 鑾峰彇鍙敤鐨勪氦鏄撴棩鏈熷垪琛? available_dates_result = conn.execute(text(""" SELECT DISTINCT TRADEDATE FROM ea_blocktrading WHERE EXCHANGECODE IN ('012001', '012002') ORDER BY TRADEDATE DESC LIMIT 30 """)).fetchall() available_dates = [str(row.TRADEDATE) for row in available_dates_result] # 鏍煎紡鍖栨棩鏈熶负 YYYY-MM-DD formatted_trade_date = trade_date.strftime('%Y-%m-%d') if hasattr(trade_date, 'strftime') else str(trade_date).split(' ')[0][:10] formatted_available_dates = [ d.strftime('%Y-%m-%d') if hasattr(d, 'strftime') else str(d).split(' ')[0][:10] for d in [row.TRADEDATE for row in available_dates_result] ] return jsonify({ 'success': True, 'trade_date': formatted_trade_date, 'summary': summary, 'details': list(statistics.values()), 'available_dates': formatted_available_dates }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/concepts/daily-top', methods=['GET']) def get_daily_top_concepts(): """鑾峰彇姣忔棩娑ㄥ箙闈犲墠鐨勬蹇垫澘鍧?"" try: # 鑾峰彇浜ゆ槗鏃ユ湡鍙傛暟 trade_date = request.args.get('date') limit = request.args.get('limit', 6,
# GET /api/concepts/daily-top (get_daily_top_concepts): thin proxy over an internal
# concept-center service. NOTE(review): the service URL is a hard-coded internal IP
# (http://222.128.1.157:16801) — should come from config/env. POSTs {query:'', size, page,
# sort_by:'change_pct'} (+ trade_date when given) with a 10s timeout, then reshapes each
# concept: stocks are normalized so BOTH {stock_name, stock_code} and legacy {name, code}
# keys are present, and legacy aliases concept_name/change_percent are duplicated alongside
# the new fields (price_info, tags, outbreak_dates, hierarchy, hot_score).
type=int) # 鏋勫缓姒傚康涓績API鐨刄RL concept_api_url = 'http://222.128.1.157:16801/search' # 鍑嗗璇锋眰鏁版嵁 request_data = { 'query': '', 'size': limit, 'page': 1, 'sort_by': 'change_pct' } if trade_date: request_data['trade_date'] = trade_date # 璋冪敤姒傚康涓績API response = requests.post(concept_api_url, json=request_data, timeout=10) if response.status_code == 200: data = response.json() top_concepts = [] for concept in data.get('results', []): # 澶勭悊 stocks 瀛楁锛氬吋瀹?{name, code} 鍜?{stock_name, stock_code} 涓ょ鏍煎紡 raw_stocks = concept.get('stocks', []) formatted_stocks = [] for stock in raw_stocks: # 浼樺厛浣跨敤 stock_name锛屽叾娆′娇鐢?name stock_name = stock.get('stock_name') or stock.get('name', '') stock_code = stock.get('stock_code') or stock.get('code', '') formatted_stocks.append({ 'stock_name': stock_name, 'stock_code': stock_code, 'name': stock_name, # 鍏煎鏃ф牸寮? 'code': stock_code # 鍏煎鏃ф牸寮? }) # 淇濇寔涓?/concept-api/search 鐩稿悓鐨勫瓧娈电粨鏋勶紝骞舵坊鍔犳柊瀛楁 top_concepts.append({ 'concept_id': concept.get('concept_id'), 'concept': concept.get('concept'), # 鍘熷瀛楁鍚? 'concept_name': concept.get('concept'), # 鍏煎鏃у瓧娈靛悕 'description': concept.get('description'), 'stock_count': concept.get('stock_count', 0), 'score': concept.get('score'), 'match_type': concept.get('match_type'), 'price_info': concept.get('price_info', {}), # 瀹屾暣鐨勪环鏍间俊鎭? 'change_percent': concept.get('price_info', {}).get('avg_change_pct', 0), # 鍏煎鏃у瓧娈? 'tags': concept.get('tags', []), # 鏍囩鍒楄〃 'outbreak_dates': concept.get('outbreak_dates', []), # 鐖嗗彂鏃ユ湡鍒楄〃 'hierarchy': concept.get('hierarchy'), # 灞傜骇淇℃伅 {lv1, lv2, lv3} 'stocks': formatted_stocks, # 杩斿洖鏍煎紡鍖栧悗鐨勮偂绁ㄥ垪琛? 
# daily-top tail: any non-200 from the upstream becomes a generic 500. Then GET
# /api/market/hotspot-overview (get_hotspot_overview) begins: index minute bars come from
# ClickHouse `index_minute`, concept anomalies from MySQL (v2 table with legacy fallback).
# Default index is 000001.SH; default date is today if it is in `trading_days_set`, else the
# nearest trading day via get_trading_day_near_date (both defined elsewhere in this file).
# Previous close is read from ea_exchangetrade (latest F006N before the target date).
'hot_score': concept.get('hot_score') }) # 鏍煎紡鍖栨棩鏈熶负 YYYY-MM-DD price_date = data.get('price_date', '') formatted_date = str(price_date).split(' ')[0][:10] if price_date else '' return jsonify({ 'success': True, 'data': top_concepts, 'trade_date': formatted_date, 'count': len(top_concepts) }) else: return jsonify({ 'success': False, 'error': '鑾峰彇姒傚康鏁版嵁澶辫触' }), 500 except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 # ==================== 鐑偣姒傝 API ==================== @app.route('/api/market/hotspot-overview', methods=['GET']) def get_hotspot_overview(): """ 鑾峰彇鐑偣姒傝鏁版嵁锛堢敤浜庝釜鑲′腑蹇冪殑鐑偣姒傝鍥捐〃锛? 杩斿洖锛氭寚鏁板垎鏃舵暟鎹?+ 姒傚康寮傚姩鏍囨敞 鏁版嵁鏉ユ簮锛? - 鎸囨暟鍒嗘椂锛欳lickHouse index_minute 琛? - 姒傚康寮傚姩锛歁ySQL concept_anomaly_hybrid 琛紙鏉ヨ嚜 realtime_detector.py锛? """ try: trade_date = request.args.get('date') index_code = request.args.get('index', '000001.SH') # 濡傛灉娌℃湁鎸囧畾鏃ユ湡锛屼娇鐢ㄦ渶鏂颁氦鏄撴棩 if not trade_date: today = date.today() if today in trading_days_set: trade_date = today.strftime('%Y-%m-%d') else: target_date = get_trading_day_near_date(today) trade_date = target_date.strftime('%Y-%m-%d') if target_date else today.strftime('%Y-%m-%d') # 1. 鑾峰彇鎸囨暟鍒嗘椂鏁版嵁 client = get_clickhouse_client() target_date_obj = datetime.strptime(trade_date, '%Y-%m-%d').date() index_data = client.execute( """ SELECT timestamp, open, high, low, close, volume FROM index_minute WHERE code = %(code)s AND toDate(timestamp) = %(date)s ORDER BY timestamp """, { 'code': index_code, 'date': target_date_obj } ) # 鑾峰彇鏄ㄦ敹浠? code_no_suffix = index_code.split('.')[0] prev_close = None with engine.connect() as conn: prev_result = conn.execute(text(""" SELECT F006N FROM ea_exchangetrade WHERE INDEXCODE = :code AND TRADEDATE < :today ORDER BY TRADEDATE DESC LIMIT 1 """), { 'code': code_no_suffix, 'today': target_date_obj }).fetchone() if prev_result and prev_result[0]: prev_close = float(prev_result[0]) # 鏍煎紡鍖栨寚鏁版暟鎹?
# get_hotspot_overview continued: builds the per-minute index timeline (change_pct relative
# to prev_close, rounded to 4dp; None when prev_close is missing), then loads concept
# anomaly alerts — first from concept_anomaly_v2; if that query fails or returns no rows,
# falls back to the legacy concept_anomaly_hybrid table.
# NOTE(review): `triggered_rules` JSON is parsed inside a bare `except: pass` — swallows
# even KeyboardInterrupt; narrow to (TypeError, ValueError).
index_timeline = [] for row in index_data: ts, open_p, high_p, low_p, close_p, vol = row change_pct = None if prev_close and close_p: change_pct = round((float(close_p) - prev_close) / prev_close * 100, 4) index_timeline.append({ 'time': ts.strftime('%H:%M'), 'timestamp': ts.isoformat(), 'price': float(close_p) if close_p else None, 'open': float(open_p) if open_p else None, 'high': float(high_p) if high_p else None, 'low': float(low_p) if low_p else None, 'volume': int(vol) if vol else 0, 'change_pct': change_pct }) # 2. 鑾峰彇姒傚康寮傚姩鏁版嵁锛堜紭鍏堜粠 V2 琛紝fallback 鍒版棫琛級 alerts = [] use_v2 = False with engine.connect() as conn: # 灏濊瘯鏌ヨ V2 琛紙鏃堕棿鐗囧榻?+ 鎸佺画纭鐗堟湰锛? try: v2_result = conn.execute(text(""" SELECT concept_id, alert_time, trade_date, alert_type, final_score, rule_score, ml_score, trigger_reason, confirm_ratio, alpha, alpha_zscore, amt_zscore, rank_zscore, momentum_3m, momentum_5m, limit_up_ratio, triggered_rules FROM concept_anomaly_v2 WHERE trade_date = :trade_date ORDER BY alert_time """), {'trade_date': trade_date}) v2_rows = v2_result.fetchall() if v2_rows: use_v2 = True for row in v2_rows: triggered_rules = None if row[16]: try: triggered_rules = json.loads(row[16]) if isinstance(row[16], str) else row[16] except: pass alerts.append({ 'concept_id': row[0], 'concept_name': row[0], # 鍚庨潰浼氬~鍏? 
# V2 row mapping by positional index (0=concept_id .. 16=triggered_rules).
# NOTE(review): the `float(row[i]) if row[i] else None` idiom converts a stored 0 / 0.0
# score to None — if 0 is a legal value for these metrics this silently drops it; use
# `is not None` checks. importance_score is final_score/100.
'time': row[1].strftime('%H:%M') if row[1] else None, 'timestamp': row[1].isoformat() if row[1] else None, 'alert_type': row[3], 'final_score': float(row[4]) if row[4] else None, 'rule_score': float(row[5]) if row[5] else None, 'ml_score': float(row[6]) if row[6] else None, 'trigger_reason': row[7], # V2 鏂板瀛楁 'confirm_ratio': float(row[8]) if row[8] else None, 'alpha': float(row[9]) if row[9] else None, 'alpha_zscore': float(row[10]) if row[10] else None, 'amt_zscore': float(row[11]) if row[11] else None, 'rank_zscore': float(row[12]) if row[12] else None, 'momentum_3m': float(row[13]) if row[13] else None, 'momentum_5m': float(row[14]) if row[14] else None, 'limit_up_ratio': float(row[15]) if row[15] else 0, 'triggered_rules': triggered_rules, # 鍏煎瀛楁 'importance_score': float(row[4]) / 100 if row[4] else None, 'is_v2': True, }) except Exception as v2_err: app.logger.debug(f"V2 琛ㄦ煡璇㈠け璐ワ紝浣跨敤鏃ц〃: {v2_err}") # Fallback: 鏌ヨ鏃ц〃 if not use_v2: try: alert_result = conn.execute(text(""" SELECT a.concept_id, a.alert_time, a.trade_date, a.alert_type, a.final_score, a.rule_score, a.ml_score, a.trigger_reason, a.alpha, a.alpha_delta, a.amt_ratio, a.amt_delta, a.rank_pct, a.limit_up_ratio, a.stock_count, a.total_amt, a.triggered_rules FROM concept_anomaly_hybrid a WHERE a.trade_date = :trade_date ORDER BY a.alert_time """), {'trade_date': trade_date}) for row in alert_result: triggered_rules = None if row[16]: try: triggered_rules = json.loads(row[16]) if isinstance(row[16], str) else row[16] except: pass limit_up_ratio = float(row[13]) if row[13] else 0 stock_count = int(row[14]) if row[14] else 0 limit_up_count = int(limit_up_ratio * stock_count) if stock_count > 0 else 0 alerts.append({ 'concept_id': row[0], 'concept_name': row[0], 'time': row[1].strftime('%H:%M') if row[1] else None, 'timestamp': row[1].isoformat() if row[1] else None, 'alert_type': row[3], 'final_score': float(row[4]) if row[4] else None, 'rule_score': float(row[5]) if row[5] else None, 'ml_score': 
# Legacy-table alerts carry is_v2=False and derive limit_up_count = ratio * stock_count.
# Concept display names are then resolved in ONE batched ES mget against concept_library_v3
# (failure only logs a warning — names fall back to the concept_id).
# NOTE(review): ES endpoint is a hard-coded internal IP and `Elasticsearch` is re-imported
# locally although it is already imported at module top.
float(row[6]) if row[6] else None, 'trigger_reason': row[7], 'alpha': float(row[8]) if row[8] else None, 'alpha_delta': float(row[9]) if row[9] else None, 'amt_ratio': float(row[10]) if row[10] else None, 'amt_delta': float(row[11]) if row[11] else None, 'rank_pct': float(row[12]) if row[12] else None, 'limit_up_ratio': limit_up_ratio, 'limit_up_count': limit_up_count, 'stock_count': stock_count, 'total_amt': float(row[15]) if row[15] else None, 'triggered_rules': triggered_rules, 'importance_score': float(row[4]) / 100 if row[4] else None, 'is_v2': False, }) except Exception as old_err: app.logger.debug(f"鏃ц〃鏌ヨ涔熷け璐? {old_err}") # 灏濊瘯鎵归噺鑾峰彇姒傚康鍚嶇О if alerts: concept_ids = list(set(a['concept_id'] for a in alerts)) concept_names = {} # 鍒濆鍖?concept_names 瀛楀吀 try: from elasticsearch import Elasticsearch es_client = Elasticsearch(["http://222.128.1.157:19200"]) es_result = es_client.mget( index='concept_library_v3', body={'ids': concept_ids}, _source=['concept'] ) for doc in es_result.get('docs', []): if doc.get('found') and doc.get('_source'): concept_names[doc['_id']] = doc['_source'].get('concept', doc['_id']) # 鏇存柊 alerts 涓殑姒傚康鍚嶇О for alert in alerts: if alert['concept_id'] in concept_names: alert['concept_name'] = concept_names[alert['concept_id']] except Exception as e: app.logger.warning(f"鑾峰彇姒傚康鍚嶇О澶辫触: {e}") # 璁$畻缁熻淇℃伅 day_high = max([d['price'] for d in index_timeline if d['price']], default=None) day_low = min([d['price'] for d in index_timeline if d['price']], default=None) latest_price = index_timeline[-1]['price'] if index_timeline else None latest_change_pct = index_timeline[-1]['change_pct'] if index_timeline else None return jsonify({ 'success': True, 'data': { 'trade_date': trade_date, 'index': { 'code': index_code, 'name': '涓婅瘉鎸囨暟' if index_code == '000001.SH' else index_code, 'prev_close': prev_close, 'latest_price': latest_price, 'change_pct': latest_change_pct, 'high': day_high, 'low': day_low, 'timeline': index_timeline }, 'alerts': alerts, 
# hotspot-overview tail: alert_summary counts every known alert_type; on error the FULL
# traceback is returned in the JSON body — the inline comment marks this as temporary debug
# output; remove before production. Then GET /api/concept//stocks (get_concept_stocks) begins.
# BUG(review): the route string '/api/concept//stocks' has lost its '<concept_id>' URL
# converter (consistent with the angle-bracket stripping seen elsewhere in this chunk) — as
# written, Flask will call the view without the `concept_id` argument and raise TypeError.
# Restore '/api/concept/<concept_id>/stocks'.
'alert_count': len(alerts), 'alert_summary': { 'surge': len([a for a in alerts if a['alert_type'] == 'surge']), 'surge_up': len([a for a in alerts if a['alert_type'] == 'surge_up']), 'surge_down': len([a for a in alerts if a['alert_type'] == 'surge_down']), 'volume_surge_up': len([a for a in alerts if a['alert_type'] == 'volume_surge_up']), 'shrink_surge_up': len([a for a in alerts if a['alert_type'] == 'shrink_surge_up']), 'volume_oscillation': len([a for a in alerts if a['alert_type'] == 'volume_oscillation']), 'limit_up': len([a for a in alerts if a['alert_type'] == 'limit_up']), 'volume_spike': len([a for a in alerts if a['alert_type'] == 'volume_spike']), 'rank_jump': len([a for a in alerts if a['alert_type'] == 'rank_jump']) } } }) except Exception as e: import traceback error_trace = traceback.format_exc() app.logger.error(f"鑾峰彇鐑偣姒傝鏁版嵁澶辫触: {error_trace}") return jsonify({ 'success': False, 'error': str(e), 'traceback': error_trace # 涓存椂杩斿洖瀹屾暣閿欒淇℃伅鐢ㄤ簬璋冭瘯 }), 500 @app.route('/api/concept//stocks', methods=['GET']) def get_concept_stocks(concept_id): """ 鑾峰彇姒傚康鐨勭浉鍏宠偂绁ㄥ垪琛紙甯﹀疄鏃舵定璺屽箙锛? Args: concept_id: 姒傚康 ID 鎴栨蹇靛悕绉帮紙鏀寔涓ょ鏂瑰紡鏌ヨ锛? Returns: - stocks: 鑲$エ鍒楄〃 [{code, name, reason, change_pct}, ...] """ try: from elasticsearch import Elasticsearch from clickhouse_driver import Client es_client = Elasticsearch(["http://222.128.1.157:19200"]) # 1. 灏濊瘯澶氱鏂瑰紡鑾峰彇姒傚康鏁版嵁 source = None concept_name = concept_id # 鏂瑰紡1: 鍏堝皾璇曟寜 ID 鏌ヨ try: es_result = es_client.get(index='concept_library_v3', id=concept_id) if es_result.get('found'): source = es_result.get('_source', {}) concept_name = source.get('concept', concept_id) except: pass # 鏂瑰紡2: 濡傛灉鎸?ID 娌℃壘鍒帮紝灏濊瘯鎸夋蹇靛悕绉版悳绱?
# get_concept_stocks continued: fallback lookup by exact concept name (term query on
# concept.keyword, size 1); 404 when neither ID nor name matched. Stocks from the ES doc are
# filtered to dicts with a 6-digit `code`; early-returns with an empty/partial stock list
# when nothing remains. Latest trading day and its predecessor come from the trading_days
# table (<= today, ordered DESC).
if not source: try: search_result = es_client.search( index='concept_library_v3', body={ 'query': { 'term': { 'concept.keyword': concept_id } }, 'size': 1 } ) hits = search_result.get('hits', {}).get('hits', []) if hits: source = hits[0].get('_source', {}) concept_name = source.get('concept', concept_id) except Exception as search_err: app.logger.debug(f"ES 鎼滅储姒傚康澶辫触: {search_err}") if not source: return jsonify({ 'success': False, 'error': f'姒傚康 {concept_id} 涓嶅瓨鍦? }), 404 raw_stocks = source.get('stocks', []) if not raw_stocks: return jsonify({ 'success': True, 'data': { 'concept_id': concept_id, 'concept_name': concept_name, 'stocks': [] } }) # 鎻愬彇鑲$エ浠g爜鍜屽師鍥? stocks_info = [] stock_codes = [] for s in raw_stocks: if isinstance(s, dict): code = s.get('code', '') if code and len(code) == 6: stocks_info.append({ 'code': code, 'name': s.get('name', ''), 'reason': s.get('reason', '') }) stock_codes.append(code) if not stock_codes: return jsonify({ 'success': True, 'data': { 'concept_id': concept_id, 'concept_name': concept_name, 'stocks': stocks_info } }) # 2. 鑾峰彇鏈€鏂颁氦鏄撴棩鍜屽墠涓€浜ゆ槗鏃? today = datetime.now().date() trading_day = None prev_trading_day = None with engine.connect() as conn: # 鑾峰彇鏈€鏂颁氦鏄撴棩 result = conn.execute(text(""" SELECT EXCHANGE_DATE FROM trading_days WHERE EXCHANGE_DATE <= :today ORDER BY EXCHANGE_DATE DESC LIMIT 1 """), {"today": today}).fetchone() if result: trading_day = result[0].date() if hasattr(result[0], 'date') else result[0] # 鑾峰彇鍓嶄竴浜ゆ槗鏃? if trading_day: result = conn.execute(text(""" SELECT EXCHANGE_DATE FROM trading_days WHERE EXCHANGE_DATE < :date ORDER BY EXCHANGE_DATE DESC LIMIT 1 """), {"date": trading_day}).fetchone() if result: prev_trading_day = result[0].date() if hasattr(result[0], 'date') else result[0] # 3. 浠?MySQL ea_trade 鑾峰彇鍓嶄竴浜ゆ槗鏃ユ敹鐩樹环锛團007N锛?
# Prev-close lookup uses dynamically numbered bound parameters (:code0..:codeN) — safe.
# Current prices come from ClickHouse stock_minute (latest row per code via LIMIT 1 BY code).
# SECURITY(review): the ClickHouse password is hard-coded in source and the code list is
# interpolated directly into the SQL string; codes are internally generated 6-digit strings
# so injection risk is low, but both should move to config + bound parameters.
prev_close_map = {} if prev_trading_day and stock_codes: with engine.connect() as conn: placeholders = ','.join([f':code{i}' for i in range(len(stock_codes))]) params = {f'code{i}': code for i, code in enumerate(stock_codes)} params['trade_date'] = prev_trading_day result = conn.execute(text(f""" SELECT SECCODE, F007N FROM ea_trade WHERE SECCODE IN ({placeholders}) AND TRADEDATE = :trade_date AND F007N > 0 """), params).fetchall() prev_close_map = {row[0]: float(row[1]) for row in result if row[1]} # 4. 浠?ClickHouse 鑾峰彇鏈€鏂颁环鏍? current_price_map = {} if stock_codes: try: ch_client = Client( host='127.0.0.1', port=9000, user='default', password='Zzl33818!', database='stock' ) # 杞崲涓?ClickHouse 鏍煎紡 ch_codes = [] code_mapping = {} for code in stock_codes: if code.startswith('6'): ch_code = f"{code}.SH" elif code.startswith('0') or code.startswith('3'): ch_code = f"{code}.SZ" else: ch_code = f"{code}.BJ" ch_codes.append(ch_code) code_mapping[ch_code] = code ch_codes_str = "','".join(ch_codes) # 鏌ヨ褰撳ぉ鏈€鏂颁环鏍? query = f""" SELECT code, close FROM stock_minute WHERE code IN ('{ch_codes_str}') AND toDate(timestamp) = today() ORDER BY timestamp DESC LIMIT 1 BY code """ result = ch_client.execute(query) for row in result: ch_code, close_price = row if ch_code in code_mapping and close_price: original_code = code_mapping[ch_code] current_price_map[original_code] = float(close_price) except Exception as ch_err: app.logger.warning(f"ClickHouse 鑾峰彇浠锋牸澶辫触: {ch_err}") # 5. 
# Final assembly: change_pct vs prev close (2dp); stocks sorted descending with a -999
# sentinel so None change_pct sinks to the bottom. Then GET /api/market/concept-alerts
# (get_concept_alerts) begins: builds a WHERE clause only from fixed, whitelisted fragments
# with bound parameters (date defaults to CURDATE()), so the f-string SQL below is safe.
璁$畻娑ㄨ穼骞呭苟鍚堝苟鏁版嵁 result_stocks = [] for stock in stocks_info: code = stock['code'] prev_close = prev_close_map.get(code) current_price = current_price_map.get(code) change_pct = None if prev_close and current_price and prev_close > 0: change_pct = round((current_price - prev_close) / prev_close * 100, 2) result_stocks.append({ 'code': code, 'name': stock['name'], 'reason': stock['reason'], 'change_pct': change_pct, 'price': current_price, 'prev_close': prev_close }) # 鎸夋定璺屽箙鎺掑簭锛堟定鍋滀紭鍏堬級 result_stocks.sort(key=lambda x: x.get('change_pct') if x.get('change_pct') is not None else -999, reverse=True) return jsonify({ 'success': True, 'data': { 'concept_id': concept_id, 'concept_name': concept_name, 'stock_count': len(result_stocks), 'trading_day': str(trading_day) if trading_day else None, 'stocks': result_stocks } }) except Exception as e: import traceback app.logger.error(f"鑾峰彇姒傚康鑲$エ澶辫触: {traceback.format_exc()}") return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/market/concept-alerts', methods=['GET']) def get_concept_alerts(): """ 鑾峰彇姒傚康寮傚姩鍒楄〃锛堟敮鎸佸垎椤靛拰绛涢€夛級 """ try: trade_date = request.args.get('date') alert_type = request.args.get('type') # surge/limit_up/rank_jump concept_type = request.args.get('concept_type') # leaf/lv1/lv2/lv3 limit = request.args.get('limit', 50, type=int) offset = request.args.get('offset', 0, type=int) # 鏋勫缓鏌ヨ鏉′欢 conditions = [] params = {'limit': limit, 'offset': offset} if trade_date: conditions.append("trade_date = :trade_date") params['trade_date'] = trade_date if alert_type: conditions.append("alert_type = :alert_type") params['alert_type'] = alert_type if concept_type: conditions.append("concept_type = :concept_type") params['concept_type'] = concept_type else: conditions.append("trade_date = CURDATE()") where_clause = " AND ".join(conditions) if conditions else "1=1" with engine.connect() as conn: # 鑾峰彇鎬绘暟 count_sql = text(f"SELECT COUNT(*) FROM concept_minute_alert WHERE {where_clause}") total = 
# get_concept_alerts continued: COUNT(*) for pagination, then the page itself from
# concept_minute_alert ordered by alert_time DESC with bound :limit/:offset. extra_info JSON
# is parsed under a bare `except: pass` — NOTE(review): narrow to (TypeError, ValueError).
# Next, GET /api/market/rise-analysis/ (get_rise_analysis) begins.
# BUG(review): this route string has lost its '<seccode>' converter (same angle-bracket
# stripping as '/api/concept//stocks') — the view takes `seccode` but the URL never supplies
# it; restore '/api/market/rise-analysis/<seccode>'.
conn.execute(count_sql, params).scalar() # 鑾峰彇鏁版嵁 query_sql = text(f""" SELECT id, concept_id, concept_name, alert_time, alert_type, trade_date, change_pct, prev_change_pct, change_delta, limit_up_count, prev_limit_up_count, limit_up_delta, rank_position, prev_rank_position, rank_delta, index_price, index_change_pct, stock_count, concept_type, extra_info FROM concept_minute_alert WHERE {where_clause} ORDER BY alert_time DESC LIMIT :limit OFFSET :offset """) result = conn.execute(query_sql, params) alerts = [] for row in result: extra_info = None if row[19]: try: extra_info = json.loads(row[19]) if isinstance(row[19], str) else row[19] except: pass alerts.append({ 'id': row[0], 'concept_id': row[1], 'concept_name': row[2], 'alert_time': row[3].isoformat() if row[3] else None, 'alert_type': row[4], 'trade_date': row[5].isoformat() if row[5] else None, 'change_pct': float(row[6]) if row[6] else None, 'prev_change_pct': float(row[7]) if row[7] else None, 'change_delta': float(row[8]) if row[8] else None, 'limit_up_count': row[9], 'prev_limit_up_count': row[10], 'limit_up_delta': row[11], 'rank_position': row[12], 'prev_rank_position': row[13], 'rank_delta': row[14], 'index_price': float(row[15]) if row[15] else None, 'index_change_pct': float(row[16]) if row[16] else None, 'stock_count': row[17], 'concept_type': row[18], 'extra_info': extra_info }) return jsonify({ 'success': True, 'data': alerts, 'total': total, 'limit': limit, 'offset': offset }) except Exception as e: import traceback app.logger.error(f"鑾峰彇姒傚康寮傚姩鍒楄〃澶辫触: {traceback.format_exc()}") return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/market/rise-analysis/', methods=['GET']) def get_rise_analysis(seccode): """鑾峰彇鑲$エ娑ㄥ箙鍒嗘瀽鏁版嵁锛堜粠 Elasticsearch 鑾峰彇锛?"" try: # 鑾峰彇鏃ユ湡鑼冨洿鍙傛暟 start_date = request.args.get('start_date') end_date = request.args.get('end_date') limit = request.args.get('limit', 100, type=int) # 鏋勫缓 ES 鏌ヨ must_conditions = [ {"term": {"stock_code": seccode}} ] # 
# rise-analysis body: ES bool query on stock_rise_analysis — term on stock_code, optional
# trade_date range (yyyy-MM-dd), newest first, `size`=limit, and the large
# rise_reason_detail_embedding vector excluded from _source. When the doc carries
# verification_info, its processed_result entries are flattened into verification_reports.
娣诲姞鏃ユ湡鑼冨洿绛涢€? if start_date and end_date: must_conditions.append({ "range": { "trade_date": { "gte": start_date, "lte": end_date, "format": "yyyy-MM-dd" } } }) es_query = { "query": { "bool": { "must": must_conditions } }, "sort": [ {"trade_date": {"order": "desc"}} ], "size": limit, "_source": { "excludes": ["rise_reason_detail_embedding"] # 鎺掗櫎鍚戦噺瀛楁 } } # 鎵ц ES 鏌ヨ response = es_client.search(index="stock_rise_analysis", body=es_query) # 鏍煎紡鍖栨暟鎹? rise_analysis_data = [] for hit in response['hits']['hits']: source = hit['_source'] # 澶勭悊鐮旀姤寮曠敤鏁版嵁 verification_reports = [] if source.get('has_verification_info') and source.get('verification_info'): v_info = source['verification_info'] processed_results = v_info.get('processed_result', []) for report in processed_results: verification_reports.append({ 'publisher': report.get('publisher', ''), 'report_title': report.get('report_title', ''), 'author': report.get('author', ''), 'declare_date': report.get('declare_date', ''), 'content': report.get('content', ''), 'verification_item': report.get('verification_item', ''), 'match_ratio': report.get('match_ratio', 0), 'match_score': report.get('match_score', '') }) rise_analysis_data.append({ 'stock_code': source.get('stock_code', ''), 'stock_name': source.get('stock_name', ''), 'trade_date': source.get('trade_date', ''), 'rise_rate': source.get('rise_rate', 0), 'close_price': source.get('close_price', 0), 'volume': source.get('volume', 0), 'amount': source.get('amount', 0), 'main_business': source.get('main_business', ''), 'rise_reason_brief': source.get('rise_reason_brief', ''), 'rise_reason_detail': source.get('rise_reason_detail', ''), 'announcements': source.get('announcements', ''), 'verification_reports': verification_reports, 'has_verification_info': source.get('has_verification_info', False), 'create_time': source.get('create_time', ''), 'update_time': source.get('update_time', '') }) return jsonify({ 'success': True, 'data': rise_analysis_data, 'count': 
# rise-analysis tail: errors print the traceback to stdout (NOTE(review): use app.logger
# like the sibling endpoints). Then GET /api/company/comprehensive-analysis/
# (get_comprehensive_analysis) begins — BUG(review): route is missing its '<company_code>'
# converter, same stripping defect; restore '/api/company/comprehensive-analysis/<company_code>'.
# It reads four datasets: company_analysis (qualitative), business_segment_analysis,
# company_competitive_position (latest report_period), and company_business_structure.
# NOTE(review): each query opens its own `engine.connect()` — one shared connection would do.
len(rise_analysis_data), 'total': response['hits']['total']['value'] }) except Exception as e: import traceback print(f"ES鏌ヨ閿欒: {traceback.format_exc()}") return jsonify({ 'success': False, 'error': str(e) }), 500 # ============================================ # 鍏徃鍒嗘瀽鐩稿叧鎺ュ彛 # ============================================ @app.route('/api/company/comprehensive-analysis/', methods=['GET']) def get_comprehensive_analysis(company_code): """鑾峰彇鍏徃缁煎悎鍒嗘瀽鏁版嵁""" try: # 鑾峰彇鍏徃瀹氭€у垎鏋? qualitative_query = text(""" SELECT one_line_intro, investment_highlights, business_model_desc, company_story, positioning_analysis, unique_value_proposition, business_logic_explanation, revenue_driver_analysis, customer_value_analysis, strategy_description, strategic_initiatives, created_at, updated_at FROM company_analysis WHERE company_code = :company_code """) with engine.connect() as conn: qualitative_result = conn.execute(qualitative_query, {'company_code': company_code}).fetchone() # 鑾峰彇涓氬姟鏉垮潡鍒嗘瀽 segments_query = text(""" SELECT segment_name, segment_description, competitive_position, future_potential, key_customers, value_chain_position, created_at, updated_at FROM business_segment_analysis WHERE company_code = :company_code ORDER BY created_at DESC """) with engine.connect() as conn: segments_result = conn.execute(segments_query, {'company_code': company_code}).fetchall() # 鑾峰彇绔炰簤鍦颁綅鏁版嵁 - 鏈€鏂颁竴鏈? competitive_query = text(""" SELECT market_position_score, technology_score, brand_score, operation_score, finance_score, innovation_score, risk_score, growth_score, industry_avg_comparison, main_competitors, competitive_advantages, competitive_disadvantages, industry_rank, total_companies, report_period, updated_at FROM company_competitive_position WHERE company_code = :company_code ORDER BY report_period DESC LIMIT 1 """) with engine.connect() as conn: competitive_result = conn.execute(competitive_query, {'company_code': company_code}).fetchone() # 鑾峰彇涓氬姟缁撴瀯鏁版嵁 - 鏈€鏂颁竴鏈?
business_structure_query = text(""" SELECT business_name, parent_business, business_level, revenue, revenue_unit, revenue_ratio, profit, profit_unit, profit_ratio, revenue_growth, profit_growth, gross_margin, customer_count, market_share, report_period FROM company_business_structure WHERE company_code = :company_code AND report_period = (SELECT MAX(report_period) FROM company_business_structure WHERE company_code = :company_code) ORDER BY revenue_ratio DESC """) with engine.connect() as conn: business_structure_result = conn.execute(business_structure_query, {'company_code': company_code}).fetchall() # 鏋勫缓杩斿洖鏁版嵁 response_data = { 'company_code': company_code, 'qualitative_analysis': None, 'business_segments': [], 'competitive_position': None, 'business_structure': [] } # 澶勭悊瀹氭€у垎鏋愭暟鎹? if qualitative_result: response_data['qualitative_analysis'] = { 'core_positioning': { 'one_line_intro': qualitative_result.one_line_intro, 'investment_highlights': qualitative_result.investment_highlights, 'business_model_desc': qualitative_result.business_model_desc, 'company_story': qualitative_result.company_story }, 'business_understanding': { 'positioning_analysis': qualitative_result.positioning_analysis, 'unique_value_proposition': qualitative_result.unique_value_proposition, 'business_logic_explanation': qualitative_result.business_logic_explanation, 'revenue_driver_analysis': qualitative_result.revenue_driver_analysis, 'customer_value_analysis': qualitative_result.customer_value_analysis }, 'strategy': { 'strategy_description': qualitative_result.strategy_description, 'strategic_initiatives': qualitative_result.strategic_initiatives }, 'updated_at': qualitative_result.updated_at.strftime( '%Y-%m-%d %H:%M:%S') if qualitative_result.updated_at else None } # 澶勭悊涓氬姟鏉垮潡鏁版嵁 for segment in segments_result: response_data['business_segments'].append({ 'segment_name': segment.segment_name, 'segment_description': segment.segment_description, 'competitive_position': 
segment.competitive_position, 'future_potential': segment.future_potential, 'key_customers': segment.key_customers, 'value_chain_position': segment.value_chain_position, 'updated_at': segment.updated_at.strftime('%Y-%m-%d %H:%M:%S') if segment.updated_at else None }) # 澶勭悊绔炰簤鍦颁綅鏁版嵁 if competitive_result: response_data['competitive_position'] = { 'scores': { 'market_position': competitive_result.market_position_score, 'technology': competitive_result.technology_score, 'brand': competitive_result.brand_score, 'operation': competitive_result.operation_score, 'finance': competitive_result.finance_score, 'innovation': competitive_result.innovation_score, 'risk': competitive_result.risk_score, 'growth': competitive_result.growth_score }, 'analysis': { 'industry_avg_comparison': competitive_result.industry_avg_comparison, 'main_competitors': competitive_result.main_competitors, 'competitive_advantages': competitive_result.competitive_advantages, 'competitive_disadvantages': competitive_result.competitive_disadvantages }, 'ranking': { 'industry_rank': competitive_result.industry_rank, 'total_companies': competitive_result.total_companies, 'rank_percentage': round( (competitive_result.industry_rank / competitive_result.total_companies * 100), 2) if competitive_result.industry_rank and competitive_result.total_companies else None }, 'report_period': competitive_result.report_period, 'updated_at': competitive_result.updated_at.strftime( '%Y-%m-%d %H:%M:%S') if competitive_result.updated_at else None } # 澶勭悊涓氬姟缁撴瀯鏁版嵁 for business in business_structure_result: response_data['business_structure'].append({ 'business_name': business.business_name, 'parent_business': business.parent_business, 'business_level': business.business_level, 'revenue': format_decimal(business.revenue), 'revenue_unit': business.revenue_unit, 'profit': format_decimal(business.profit), 'profit_unit': business.profit_unit, 'financial_metrics': { 'revenue': format_decimal(business.revenue), 'revenue_ratio': 
@app.route('/api/company/value-chain-analysis/', methods=['GET'])  # NOTE(review): route appears to be missing a '<company_code>' converter (mangled source) — confirm against original
def get_value_chain_analysis(company_code):
    """Return value-chain (Sankey) analysis data for one company.

    Loads value-chain nodes and node-to-node flows, groups nodes by level,
    strips cycles so the flow graph is a DAG, and returns per-level
    statistics plus an upstream/company/downstream summary.

    Returns:
        JSON {'success': True, 'data': {...}} or {'success': False, 'error': ...} with 500.
    """
    try:
        # Value-chain nodes, most important first within each level.
        nodes_query = text("""
            SELECT node_name, node_type, node_level, node_description,
                   importance_score, market_share, dependency_degree, created_at
            FROM company_value_chain_nodes
            WHERE company_code = :company_code
            ORDER BY node_level ASC, importance_score DESC
        """)
        with engine.connect() as conn:
            nodes_result = conn.execute(nodes_query, {'company_code': company_code}).fetchall()

        # Value-chain flow edges, largest flow ratio first.
        flows_query = text("""
            SELECT source_node, source_type, source_level, target_node, target_type, target_level,
                   flow_value, flow_ratio, flow_type, relationship_desc, transaction_volume
            FROM company_value_chain_flows
            WHERE company_code = :company_code
            ORDER BY flow_ratio DESC
        """)
        with engine.connect() as conn:
            flows_result = conn.execute(flows_query, {'company_code': company_code}).fetchall()

        # Build the node structures, both flat and grouped by level.
        nodes_by_level = {}
        all_nodes = []
        for node in nodes_result:
            node_data = {
                'node_name': node.node_name,
                'node_type': node.node_type,
                'node_level': node.node_level,
                'node_description': node.node_description,
                'importance_score': node.importance_score,
                'market_share': format_decimal(node.market_share),
                'dependency_degree': format_decimal(node.dependency_degree),
                'created_at': node.created_at.strftime('%Y-%m-%d %H:%M:%S') if node.created_at else None
            }
            all_nodes.append(node_data)
            # Group by level, keyed "level_<n>".
            level_key = f"level_{node.node_level}"
            if level_key not in nodes_by_level:
                nodes_by_level[level_key] = []
            nodes_by_level[level_key].append(node_data)

        # Build the flow (edge) structures.
        flows_data = []
        for flow in flows_result:
            flows_data.append({
                'source': {
                    'node_name': flow.source_node,
                    'node_type': flow.source_type,
                    'node_level': flow.source_level
                },
                'target': {
                    'node_name': flow.target_node,
                    'node_type': flow.target_type,
                    'node_level': flow.target_level
                },
                'flow_metrics': {
                    'flow_value': format_decimal(flow.flow_value),
                    'flow_ratio': format_decimal(flow.flow_ratio),
                    'flow_type': flow.flow_type
                },
                'relationship_info': {
                    'relationship_desc': flow.relationship_desc,
                    'transaction_volume': flow.transaction_volume
                }
            })

        # Remove cyclic edges so the Sankey chart input is a DAG.
        flows_data = remove_cycles_from_sankey_flows(flows_data)
@app.route('/api/company/value-chain/related-companies', methods=['GET'])
def get_related_companies_by_node():
    """Find all companies whose value chain contains a given node.

    Query param:
        node_name -- node name to search for (e.g. a supplier or segment name).

    Returns:
        JSON list of companies; each entry carries the node's level/type/score
        info plus up to 5 flow relationships involving that node.
    """
    try:
        node_name = request.args.get('node_name')
        if not node_name:
            return jsonify({
                'success': False,
                'error': '缂哄皯蹇呴渶鍙傛暟 node_name'
            }), 400

        # All companies whose chain contains the node, joined to the stock
        # list for display names; most important nodes first.
        query = text("""
            SELECT DISTINCT n.company_code as stock_code, s.SECNAME as stock_name, s.ORGNAME as company_name,
                   n.node_level, n.node_type, n.node_description,
                   n.importance_score, n.market_share, n.dependency_degree
            FROM company_value_chain_nodes n
            LEFT JOIN ea_stocklist s ON n.company_code = s.SECCODE
            WHERE n.node_name = :node_name
            ORDER BY n.importance_score DESC, n.company_code
        """)
        with engine.connect() as conn:
            nodes_result = conn.execute(query, {'node_name': node_name}).fetchall()

        companies = []
        for row in nodes_result:
            company_data = {
                'stock_code': row.stock_code,
                'stock_name': row.stock_name or row.stock_code,
                'company_name': row.company_name,
                'node_info': {
                    'node_level': row.node_level,
                    'node_type': row.node_type,
                    'node_description': row.node_description,
                    'importance_score': row.importance_score,
                    'market_share': format_decimal(row.market_share),
                    'dependency_degree': format_decimal(row.dependency_degree)
                },
                'relationships': []
            }

            # Top-5 flows (by ratio) in this company's chain touching the node.
            # NOTE(review): one query per company (N+1 pattern) — acceptable
            # only while result sets stay small.
            flows_query = text("""
                SELECT source_node, source_type, source_level, target_node, target_type, target_level,
                       flow_type, relationship_desc, flow_value, flow_ratio
                FROM company_value_chain_flows
                WHERE company_code = :company_code
                  AND (source_node = :node_name OR target_node = :node_name)
                ORDER BY flow_ratio DESC
                LIMIT 5
            """)
            with engine.connect() as conn:
                flows_result = conn.execute(flows_query, {
                    'company_code': row.stock_code,
                    'node_name': node_name
                }).fetchall()

            # Describe each flow from the queried node's point of view.
            for flow in flows_result:
                # Which side of the edge is the queried node on?
                is_source = (flow.source_node == node_name)
                relationship = {
                    'role': 'source' if is_source else 'target',
                    'connected_node': flow.target_node if is_source else flow.source_node,
                    'connected_type': flow.target_type if is_source else flow.source_type,
                    'connected_level': flow.target_level if is_source else flow.source_level,
                    'flow_type': flow.flow_type,
                    'relationship_desc': flow.relationship_desc,
                    'flow_ratio': format_decimal(flow.flow_ratio)
                }
                company_data['relationships'].append(relationship)

            companies.append(company_data)

        return jsonify({
            'success': True,
            'data': companies,
            'total': len(companies),
            'node_name': node_name
        })
    except Exception as e:
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@app.route('/api/company/key-factors-timeline/', methods=['GET'])  # NOTE(review): route appears to be missing a '<company_code>' converter (mangled source) — confirm against original
def get_key_factors_timeline(company_code):
    """Return a company's key factors (grouped by category) plus its development timeline.

    Query params:
        report_period -- optional reporting-period filter for the factors.
        event_limit   -- max number of timeline events to return (default 50).
    """
    try:
        report_period = request.args.get('report_period')  # optional report-period filter
        event_limit = request.args.get('event_limit', 50, type=int)  # timeline event cap

        # Key-factor categories in display order.
        categories_query = text("""
            SELECT id, category_name, category_desc, display_order
            FROM company_key_factor_categories
            WHERE company_code = :company_code
            ORDER BY display_order ASC, created_at ASC
        """)
        with engine.connect() as conn:
            categories_result = conn.execute(categories_query, {'company_code': company_code}).fetchall()

        # Base factor query; immediately rebuilt below depending on whether
        # a report_period filter was supplied, so this value is never executed.
        factors_query = text("""
            SELECT kf.category_id, kf.factor_name, kf.factor_type, kf.factor_value, kf.factor_unit,
                   kf.factor_desc, kf.impact_direction, kf.impact_weight, kf.report_period,
                   kf.year_on_year, kf.data_source, kf.created_at, kf.updated_at
            FROM company_key_factors kf
            WHERE kf.company_code = :company_code
        """)
        params = {'company_code': company_code}

        if report_period:
            # Filter to one period, weightiest factors first.
            factors_query = text("""
                SELECT kf.category_id, kf.factor_name, kf.factor_type, kf.factor_value, kf.factor_unit,
                       kf.factor_desc, kf.impact_direction, kf.impact_weight, kf.report_period,
                       kf.year_on_year, kf.data_source, kf.created_at, kf.updated_at
                FROM company_key_factors kf
                WHERE kf.company_code = :company_code AND kf.report_period = :report_period
                ORDER BY kf.impact_weight DESC, kf.updated_at DESC
            """)
            params['report_period'] = report_period
        else:
            # No filter: newest period first, then by weight.
            factors_query = text("""
                SELECT kf.category_id, kf.factor_name, kf.factor_type, kf.factor_value, kf.factor_unit,
                       kf.factor_desc, kf.impact_direction, kf.impact_weight, kf.report_period,
                       kf.year_on_year, kf.data_source, kf.created_at, kf.updated_at
                FROM company_key_factors kf
                WHERE kf.company_code = :company_code
                ORDER BY kf.report_period DESC, kf.impact_weight DESC, kf.updated_at DESC
            """)

        with engine.connect() as conn:
            factors_result = conn.execute(factors_query, params).fetchall()

        # Development timeline events, newest first, capped at event_limit.
        timeline_query = text("""
            SELECT event_date, event_type, event_title, event_desc, impact_score, is_positive,
                   related_products, related_partners, financial_impact, created_at
            FROM company_timeline_events
            WHERE company_code = :company_code
            ORDER BY event_date DESC
            LIMIT :limit
        """)
        with engine.connect() as conn:
            timeline_result = conn.execute(timeline_query,
                                           {'company_code': company_code, 'limit': event_limit}).fetchall()

        key_factors_data = {}      # NOTE(review): assigned but never used
        factors_by_category = {}   # NOTE(review): assigned but never used

        # Index categories by id so factors can be attached below.
        categories_map = {}
        for category in categories_result:
            categories_map[category.id] = {
                'category_name': category.category_name,
                'category_desc': category.category_desc,
                'display_order': category.display_order,
                'factors': []
            }

        # Attach each factor to its category; factors whose category_id is
        # missing from categories_map are silently dropped.
        for factor in factors_result:
            factor_data = {
                'factor_name': factor.factor_name,
                'factor_type': factor.factor_type,
                'factor_value': factor.factor_value,
                'factor_unit': factor.factor_unit,
                'factor_desc': factor.factor_desc,
                'impact_direction': factor.impact_direction,
                'impact_weight': factor.impact_weight,
                'report_period': factor.report_period,
                'year_on_year': format_decimal(factor.year_on_year),
                'data_source': factor.data_source,
                'updated_at': factor.updated_at.strftime('%Y-%m-%d %H:%M:%S') if factor.updated_at else None
            }
            category_id = factor.category_id
            if category_id and category_id in categories_map:
                categories_map[category_id]['factors'].append(factor_data)

        # Build the timeline payload.
        timeline_data = []
        for event in timeline_result:
            timeline_data.append({
                'event_date': event.event_date.strftime('%Y-%m-%d') if event.event_date else None,
                'event_type': event.event_type,
                'event_title': event.event_title,
                'event_desc': event.event_desc,
                'impact_metrics': {
                    'impact_score': event.impact_score,
                    'is_positive': event.is_positive
                },
                'related_info': {
                    'related_products': event.related_products,
                    'related_partners': event.related_partners,
                    'financial_impact': event.financial_impact
                },
                'created_at': event.created_at.strftime('%Y-%m-%d %H:%M:%S') if event.created_at else None
            })

        # Summary statistics.
        total_factors = len(factors_result)
        positive_events = len([e for e in timeline_result if e.is_positive])
        negative_events = len(timeline_result) - positive_events

        response_data = {
            'company_code': company_code,
            'key_factors': {
                'categories': list(categories_map.values()),
                'total_factors': total_factors,
                'report_period': report_period
            },
            'development_timeline': {
                'events': timeline_data,
                'statistics': {
                    'total_events': len(timeline_data),
                    'positive_events': positive_events,
                    'negative_events': negative_events,
                    'event_types': list(set(event.event_type for event in timeline_result if event.event_type))
                }
            }
        }

        return jsonify({
            'success': True,
            'data': response_data
        })
    except Exception as e:
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
'success': False, 'error': str(e) }), 500 # ============================================ # 妯℃嫙鐩樻湇鍔″嚱鏁? # ============================================ def get_or_create_simulation_account(user_id): """鑾峰彇鎴栧垱寤烘ā鎷熻处鎴?"" account = SimulationAccount.query.filter_by(user_id=user_id).first() if not account: account = SimulationAccount( user_id=user_id, account_name=f'妯℃嫙璐︽埛_{user_id}', initial_capital=1000000.00, available_cash=1000000.00 ) db.session.add(account) db.session.commit() return account def is_trading_time(): """鍒ゆ柇鏄惁涓轰氦鏄撴椂闂?"" now = beijing_now() # 妫€鏌ユ槸鍚︿负宸ヤ綔鏃? if now.weekday() >= 5: # 鍛ㄥ叚鏃? return False # 妫€鏌ユ槸鍚︿负浜ゆ槗鏃堕棿 current_time = now.time() morning_start = dt_time(9, 30) morning_end = dt_time(11, 30) afternoon_start = dt_time(13, 0) afternoon_end = dt_time(15, 0) if (morning_start <= current_time <= morning_end) or \ (afternoon_start <= current_time <= afternoon_end): return True return False def get_latest_price_from_clickhouse(stock_code): """浠嶤lickHouse鑾峰彇鏈€鏂颁环鏍硷紙浼樺厛鍒嗛挓鏁版嵁锛屽閫夋棩绾挎暟鎹級""" try: client = get_clickhouse_client() # 纭繚stock_code鍖呭惈鍚庣紑 if '.' not in stock_code: if stock_code.startswith('6'): stock_code = f"{stock_code}.SH" # 涓婃捣 elif stock_code.startswith(('8', '9', '4')): stock_code = f"{stock_code}.BJ" # 鍖椾氦鎵€ else: stock_code = f"{stock_code}.SZ" # 娣卞湷 # 1. 棣栧厛灏濊瘯鑾峰彇鏈€鏂扮殑鍒嗛挓鏁版嵁锛堣繎30澶╋級 minute_query = """ SELECT close, timestamp FROM stock_minute WHERE code = %(code)s AND timestamp >= today() - 30 ORDER BY timestamp DESC LIMIT 1 \ """ result = client.execute(minute_query, {'code': stock_code}) if result: return float(result[0][0]), result[0][1] # 2. 濡傛灉娌℃湁鍒嗛挓鏁版嵁锛岃幏鍙栨渶鏂扮殑鏃ョ嚎鏀剁洏浠? daily_query = """ SELECT close, date FROM stock_daily WHERE code = %(code)s AND date >= today() - 90 ORDER BY date DESC LIMIT 1 \ """ daily_result = client.execute(daily_query, {'code': stock_code}) if daily_result: return float(daily_result[0][0]), daily_result[0][1] # 3. 
濡傛灉杩樻槸娌℃湁锛屽皾璇曚粠鍏朵粬琛ㄨ幏鍙栵紙濡傛灉鏈夌殑璇濓級 fallback_query = """ SELECT close_price, trade_date FROM stock_minute_kline WHERE stock_code = %(code6)s AND trade_date >= today() - 30 ORDER BY trade_date DESC, trade_time DESC LIMIT 1 \ """ # 鎻愬彇6浣嶄唬鐮? code6 = stock_code.split('.')[0] fallback_result = client.execute(fallback_query, {'code6': code6}) if fallback_result: return float(fallback_result[0][0]), fallback_result[0][1] print(f"璀﹀憡: 鏃犳硶鑾峰彇鑲$エ {stock_code} 鐨勪环鏍兼暟鎹?) return None, None except Exception as e: print(f"鑾峰彇鏈€鏂颁环鏍煎け璐?{stock_code}: {e}") return None, None def get_next_minute_price(stock_code, order_time): """鑾峰彇涓嬪崟鍚庝竴鍒嗛挓鍐呯殑鏀剁洏浠蜂綔涓烘垚浜や环""" try: client = get_clickhouse_client() # 纭繚stock_code鍖呭惈鍚庣紑 if '.' not in stock_code: if stock_code.startswith('6'): stock_code = f"{stock_code}.SH" # 涓婃捣 elif stock_code.startswith(('8', '9', '4')): stock_code = f"{stock_code}.BJ" # 鍖椾氦鎵€ else: stock_code = f"{stock_code}.SZ" # 娣卞湷 # 鑾峰彇涓嬪崟鍚庝竴鍒嗛挓鍐呯殑鏁版嵁 query = """ SELECT close, timestamp FROM stock_minute WHERE code = %(code)s AND timestamp \ > %(order_time)s AND timestamp <= %(end_time)s ORDER BY timestamp ASC LIMIT 1 \ """ end_time = order_time + timedelta(minutes=1) result = client.execute(query, { 'code': stock_code, 'order_time': order_time, 'end_time': end_time }) if result: return float(result[0][0]), result[0][1] # 濡傛灉涓€鍒嗛挓鍐呮病鏈夋暟鎹紝鑾峰彇鏈€杩戠殑鏁版嵁 query = """ SELECT close, timestamp FROM stock_minute WHERE code = %(code)s AND timestamp \ > %(order_time)s ORDER BY timestamp ASC LIMIT 1 \ """ result = client.execute(query, { 'code': stock_code, 'order_time': order_time }) if result: return float(result[0][0]), result[0][1] # 濡傛灉娌℃湁鍚庣画鍒嗛挓鏁版嵁锛屼娇鐢ㄦ渶鏂板彲鐢ㄤ环鏍? print(f"娌℃湁鎵惧埌涓嬪崟鍚庣殑鍒嗛挓鏁版嵁锛屼娇鐢ㄦ渶鏂颁环鏍? {stock_code}") return get_latest_price_from_clickhouse(stock_code) except Exception as e: print(f"鑾峰彇鎴愪氦浠锋牸澶辫触: {e}") # 鍑洪敊鏃朵篃灏濊瘯鑾峰彇鏈€鏂颁环鏍? return get_latest_price_from_clickhouse(stock_code) def validate_and_get_stock_info(stock_input): """楠岃瘉鑲$エ杈撳叆骞惰幏鍙栨爣鍑嗕唬鐮佸拰鍚嶇О 鏀寔杈撳叆鏍煎紡锛? 
def execute_simulation_order(order):
    """Execute a simulation order end-to-end (validate, price, fill, settle).

    Side effects: mutates the order, the account's cash, the position rows and
    creates a SimulationTransaction; commits or rolls back the session.

    Returns:
        True  -- order FILLED, or left PENDING because no price is available yet.
        False -- order REJECTED (bad code, insufficient cash/holdings) or an
                 exception occurred (session rolled back).
    """
    try:
        # Resolve/normalize the stock code; reject the order if unresolvable.
        stock_code_full, code6, stock_name = validate_and_get_stock_info(order.stock_code)
        if not stock_code_full:
            order.status = 'REJECTED'
            order.reject_reason = '鏃犳晥鐨勮偂绁ㄤ唬鐮?'
            db.session.commit()
            return False

        # Store the normalized identity on the order.
        order.stock_code = stock_code_full
        order.stock_name = stock_name

        # Fill price = close of the first minute bar after order time.
        filled_price, filled_time = get_next_minute_price(stock_code_full, order.order_time)
        if not filled_price:
            # No market data yet: keep the order PENDING for the background worker.
            order.status = 'PENDING'
            db.session.commit()
            return True  # order accepted but not yet filled

        # Record fill details and compute fees.
        order.filled_qty = order.order_qty
        order.filled_price = filled_price
        order.filled_amount = filled_price * order.order_qty
        order.filled_time = filled_time or beijing_now()
        order.calculate_fees()

        account = SimulationAccount.query.get(order.account_id)

        if order.order_type == 'BUY':
            # --- BUY: check cash, debit, create/average-up the position ---
            total_cost = float(order.filled_amount) + float(order.total_fee)
            if float(account.available_cash) < total_cost:
                order.status = 'REJECTED'
                order.reject_reason = '鍙敤璧勯噾涓嶈冻'
                db.session.commit()
                return False

            # Debit cash (Decimal arithmetic via str round-trip).
            account.available_cash -= Decimal(str(total_cost))

            position = SimulationPosition.query.filter_by(
                account_id=account.id,
                stock_code=order.stock_code
            ).first()
            if position:
                # Existing position: recompute the weighted-average cost.
                total_cost_before = float(position.avg_cost) * position.position_qty
                total_cost_after = total_cost_before + float(order.filled_amount)
                total_qty_after = position.position_qty + order.filled_qty
                position.avg_cost = Decimal(str(total_cost_after / total_qty_after))
                position.position_qty = total_qty_after
                # T+1: today's buys are frozen until next-day settlement.
                position.frozen_qty += order.filled_qty
            else:
                # New position; bought shares frozen under T+1.
                position = SimulationPosition(
                    account_id=account.id,
                    stock_code=order.stock_code,
                    stock_name=order.stock_name,
                    position_qty=order.filled_qty,
                    available_qty=0,  # T+1
                    frozen_qty=order.filled_qty,
                    avg_cost=order.filled_price,
                    current_price=order.filled_price
                )
                db.session.add(position)

            # Refresh cached market value at the fill price.
            position.update_market_value(order.filled_price)
        else:  # SELL
            # --- SELL: locate position, unfreeze if needed, credit cash ---
            print(f"馃攳 璋冭瘯锛氭煡鎵炬寔浠擄紝璐︽埛ID: {account.id}, 鑲$エ浠g爜: {order.stock_code}")

            # Try the full suffixed code first.
            position = SimulationPosition.query.filter_by(
                account_id=account.id,
                stock_code=order.stock_code
            ).first()

            # Legacy rows may be stored under the bare 6-digit code.
            if not position and '.' in order.stock_code:
                code6 = order.stock_code.split('.')[0]
                print(f"馃攳 璋冭瘯锛氬皾璇曠敤6浣嶆牸寮忔煡鎵? {code6}")
                position = SimulationPosition.query.filter_by(
                    account_id=account.id,
                    stock_code=code6
                ).first()

            print(f"馃攳 璋冭瘯锛氭壘鍒版寔浠? {position}")
            if position:
                print(
                    f"馃攳 璋冭瘯锛氭寔浠撹鎯?- 鑲$エ浠g爜: {position.stock_code}, 鎸佷粨鏁伴噺: {position.position_qty}, 鍙敤鏁伴噺: {position.available_qty}")

            if not position:
                order.status = 'REJECTED'
                order.reject_reason = '鎸佷粨涓嶅瓨鍦?'
                db.session.commit()
                return False

            # Check the total holding (including frozen shares).
            total_holdings = position.position_qty
            if total_holdings < order.order_qty:
                order.status = 'REJECTED'
                order.reject_reason = f'鎸佷粨鏁伴噺涓嶈冻锛屽綋鍓嶆寔浠? {total_holdings} 鑲★紝闇€瑕? {order.order_qty} 鑲?'
                db.session.commit()
                return False

            # If available shares are short but the total covers it, unfreeze
            # the difference from the frozen bucket.
            if position.available_qty < order.order_qty:
                need_to_unfreeze = order.order_qty - position.available_qty
                if position.frozen_qty >= need_to_unfreeze:
                    position.frozen_qty -= need_to_unfreeze
                    position.available_qty += need_to_unfreeze
                    print(f"瑙e喕 {need_to_unfreeze} 鑲$敤浜庡崠鍑?")
                else:
                    order.status = 'REJECTED'
                    order.reject_reason = f'鍙敤鏁伴噺涓嶈冻锛屽彲鐢? {position.available_qty} 鑲★紝鍐荤粨: {position.frozen_qty} 鑲★紝闇€瑕? {order.order_qty} 鑲?'
                    db.session.commit()
                    return False

            # Reduce the position and credit net proceeds (amount - fees).
            position.position_qty -= order.filled_qty
            position.available_qty -= order.filled_qty
            account.available_cash += Decimal(str(float(order.filled_amount) - float(order.total_fee)))

            # Fully sold out: drop the position row.
            if position.position_qty == 0:
                db.session.delete(position)

        # Record the trade; settlement date is T+1.
        transaction = SimulationTransaction(
            account_id=account.id,
            order_id=order.id,
            transaction_no=f"T{int(beijing_now().timestamp() * 1000000)}",
            stock_code=order.stock_code,
            stock_name=order.stock_name,
            transaction_type=order.order_type,
            transaction_price=order.filled_price,
            transaction_qty=order.filled_qty,
            transaction_amount=order.filled_amount,
            commission=order.commission,
            stamp_tax=order.stamp_tax,
            transfer_fee=order.transfer_fee,
            total_fee=order.total_fee,
            transaction_time=order.filled_time,
            settlement_date=(order.filled_time + timedelta(days=1)).date()
        )
        db.session.add(transaction)

        order.status = 'FILLED'
        # Refresh the account's cached totals before committing.
        update_account_assets(account)
        db.session.commit()
        return True
    except Exception as e:
        print(f"鎵ц璁㈠崟澶辫触: {e}")
        db.session.rollback()
        return False
update_account_assets(account) db.session.commit() return True except Exception as e: print(f"鎵ц璁㈠崟澶辫触: {e}") db.session.rollback() return False def update_account_assets(account): """鏇存柊璐︽埛璧勪骇锛堣交閲忕骇鐗堟湰锛屼笉瀹炴椂鑾峰彇浠锋牸锛?"" try: # 鍙绠楀凡鏈夌殑鎸佷粨甯傚€硷紝涓嶅疄鏃惰幏鍙栦环鏍? # 浠锋牸鏇存柊鐢卞悗鍙拌剼鏈礋璐? positions = SimulationPosition.query.filter_by(account_id=account.id).all() total_market_value = sum(position.market_value or Decimal('0') for position in positions) account.position_value = total_market_value account.calculate_total_assets() db.session.commit() except Exception as e: print(f"鏇存柊璐︽埛璧勪骇澶辫触: {e}") db.session.rollback() def update_all_positions_price(): """鏇存柊鎵€鏈夋寔浠撶殑鏈€鏂颁环鏍硷紙瀹氭椂浠诲姟璋冪敤锛?"" try: positions = SimulationPosition.query.all() for position in positions: latest_price, _ = get_latest_price_from_clickhouse(position.stock_code) if latest_price: # 璁板綍鏄ㄦ棩鏀剁洏浠凤紙鐢ㄤ簬璁$畻浠婃棩鐩堜簭锛? yesterday_close = position.current_price # 鏇存柊甯傚€? position.update_market_value(latest_price) # 璁$畻浠婃棩鐩堜簭 position.today_profit = (Decimal(str(latest_price)) - yesterday_close) * position.position_qty position.today_profit_rate = ((Decimal( str(latest_price)) - yesterday_close) / yesterday_close * 100) if yesterday_close > 0 else 0 db.session.commit() except Exception as e: print(f"鏇存柊鎸佷粨浠锋牸澶辫触: {e}") db.session.rollback() def process_t1_settlement(): """澶勭悊T+1缁撶畻锛堟瘡鏃ユ敹鐩樺悗杩愯锛?"" try: # 鑾峰彇鎵€鏈夐渶瑕佺粨绠楃殑鎸佷粨 positions = SimulationPosition.query.filter(SimulationPosition.frozen_qty > 0).all() for position in positions: # 灏嗗喕缁撴暟閲忚浆涓哄彲鐢ㄦ暟閲? 
position.available_qty += position.frozen_qty position.frozen_qty = 0 db.session.commit() except Exception as e: print(f"T+1缁撶畻澶辫触: {e}") db.session.rollback() # ============================================ # 妯℃嫙鐩楢PI鎺ュ彛 # ============================================ @app.route('/api/simulation/account', methods=['GET']) @login_required def get_simulation_account(): """鑾峰彇妯℃嫙璐︽埛淇℃伅""" try: account = get_or_create_simulation_account(current_user.id) # 鏇存柊璐︽埛璧勪骇 update_account_assets(account) return jsonify({ 'success': True, 'data': { 'account_id': account.id, 'account_name': account.account_name, 'initial_capital': float(account.initial_capital), 'available_cash': float(account.available_cash), 'frozen_cash': float(account.frozen_cash), 'position_value': float(account.position_value), 'total_assets': float(account.total_assets), 'total_profit': float(account.total_profit), 'total_profit_rate': float(account.total_profit_rate), 'daily_profit': float(account.daily_profit), 'daily_profit_rate': float(account.daily_profit_rate), 'created_at': account.created_at.isoformat(), 'updated_at': account.updated_at.isoformat() } }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/simulation/positions', methods=['GET']) @login_required def get_simulation_positions(): """鑾峰彇妯℃嫙鎸佷粨鍒楄〃锛堜紭鍖栫増鏈紝浣跨敤缂撳瓨鐨勪环鏍兼暟鎹級""" try: account = get_or_create_simulation_account(current_user.id) # 鐩存帴鑾峰彇鎸佷粨鏁版嵁锛屼笉瀹炴椂鏇存柊浠锋牸锛堢敱鍚庡彴鑴氭湰璐熻矗锛? 
positions = SimulationPosition.query.filter_by(account_id=account.id).all() positions_data = [] for position in positions: positions_data.append({ 'id': position.id, 'stock_code': position.stock_code, 'stock_name': position.stock_name, 'position_qty': position.position_qty, 'available_qty': position.available_qty, 'frozen_qty': position.frozen_qty, 'avg_cost': float(position.avg_cost), 'current_price': float(position.current_price or 0), 'market_value': float(position.market_value or 0), 'profit': float(position.profit or 0), 'profit_rate': float(position.profit_rate or 0), 'today_profit': float(position.today_profit or 0), 'today_profit_rate': float(position.today_profit_rate or 0), 'updated_at': position.updated_at.isoformat() }) return jsonify({ 'success': True, 'data': positions_data }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/simulation/orders', methods=['GET']) @login_required def get_simulation_orders(): """鑾峰彇妯℃嫙璁㈠崟鍒楄〃""" try: account = get_or_create_simulation_account(current_user.id) # 鑾峰彇鏌ヨ鍙傛暟 status = request.args.get('status') # 璁㈠崟鐘舵€佺瓫閫? date_str = request.args.get('date') # 鏃ユ湡绛涢€? 
@app.route('/api/simulation/place-order', methods=['POST'])
@login_required
def place_simulation_order():
    """Place a simulation order (market orders only).

    Accepts orders 24/7: outside trading hours the order stays PENDING until
    market data arrives. Quantity must be a positive multiple of 100.

    JSON body: stock_code, order_type (BUY/SELL), order_qty, price_type (optional).
    """
    try:
        data = request.get_json()
        stock_code = data.get('stock_code')
        order_type = data.get('order_type')  # BUY/SELL
        order_qty = data.get('order_qty')
        price_type = data.get('price_type', 'MARKET')  # only market orders supported

        # Normalize a bare 6-digit code to its exchange-suffixed form.
        # Fix: add the Beijing-exchange branch (8/9/4 -> .BJ) for consistency
        # with validate_and_get_stock_info / get_latest_price_from_clickhouse;
        # previously BJ codes were left unsuffixed here.
        if stock_code and '.' not in stock_code:
            if stock_code.startswith('6'):
                stock_code = f"{stock_code}.SH"
            elif stock_code.startswith(('8', '9', '4')):
                stock_code = f"{stock_code}.BJ"
            elif stock_code.startswith('0') or stock_code.startswith('3'):
                stock_code = f"{stock_code}.SZ"

        # Parameter validation.
        if not all([stock_code, order_type, order_qty]):
            return jsonify({'success': False, 'error': '缂哄皯蹇呰鍙傛暟'}), 400
        if order_type not in ['BUY', 'SELL']:
            return jsonify({'success': False, 'error': '璁㈠崟绫诲瀷閿欒'}), 400
        order_qty = int(order_qty)
        if order_qty <= 0 or order_qty % 100 != 0:
            return jsonify({'success': False, 'error': '涓嬪崟鏁伴噺蹇呴』涓?00鐨勬暣鏁板€?'}), 400

        account = get_or_create_simulation_account(current_user.id)

        # Best-effort name lookup from the stock list.
        stock_name = None
        with engine.connect() as conn:
            result = conn.execute(text(
                "SELECT SECNAME FROM ea_stocklist WHERE SECCODE = :code"
            ), {"code": stock_code.split('.')[0]}).fetchone()
            if result:
                stock_name = result[0]

        # Persist the order first so it exists even if execution fails.
        order = SimulationOrder(
            account_id=account.id,
            order_no=f"O{int(beijing_now().timestamp() * 1000000)}",
            stock_code=stock_code,
            stock_name=stock_name,
            order_type=order_type,
            price_type=price_type,
            order_qty=order_qty,
            status='PENDING'
        )
        db.session.add(order)
        db.session.commit()

        # Attempt immediate execution.
        print(f"馃攳 璋冭瘯锛氬紑濮嬫墽琛岃鍗曪紝鑲$エ浠g爜: {order.stock_code}, 璁㈠崟绫诲瀷: {order.order_type}")
        success = execute_simulation_order(order)
        print(f"馃攳 璋冭瘯锛氳鍗曟墽琛岀粨鏋? {success}, 璁㈠崟鐘舵€? {order.status}")

        if success:
            # Re-read the order: execute_simulation_order may have changed it.
            db.session.refresh(order)
            if order.status == 'FILLED':
                return jsonify({
                    'success': True,
                    'message': '璁㈠崟鎵ц鎴愬姛锛屽凡鎴愪氦',
                    'data': {
                        'order_no': order.order_no,
                        'status': 'FILLED',
                        'filled_price': float(order.filled_price) if order.filled_price else None,
                        'filled_qty': order.filled_qty,
                        'filled_amount': float(order.filled_amount) if order.filled_amount else None,
                        'total_fee': float(order.total_fee)
                    }
                })
            elif order.status == 'PENDING':
                return jsonify({
                    'success': True,
                    'message': '璁㈠崟鎻愪氦鎴愬姛锛岀瓑寰呰鎯呮暟鎹垚浜?',
                    'data': {
                        'order_no': order.order_no,
                        'status': 'PENDING',
                        'order_qty': order.order_qty,
                        'order_price': float(order.order_price) if order.order_price else None
                    }
                })
            else:
                return jsonify({
                    'success': False,
                    'error': order.reject_reason or '璁㈠崟鐘舵€佸紓甯?'
                }), 400
        else:
            return jsonify({
                'success': False,
                'error': order.reject_reason or '璁㈠崟鎵ц澶辫触'
            }), 400
    except Exception as e:
        db.session.rollback()
        return jsonify({'success': False, 'error': str(e)}), 500
{order.status}") if success: # 閲嶆柊鏌ヨ璁㈠崟鐘舵€侊紝鍥犱负鍙兘鍦╡xecute_simulation_order涓淇敼 db.session.refresh(order) if order.status == 'FILLED': return jsonify({ 'success': True, 'message': '璁㈠崟鎵ц鎴愬姛锛屽凡鎴愪氦', 'data': { 'order_no': order.order_no, 'status': 'FILLED', 'filled_price': float(order.filled_price) if order.filled_price else None, 'filled_qty': order.filled_qty, 'filled_amount': float(order.filled_amount) if order.filled_amount else None, 'total_fee': float(order.total_fee) } }) elif order.status == 'PENDING': return jsonify({ 'success': True, 'message': '璁㈠崟鎻愪氦鎴愬姛锛岀瓑寰呰鎯呮暟鎹垚浜?, 'data': { 'order_no': order.order_no, 'status': 'PENDING', 'order_qty': order.order_qty, 'order_price': float(order.order_price) if order.order_price else None } }) else: return jsonify({ 'success': False, 'error': order.reject_reason or '璁㈠崟鐘舵€佸紓甯? }), 400 else: return jsonify({ 'success': False, 'error': order.reject_reason or '璁㈠崟鎵ц澶辫触' }), 400 except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/simulation/cancel-order/', methods=['POST']) @login_required def cancel_simulation_order(order_id): """鎾ら攢璁㈠崟""" try: account = get_or_create_simulation_account(current_user.id) order = SimulationOrder.query.filter_by( id=order_id, account_id=account.id, status='PENDING' ).first() if not order: return jsonify({'success': False, 'error': '璁㈠崟涓嶅瓨鍦ㄦ垨鏃犳硶鎾ら攢'}), 404 order.status = 'CANCELLED' order.cancel_time = beijing_now() db.session.commit() return jsonify({ 'success': True, 'message': '璁㈠崟宸叉挙閿€' }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/simulation/transactions', methods=['GET']) @login_required def get_simulation_transactions(): """鑾峰彇鎴愪氦璁板綍""" try: account = get_or_create_simulation_account(current_user.id) # 鑾峰彇鏌ヨ鍙傛暟 date_str = request.args.get('date') limit = request.args.get('limit', 100, type=int) query = 
SimulationTransaction.query.filter_by(account_id=account.id) if date_str: try: date = datetime.strptime(date_str, '%Y-%m-%d').date() start_time = datetime.combine(date, dt_time(0, 0, 0)) end_time = datetime.combine(date, dt_time(23, 59, 59)) query = query.filter(SimulationTransaction.transaction_time.between(start_time, end_time)) except ValueError: pass transactions = query.order_by(SimulationTransaction.transaction_time.desc()).limit(limit).all() transactions_data = [] for trans in transactions: transactions_data.append({ 'id': trans.id, 'transaction_no': trans.transaction_no, 'stock_code': trans.stock_code, 'stock_name': trans.stock_name, 'transaction_type': trans.transaction_type, 'transaction_price': float(trans.transaction_price), 'transaction_qty': trans.transaction_qty, 'transaction_amount': float(trans.transaction_amount), 'commission': float(trans.commission), 'stamp_tax': float(trans.stamp_tax), 'transfer_fee': float(trans.transfer_fee), 'total_fee': float(trans.total_fee), 'transaction_time': trans.transaction_time.isoformat(), 'settlement_date': trans.settlement_date.isoformat() if trans.settlement_date else None }) return jsonify({ 'success': True, 'data': transactions_data }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 def get_simulation_statistics(): """鑾峰彇妯℃嫙浜ゆ槗缁熻""" try: account = get_or_create_simulation_account(current_user.id) # 鑾峰彇缁熻鏃堕棿鑼冨洿 days = request.args.get('days', 30, type=int) end_date = beijing_now().date() start_date = end_date - timedelta(days=days) # 鏌ヨ鏃ョ粺璁℃暟鎹? 
daily_stats = SimulationDailyStats.query.filter( SimulationDailyStats.account_id == account.id, SimulationDailyStats.stat_date >= start_date, SimulationDailyStats.stat_date <= end_date ).order_by(SimulationDailyStats.stat_date).all() # 鏌ヨ鎬讳綋缁熻 total_transactions = SimulationTransaction.query.filter_by(account_id=account.id).count() win_transactions = SimulationTransaction.query.filter( SimulationTransaction.account_id == account.id, SimulationTransaction.transaction_type == 'SELL' ).all() win_count = 0 total_profit = Decimal('0') for trans in win_transactions: # 鏌ユ壘瀵瑰簲鐨勪拱鍏ヨ褰曡绠楃泩浜? position = SimulationPosition.query.filter_by( account_id=account.id, stock_code=trans.stock_code ).first() if position and trans.transaction_price > position.avg_cost: win_count += 1 profit = (trans.transaction_price - position.avg_cost) * trans.transaction_qty if position else 0 total_profit += profit # 鏋勫缓鏃ユ敹鐩婃洸绾? daily_returns = [] for stat in daily_stats: daily_returns.append({ 'date': stat.stat_date.isoformat(), 'daily_profit': float(stat.daily_profit), 'daily_profit_rate': float(stat.daily_profit_rate), 'total_profit': float(stat.total_profit), 'total_profit_rate': float(stat.total_profit_rate), 'closing_assets': float(stat.closing_assets) }) return jsonify({ 'success': True, 'data': { 'summary': { 'total_transactions': total_transactions, 'win_count': win_count, 'win_rate': (win_count / len(win_transactions) * 100) if win_transactions else 0, 'total_profit': float(total_profit), 'average_profit_per_trade': float(total_profit / len(win_transactions)) if win_transactions else 0 }, 'daily_returns': daily_returns } }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/simulation/t1-settlement', methods=['POST']) @login_required def trigger_t1_settlement(): """鎵嬪姩瑙﹀彂T+1缁撶畻""" try: # 瀵煎叆鍚庡彴澶勭悊鍣ㄧ殑鍑芥暟 from simulation_background_processor import process_t1_settlement # 鎵цT+1缁撶畻 process_t1_settlement() return jsonify({ 'success': True, 
'message': 'T+1缁撶畻鎵ц鎴愬姛' }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/simulation/debug-positions', methods=['GET']) @login_required def debug_positions(): """璋冭瘯鎺ュ彛锛氭煡鐪嬫寔浠撴暟鎹?"" try: account = get_or_create_simulation_account(current_user.id) positions = SimulationPosition.query.filter_by(account_id=account.id).all() positions_data = [] for position in positions: positions_data.append({ 'stock_code': position.stock_code, 'stock_name': position.stock_name, 'position_qty': position.position_qty, 'available_qty': position.available_qty, 'frozen_qty': position.frozen_qty, 'avg_cost': float(position.avg_cost), 'current_price': float(position.current_price or 0) }) return jsonify({ 'success': True, 'data': positions_data }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/simulation/debug-transactions', methods=['GET']) @login_required def debug_transactions(): """璋冭瘯鎺ュ彛锛氭煡鐪嬫垚浜よ褰曟暟鎹?"" try: account = get_or_create_simulation_account(current_user.id) transactions = SimulationTransaction.query.filter_by(account_id=account.id).all() transactions_data = [] for trans in transactions: transactions_data.append({ 'id': trans.id, 'transaction_no': trans.transaction_no, 'stock_code': trans.stock_code, 'stock_name': trans.stock_name, 'transaction_type': trans.transaction_type, 'transaction_price': float(trans.transaction_price), 'transaction_qty': trans.transaction_qty, 'transaction_amount': float(trans.transaction_amount), 'commission': float(trans.commission), 'stamp_tax': float(trans.stamp_tax), 'transfer_fee': float(trans.transfer_fee), 'total_fee': float(trans.total_fee), 'transaction_time': trans.transaction_time.isoformat(), 'settlement_date': trans.settlement_date.isoformat() if trans.settlement_date else None }) return jsonify({ 'success': True, 'data': transactions_data, 'count': len(transactions_data) }) except Exception as e: return jsonify({ 'success': False, 
'error': str(e) }), 500 @app.route('/api/simulation/daily-settlement', methods=['POST']) @login_required def trigger_daily_settlement(): """鎵嬪姩瑙﹀彂鏃ョ粨绠?"" try: # 瀵煎叆鍚庡彴澶勭悊鍣ㄧ殑鍑芥暟 from simulation_background_processor import generate_daily_stats # 鎵ц鏃ョ粨绠? generate_daily_stats() return jsonify({ 'success': True, 'message': '鏃ョ粨绠楁墽琛屾垚鍔? }) except Exception as e: return jsonify({ 'success': False, 'error': str(e) }), 500 @app.route('/api/simulation/reset', methods=['POST']) @login_required def reset_simulation_account(): """閲嶇疆妯℃嫙璐︽埛""" try: account = SimulationAccount.query.filter_by(user_id=current_user.id).first() if account: # 鍒犻櫎鎵€鏈夌浉鍏虫暟鎹? SimulationPosition.query.filter_by(account_id=account.id).delete() SimulationOrder.query.filter_by(account_id=account.id).delete() SimulationTransaction.query.filter_by(account_id=account.id).delete() SimulationDailyStats.query.filter_by(account_id=account.id).delete() # 閲嶇疆璐︽埛鏁版嵁 account.available_cash = account.initial_capital account.frozen_cash = Decimal('0') account.position_value = Decimal('0') account.total_assets = account.initial_capital account.total_profit = Decimal('0') account.total_profit_rate = Decimal('0') account.daily_profit = Decimal('0') account.daily_profit_rate = Decimal('0') account.updated_at = beijing_now() db.session.commit() return jsonify({ 'success': True, 'message': '妯℃嫙璐︽埛宸查噸缃? 
}) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 # =========================== # 棰勬祴甯傚満 API 璺敱 # 璇峰皢姝ゆ枃浠跺唴瀹规彃鍏ュ埌 app.py 鐨?`if __name__ == '__main__':` 涔嬪墠 # =========================== # --- 绉垎绯荤粺 API --- @app.route('/api/prediction/credit/account', methods=['GET']) @login_required def get_credit_account(): """鑾峰彇鐢ㄦ埛绉垎璐︽埛""" try: account = UserCreditAccount.query.filter_by(user_id=current_user.id).first() # 濡傛灉璐︽埛涓嶅瓨鍦紝鑷姩鍒涘缓 if not account: account = UserCreditAccount(user_id=current_user.id) db.session.add(account) db.session.commit() return jsonify({ 'success': True, 'data': { 'balance': float(account.balance), 'frozen_balance': float(account.frozen_balance), 'available_balance': float(account.balance - account.frozen_balance), 'total_earned': float(account.total_earned), 'total_spent': float(account.total_spent), 'last_daily_bonus_at': account.last_daily_bonus_at.isoformat() if account.last_daily_bonus_at else None } }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/prediction/credit/daily-bonus', methods=['POST']) @login_required def claim_daily_bonus(): """棰嗗彇姣忔棩濂栧姳锛?00绉垎锛?"" try: account = UserCreditAccount.query.filter_by(user_id=current_user.id).first() if not account: account = UserCreditAccount(user_id=current_user.id) db.session.add(account) # 妫€鏌ユ槸鍚﹀凡棰嗗彇浠婃棩濂栧姳 today = beijing_now().date() if account.last_daily_bonus_at and account.last_daily_bonus_at.date() == today: return jsonify({ 'success': False, 'error': '浠婃棩濂栧姳宸查鍙? 
}), 400 # 鍙戞斁濂栧姳 bonus_amount = 100.0 account.balance += bonus_amount account.total_earned += bonus_amount account.last_daily_bonus_at = beijing_now() # 璁板綍浜ゆ槗 transaction = CreditTransaction( user_id=current_user.id, transaction_type='daily_bonus', amount=bonus_amount, balance_after=account.balance, description='姣忔棩鐧诲綍濂栧姳' ) db.session.add(transaction) db.session.commit() return jsonify({ 'success': True, 'message': f'棰嗗彇鎴愬姛锛岃幏寰?{bonus_amount} 绉垎', 'data': { 'bonus_amount': bonus_amount, 'new_balance': float(account.balance) } }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 # --- 棰勬祴璇濋 API --- @app.route('/api/prediction/topics', methods=['POST']) @login_required def create_prediction_topic(): """鍒涘缓棰勬祴璇濋锛堟秷鑰?00绉垎锛?"" try: data = request.get_json() title = data.get('title', '').strip() description = data.get('description', '').strip() category = data.get('category', 'stock') deadline_str = data.get('deadline') # 楠岃瘉鍙傛暟 if not title or len(title) < 5: return jsonify({'success': False, 'error': '鏍囬鑷冲皯5涓瓧绗?}), 400 if not deadline_str: return jsonify({'success': False, 'error': '璇疯缃埅姝㈡椂闂?}), 400 # 瑙f瀽鎴鏃堕棿锛堢Щ闄ゆ椂鍖轰俊鎭互鍖归厤鏁版嵁搴撴牸寮忥級 deadline = datetime.fromisoformat(deadline_str.replace('Z', '+00:00')) # 绉婚櫎鏃跺尯淇℃伅锛岃浆鎹负naive datetime if deadline.tzinfo is not None: deadline = deadline.replace(tzinfo=None) if deadline <= beijing_now(): return jsonify({'success': False, 'error': '鎴鏃堕棿蹇呴』鍦ㄦ湭鏉?}), 400 # 妫€鏌ョН鍒嗚处鎴? 
account = UserCreditAccount.query.filter_by(user_id=current_user.id).first() if not account or account.balance < 100: return jsonify({'success': False, 'error': '绉垎涓嶈冻锛堥渶瑕?00绉垎锛?}), 400 # 鎵i櫎鍒涘缓璐圭敤 create_cost = 100.0 account.balance -= create_cost account.total_spent += create_cost # 鍒涘缓璇濋 topic = PredictionTopic( creator_id=current_user.id, title=title, description=description, category=category, deadline=deadline ) db.session.add(topic) # 璁板綍绉垎浜ゆ槗 transaction = CreditTransaction( user_id=current_user.id, transaction_type='create_topic', amount=-create_cost, balance_after=account.balance, description=f'鍒涘缓棰勬祴璇濋锛歿title}' ) db.session.add(transaction) db.session.commit() return jsonify({ 'success': True, 'message': '璇濋鍒涘缓鎴愬姛', 'data': { 'topic_id': topic.id, 'title': topic.title, 'new_balance': float(account.balance) } }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/prediction/topics', methods=['GET']) def get_prediction_topics(): """鑾峰彇棰勬祴璇濋鍒楄〃""" try: status = request.args.get('status', 'active') category = request.args.get('category') sort_by = request.args.get('sort_by', 'created_at') page = request.args.get('page', 1, type=int) per_page = request.args.get('per_page', 20, type=int) # 鏋勫缓鏌ヨ query = PredictionTopic.query if status: query = query.filter_by(status=status) if category: query = query.filter_by(category=category) # 鎺掑簭 if sort_by == 'hot': query = query.order_by(desc(PredictionTopic.views_count)) elif sort_by == 'participants': query = query.order_by(desc(PredictionTopic.participants_count)) else: query = query.order_by(desc(PredictionTopic.created_at)) # 鍒嗛〉 pagination = query.paginate(page=page, per_page=per_page, error_out=False) topics = pagination.items # 鏍煎紡鍖栬繑鍥炴暟鎹? 
topics_data = [] for topic in topics: # 璁$畻甯傚満鍊惧悜 total_shares = topic.yes_total_shares + topic.no_total_shares yes_prob = (topic.yes_total_shares / total_shares * 100) if total_shares > 0 else 50.0 # 澶勭悊datetime锛岀‘淇濈Щ闄ゆ椂鍖轰俊鎭? deadline = topic.deadline if hasattr(deadline, 'replace') and deadline.tzinfo is not None: deadline = deadline.replace(tzinfo=None) created_at = topic.created_at if hasattr(created_at, 'replace') and created_at.tzinfo is not None: created_at = created_at.replace(tzinfo=None) topics_data.append({ 'id': topic.id, 'title': topic.title, 'description': topic.description, 'category': topic.category, 'status': topic.status, 'yes_price': float(topic.yes_price), 'no_price': float(topic.no_price), 'yes_probability': round(yes_prob, 1), 'total_pool': float(topic.total_pool), 'yes_lord': { 'id': topic.yes_lord.id, 'username': topic.yes_lord.username, 'nickname': topic.yes_lord.nickname or topic.yes_lord.username, 'avatar_url': topic.yes_lord.avatar_url } if topic.yes_lord else None, 'no_lord': { 'id': topic.no_lord.id, 'username': topic.no_lord.username, 'nickname': topic.no_lord.nickname or topic.no_lord.username, 'avatar_url': topic.no_lord.avatar_url } if topic.no_lord else None, 'deadline': deadline.isoformat() if deadline else None, 'created_at': created_at.isoformat() if created_at else None, 'views_count': topic.views_count, 'comments_count': topic.comments_count, 'participants_count': topic.participants_count, 'creator': { 'id': topic.creator.id, 'username': topic.creator.username, 'nickname': topic.creator.nickname or topic.creator.username } }) return jsonify({ 'success': True, 'data': topics_data, 'pagination': { 'page': page, 'per_page': per_page, 'total': pagination.total, 'pages': pagination.pages, 'has_next': pagination.has_next, 'has_prev': pagination.has_prev } }) except Exception as e: import traceback print(f"[ERROR] 鑾峰彇璇濋鍒楄〃澶辫触: {str(e)}") print(traceback.format_exc()) return jsonify({'success': False, 'error': str(e)}), 500 
@app.route('/api/prediction/topics/', methods=['GET']) def get_prediction_topic_detail(topic_id): """鑾峰彇棰勬祴璇濋璇︽儏""" try: # 鍒锋柊浼氳瘽锛岀‘淇濊幏鍙栨渶鏂版暟鎹? db.session.expire_all() topic = PredictionTopic.query.get_or_404(topic_id) # 澧炲姞娴忚閲? topic.views_count += 1 db.session.commit() # 璁$畻甯傚満鍊惧悜 total_shares = topic.yes_total_shares + topic.no_total_shares yes_prob = (topic.yes_total_shares / total_shares * 100) if total_shares > 0 else 50.0 # 鑾峰彇 TOP 5 鎸佷粨锛圷ES 鍜?NO 鍚?涓級 yes_top_positions = PredictionPosition.query.filter_by( topic_id=topic_id, direction='yes' ).order_by(desc(PredictionPosition.shares)).limit(5).all() no_top_positions = PredictionPosition.query.filter_by( topic_id=topic_id, direction='no' ).order_by(desc(PredictionPosition.shares)).limit(5).all() def format_position(position): return { 'user': { 'id': position.user.id, 'username': position.user.username, 'nickname': position.user.nickname or position.user.username, 'avatar_url': position.user.avatar_url }, 'shares': position.shares, 'avg_cost': float(position.avg_cost), 'total_invested': float(position.total_invested), 'is_lord': (topic.yes_lord_id == position.user_id and position.direction == 'yes') or (topic.no_lord_id == position.user_id and position.direction == 'no') } return jsonify({ 'success': True, 'data': { 'id': topic.id, 'title': topic.title, 'description': topic.description, 'category': topic.category, 'status': topic.status, 'result': topic.result, 'yes_price': float(topic.yes_price), 'no_price': float(topic.no_price), 'yes_total_shares': topic.yes_total_shares, 'no_total_shares': topic.no_total_shares, 'yes_probability': round(yes_prob, 1), 'no_probability': round(100 - yes_prob, 1), 'total_pool': float(topic.total_pool), 'yes_lord': { 'id': topic.yes_lord.id, 'username': topic.yes_lord.username, 'nickname': topic.yes_lord.nickname or topic.yes_lord.username, 'avatar_url': topic.yes_lord.avatar_url } if topic.yes_lord else None, 'no_lord': { 'id': topic.no_lord.id, 'username': 
topic.no_lord.username, 'nickname': topic.no_lord.nickname or topic.no_lord.username, 'avatar_url': topic.no_lord.avatar_url } if topic.no_lord else None, 'yes_top_positions': [format_position(p) for p in yes_top_positions], 'no_top_positions': [format_position(p) for p in no_top_positions], 'deadline': topic.deadline.isoformat(), 'settled_at': topic.settled_at.isoformat() if topic.settled_at else None, 'created_at': topic.created_at.isoformat(), 'views_count': topic.views_count, 'comments_count': topic.comments_count, 'participants_count': topic.participants_count, 'creator': { 'id': topic.creator.id, 'username': topic.creator.username, 'nickname': topic.creator.nickname or topic.creator.username, 'avatar_url': topic.creator.avatar_url } } }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/prediction/topics//settle', methods=['POST']) @login_required def settle_prediction_topic(topic_id): """缁撶畻棰勬祴璇濋锛堜粎鍒涘缓鑰呭彲鎿嶄綔锛?"" try: topic = PredictionTopic.query.get_or_404(topic_id) # 楠岃瘉鏉冮檺 if topic.creator_id != current_user.id: return jsonify({'success': False, 'error': '鍙湁鍒涘缓鑰呭彲浠ョ粨绠?}), 403 # 楠岃瘉鐘舵€? if topic.status != 'active': return jsonify({'success': False, 'error': '璇濋宸茬粨绠楁垨宸插彇娑?}), 400 # 楠岃瘉鎴鏃堕棿 if beijing_now() < topic.deadline: return jsonify({'success': False, 'error': '鏈埌鎴鏃堕棿'}), 400 # 鑾峰彇缁撶畻缁撴灉 data = request.get_json() result = data.get('result') # 'yes', 'no', 'draw' if result not in ['yes', 'no', 'draw']: return jsonify({'success': False, 'error': '鏃犳晥鐨勭粨绠楃粨鏋?}), 400 # 鏇存柊璇濋鐘舵€? topic.status = 'settled' topic.result = result topic.settled_at = beijing_now() # 鑾峰彇鑾疯儨鏂圭殑鎵€鏈夋寔浠? if result == 'draw': # 骞冲眬锛氭墍鏈変汉鎸夋姇鍏ユ瘮渚嬪垎閰嶅姹? 
all_positions = PredictionPosition.query.filter_by(topic_id=topic_id).all() total_invested = sum(p.total_invested for p in all_positions) for position in all_positions: if total_invested > 0: share_ratio = position.total_invested / total_invested prize = topic.total_pool * share_ratio # 鍙戞斁濂栭噾 account = UserCreditAccount.query.filter_by(user_id=position.user_id).first() if account: account.balance += prize account.total_earned += prize # 璁板綍浜ゆ槗 transaction = CreditTransaction( user_id=position.user_id, transaction_type='settle_win', amount=prize, balance_after=account.balance, related_topic_id=topic_id, description=f'棰勬祴骞冲眬锛岃幏寰楀姹犲垎绾細{topic.title}' ) db.session.add(transaction) else: # YES 鎴?NO 鑾疯儨 winning_direction = result winning_positions = PredictionPosition.query.filter_by( topic_id=topic_id, direction=winning_direction ).all() if winning_positions: total_winning_shares = sum(p.shares for p in winning_positions) for position in winning_positions: # 鎸変唤棰濇瘮渚嬪垎閰嶅姹? share_ratio = position.shares / total_winning_shares prize = topic.total_pool * share_ratio # 鍙戞斁濂栭噾 account = UserCreditAccount.query.filter_by(user_id=position.user_id).first() if account: account.balance += prize account.total_earned += prize # 璁板綍浜ゆ槗 transaction = CreditTransaction( user_id=position.user_id, transaction_type='settle_win', amount=prize, balance_after=account.balance, related_topic_id=topic_id, description=f'棰勬祴姝g‘锛岃幏寰楀閲戯細{topic.title}' ) db.session.add(transaction) db.session.commit() return jsonify({ 'success': True, 'message': f'璇濋宸茬粨绠楋紝缁撴灉涓猴細{result}', 'data': { 'topic_id': topic.id, 'result': result, 'total_pool': float(topic.total_pool), 'settled_at': topic.settled_at.isoformat() } }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 # --- 浜ゆ槗 API --- @app.route('/api/prediction/trade/buy', methods=['POST']) @login_required def buy_prediction_shares(): """涔板叆棰勬祴浠介""" try: data = request.get_json() topic_id = 
data.get('topic_id') direction = data.get('direction') # 'yes' or 'no' shares = data.get('shares', 0) # 楠岃瘉鍙傛暟 if not topic_id or direction not in ['yes', 'no'] or shares <= 0: return jsonify({'success': False, 'error': '鍙傛暟閿欒'}), 400 if shares > 1000: return jsonify({'success': False, 'error': '鍗曟鏈€澶氫拱鍏?000浠介'}), 400 # 鑾峰彇璇濋 topic = PredictionTopic.query.get_or_404(topic_id) if topic.status != 'active': return jsonify({'success': False, 'error': '璇濋宸茬粨绠楁垨宸插彇娑?}), 400 if beijing_now() >= topic.deadline: return jsonify({'success': False, 'error': '璇濋宸叉埅姝?}), 400 # 鑾峰彇绉垎璐︽埛 account = UserCreditAccount.query.filter_by(user_id=current_user.id).first() if not account: account = UserCreditAccount(user_id=current_user.id) db.session.add(account) db.session.flush() # 璁$畻浠锋牸 current_price = topic.yes_price if direction == 'yes' else topic.no_price # 绠€鍖栫殑AMM瀹氫环锛歱rice = (瀵瑰簲鏂逛唤棰?/ 鎬讳唤棰? * 1000 total_shares = topic.yes_total_shares + topic.no_total_shares if total_shares > 0: if direction == 'yes': current_price = (topic.yes_total_shares / total_shares) * 1000 else: current_price = (topic.no_total_shares / total_shares) * 1000 else: current_price = 500.0 # 鍒濆浠锋牸 # 涔板叆鍚庝环鏍间細涓婃定锛屼娇鐢ㄥ钩鍧囦环鏍? after_total = total_shares + shares if direction == 'yes': after_yes_shares = topic.yes_total_shares + shares after_price = (after_yes_shares / after_total) * 1000 else: after_no_shares = topic.no_total_shares + shares after_price = (after_no_shares / after_total) * 1000 avg_price = (current_price + after_price) / 2 # 璁$畻璐圭敤 amount = avg_price * shares tax = amount * 0.02 # 2% 鎵嬬画璐? total_cost = amount + tax # 妫€鏌ヤ綑棰? if account.balance < total_cost: return jsonify({'success': False, 'error': '绉垎涓嶈冻'}), 400 # 鎵i櫎璐圭敤 account.balance -= total_cost account.total_spent += total_cost # 鏇存柊璇濋鏁版嵁 if direction == 'yes': topic.yes_total_shares += shares topic.yes_price = after_price else: topic.no_total_shares += shares topic.no_price = after_price topic.total_pool += tax # 鎵嬬画璐硅繘鍏ュ姹? 
# 鏇存柊鎴栧垱寤烘寔浠? position = PredictionPosition.query.filter_by( user_id=current_user.id, topic_id=topic_id, direction=direction ).first() if position: # 鏇存柊骞冲潎鎴愭湰 old_cost = position.avg_cost * position.shares new_cost = avg_price * shares position.shares += shares position.avg_cost = (old_cost + new_cost) / position.shares position.total_invested += total_cost else: position = PredictionPosition( user_id=current_user.id, topic_id=topic_id, direction=direction, shares=shares, avg_cost=avg_price, total_invested=total_cost ) db.session.add(position) topic.participants_count += 1 # 鏇存柊棰嗕富 if direction == 'yes': # 鎵惧埌YES鏂规寔浠撴渶澶氱殑鐢ㄦ埛 top_yes = db.session.query(PredictionPosition).filter_by( topic_id=topic_id, direction='yes' ).order_by(desc(PredictionPosition.shares)).first() if top_yes: topic.yes_lord_id = top_yes.user_id else: # 鎵惧埌NO鏂规寔浠撴渶澶氱殑鐢ㄦ埛 top_no = db.session.query(PredictionPosition).filter_by( topic_id=topic_id, direction='no' ).order_by(desc(PredictionPosition.shares)).first() if top_no: topic.no_lord_id = top_no.user_id # 璁板綍浜ゆ槗 transaction = PredictionTransaction( user_id=current_user.id, topic_id=topic_id, trade_type='buy', direction=direction, shares=shares, price=avg_price, amount=amount, tax=tax, total_cost=total_cost ) db.session.add(transaction) # 璁板綍绉垎浜ゆ槗 credit_transaction = CreditTransaction( user_id=current_user.id, transaction_type='prediction_buy', amount=-total_cost, balance_after=account.balance, related_topic_id=topic_id, related_transaction_id=transaction.id, description=f'涔板叆 {direction.upper()} 浠介锛歿topic.title}' ) db.session.add(credit_transaction) db.session.commit() return jsonify({ 'success': True, 'message': '涔板叆鎴愬姛', 'data': { 'transaction_id': transaction.id, 'shares': shares, 'price': round(avg_price, 2), 'total_cost': round(total_cost, 2), 'tax': round(tax, 2), 'new_balance': float(account.balance), 'new_position': { 'shares': position.shares, 'avg_cost': float(position.avg_cost) } } }) except Exception as e: db.session.rollback() 
return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/prediction/positions', methods=['GET']) @login_required def get_user_positions(): """鑾峰彇鐢ㄦ埛鐨勬墍鏈夋寔浠?"" try: positions = PredictionPosition.query.filter_by(user_id=current_user.id).all() positions_data = [] for position in positions: topic = position.topic # 璁$畻褰撳墠甯傚€硷紙濡傛灉璇濋杩樺湪杩涜涓級 current_value = 0 profit = 0 profit_rate = 0 if topic.status == 'active': current_price = topic.yes_price if position.direction == 'yes' else topic.no_price current_value = current_price * position.shares profit = current_value - position.total_invested profit_rate = (profit / position.total_invested * 100) if position.total_invested > 0 else 0 positions_data.append({ 'id': position.id, 'topic': { 'id': topic.id, 'title': topic.title, 'status': topic.status, 'result': topic.result, 'deadline': topic.deadline.isoformat() }, 'direction': position.direction, 'shares': position.shares, 'avg_cost': float(position.avg_cost), 'total_invested': float(position.total_invested), 'current_value': round(current_value, 2), 'profit': round(profit, 2), 'profit_rate': round(profit_rate, 2), 'created_at': position.created_at.isoformat(), 'is_lord': (topic.yes_lord_id == current_user.id and position.direction == 'yes') or (topic.no_lord_id == current_user.id and position.direction == 'no') }) return jsonify({ 'success': True, 'data': positions_data, 'count': len(positions_data) }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 # --- 璇勮 API --- @app.route('/api/prediction/topics//comments', methods=['POST']) @login_required def create_topic_comment(topic_id): """鍙戣〃璇濋璇勮""" try: topic = PredictionTopic.query.get_or_404(topic_id) data = request.get_json() content = data.get('content', '').strip() parent_id = data.get('parent_id') if not content or len(content) < 2: return jsonify({'success': False, 'error': '璇勮鍐呭鑷冲皯2涓瓧绗?}), 400 # 鍒涘缓璇勮 comment = TopicComment( topic_id=topic_id, 
user_id=current_user.id, content=content, parent_id=parent_id ) # 濡傛灉鏄涓昏瘎璁猴紝鑷姩缃《 is_lord = (topic.yes_lord_id == current_user.id) or (topic.no_lord_id == current_user.id) if is_lord: comment.is_pinned = True db.session.add(comment) # 鏇存柊璇濋璇勮鏁? topic.comments_count += 1 db.session.commit() return jsonify({ 'success': True, 'message': '璇勮鎴愬姛', 'data': { 'comment_id': comment.id, 'content': comment.content, 'is_pinned': comment.is_pinned, 'created_at': comment.created_at.isoformat() } }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/prediction/topics//comments', methods=['GET']) def get_topic_comments(topic_id): """鑾峰彇璇濋璇勮鍒楄〃""" try: topic = PredictionTopic.query.get_or_404(topic_id) page = request.args.get('page', 1, type=int) per_page = request.args.get('per_page', 20, type=int) # 缃《璇勮鍦ㄥ墠锛岀劧鍚庢寜鏃堕棿鍊掑簭 query = TopicComment.query.filter_by( topic_id=topic_id, status='active', parent_id=None # 鍙幏鍙栭《绾ц瘎璁? ).order_by( desc(TopicComment.is_pinned), desc(TopicComment.created_at) ) pagination = query.paginate(page=page, per_page=per_page, error_out=False) comments = pagination.items def format_comment(comment): # 鑾峰彇鍥炲 replies = TopicComment.query.filter_by( parent_id=comment.id, status='active' ).order_by(TopicComment.created_at).limit(5).all() return { 'id': comment.id, 'content': comment.content, 'is_pinned': comment.is_pinned, 'likes_count': comment.likes_count, 'created_at': comment.created_at.isoformat(), 'user': { 'id': comment.user.id, 'username': comment.user.username, 'nickname': comment.user.nickname or comment.user.username, 'avatar_url': comment.user.avatar_url }, 'is_lord': (topic.yes_lord_id == comment.user_id) or (topic.no_lord_id == comment.user_id), 'replies': [{ 'id': reply.id, 'content': reply.content, 'created_at': reply.created_at.isoformat(), 'user': { 'id': reply.user.id, 'username': reply.user.username, 'nickname': reply.user.nickname or reply.user.username, 
'avatar_url': reply.user.avatar_url } } for reply in replies] } comments_data = [format_comment(comment) for comment in comments] return jsonify({ 'success': True, 'data': comments_data, 'pagination': { 'page': page, 'per_page': per_page, 'total': pagination.total, 'pages': pagination.pages } }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/prediction/comments//like', methods=['POST']) @login_required def like_topic_comment(comment_id): """鐐硅禐/鍙栨秷鐐硅禐璇勮""" try: comment = TopicComment.query.get_or_404(comment_id) # 妫€鏌ユ槸鍚﹀凡鐐硅禐 existing_like = TopicCommentLike.query.filter_by( comment_id=comment_id, user_id=current_user.id ).first() if existing_like: # 鍙栨秷鐐硅禐 db.session.delete(existing_like) comment.likes_count = max(0, comment.likes_count - 1) action = 'unliked' else: # 鐐硅禐 like = TopicCommentLike( comment_id=comment_id, user_id=current_user.id ) db.session.add(like) comment.likes_count += 1 action = 'liked' db.session.commit() return jsonify({ 'success': True, 'action': action, 'likes_count': comment.likes_count }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 # ==================== 瑙傜偣IPO API ==================== @app.route('/api/prediction/comments//invest', methods=['POST']) @login_required def invest_comment(comment_id): """鎶曡祫璇勮锛堣鐐笽PO锛?"" try: data = request.json shares = data.get('shares', 1) # 鑾峰彇璇勮 comment = TopicComment.query.get_or_404(comment_id) # 妫€鏌ヨ瘎璁烘槸鍚﹀凡缁撶畻 if comment.is_verified: return jsonify({'success': False, 'error': '璇ヨ瘎璁哄凡缁撶畻锛屾棤娉曠户缁姇璧?}), 400 # 妫€鏌ユ槸鍚︽槸鑷繁鐨勮瘎璁? if comment.user_id == current_user.id: return jsonify({'success': False, 'error': '涓嶈兘鎶曡祫鑷繁鐨勮瘎璁?}), 400 # 璁$畻鎶曡祫閲戦锛堢畝鍖栵細姣忎唤100绉垎鍩虹浠锋牸 + 宸叉湁鎶曡祫棰?10锛? 
base_price = 100 price_increase = comment.total_investment / 10 if comment.total_investment > 0 else 0 price_per_share = base_price + price_increase amount = int(price_per_share * shares) # 鑾峰彇鐢ㄦ埛绉垎璐︽埛 account = UserCreditAccount.query.filter_by(user_id=current_user.id).first() if not account: return jsonify({'success': False, 'error': '璐︽埛涓嶅瓨鍦?}), 404 # 妫€鏌ヤ綑棰? if account.balance < amount: return jsonify({'success': False, 'error': '绉垎涓嶈冻'}), 400 # 鎵e噺绉垎 account.balance -= amount # 妫€鏌ユ槸鍚﹀凡鏈夋姇璧勮褰? existing_investment = CommentInvestment.query.filter_by( comment_id=comment_id, user_id=current_user.id, status='active' ).first() if existing_investment: # 鏇存柊鎶曡祫璁板綍 total_shares = existing_investment.shares + shares total_amount = existing_investment.amount + amount existing_investment.shares = total_shares existing_investment.amount = total_amount existing_investment.avg_price = total_amount / total_shares else: # 鍒涘缓鏂版姇璧勮褰? investment = CommentInvestment( comment_id=comment_id, user_id=current_user.id, shares=shares, amount=amount, avg_price=price_per_share ) db.session.add(investment) comment.investor_count += 1 # 鏇存柊璇勮缁熻 comment.total_investment += amount # 璁板綍绉垎浜ゆ槗 transaction = CreditTransaction( user_id=current_user.id, type='comment_investment', amount=-amount, balance_after=account.balance, description=f'鎶曡祫璇勮 #{comment_id}' ) db.session.add(transaction) db.session.commit() return jsonify({ 'success': True, 'data': { 'shares': shares, 'amount': amount, 'price_per_share': price_per_share, 'total_investment': comment.total_investment, 'investor_count': comment.investor_count, 'new_balance': account.balance } }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/prediction/comments//investments', methods=['GET']) def get_comment_investments(comment_id): """鑾峰彇璇勮鐨勬姇璧勫垪琛?"" try: investments = CommentInvestment.query.filter_by( comment_id=comment_id, status='active' ).all() result = [] for 
inv in investments: user = User.query.get(inv.user_id) result.append({ 'id': inv.id, 'user_id': inv.user_id, 'user_name': user.username if user else '鏈煡鐢ㄦ埛', 'user_avatar': user.avatar if user else None, 'shares': inv.shares, 'amount': inv.amount, 'avg_price': inv.avg_price, 'created_at': inv.created_at.strftime('%Y-%m-%d %H:%M:%S') }) return jsonify({ 'success': True, 'data': result }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/prediction/comments//verify', methods=['POST']) @login_required def verify_comment(comment_id): """绠$悊鍛橀獙璇佽瘎璁洪娴嬬粨鏋?"" try: # 妫€鏌ョ鐞嗗憳鏉冮檺锛堢畝鍖栫増锛氬亣璁?user_id=1 鏄鐞嗗憳锛? if current_user.id != 1: return jsonify({'success': False, 'error': '鏃犳潈闄愭搷浣?}), 403 data = request.json result = data.get('result') # 'correct' or 'incorrect' if result not in ['correct', 'incorrect']: return jsonify({'success': False, 'error': '鏃犳晥鐨勯獙璇佺粨鏋?}), 400 comment = TopicComment.query.get_or_404(comment_id) # 妫€鏌ユ槸鍚﹀凡楠岃瘉 if comment.is_verified: return jsonify({'success': False, 'error': '璇ヨ瘎璁哄凡楠岃瘉'}), 400 # 鏇存柊楠岃瘉鐘舵€? comment.is_verified = True comment.verification_result = result # 濡傛灉棰勬祴姝g‘锛岃繘琛屾敹鐩婂垎閰? if result == 'correct' and comment.total_investment > 0: # 鑾峰彇鎵€鏈夋姇璧勮褰? investments = CommentInvestment.query.filter_by( comment_id=comment_id, status='active' ).all() # 璁$畻鎬绘敹鐩婏紙鎬绘姇璧勯鐨?.5鍊嶏級 total_reward = int(comment.total_investment * 1.5) # 鎸変唤棰濇瘮渚嬪垎閰嶆敹鐩? total_shares = sum([inv.shares for inv in investments]) for inv in investments: # 璁$畻璇ユ姇璧勮€呯殑鏀剁泭 investor_reward = int((inv.shares / total_shares) * total_reward) # 鑾峰彇鎶曡祫鑰呰处鎴? account = UserCreditAccount.query.filter_by(user_id=inv.user_id).first() if account: account.balance += investor_reward # 璁板綍绉垎浜ゆ槗 transaction = CreditTransaction( user_id=inv.user_id, type='comment_investment_profit', amount=investor_reward, balance_after=account.balance, description=f'璇勮鎶曡祫鏀剁泭 #{comment_id}' ) db.session.add(transaction) # 鏇存柊鎶曡祫鐘舵€? 
inv.status = 'settled' # 璇勮浣滆€呬篃鑾峰緱濂栧姳锛堟€绘姇璧勯鐨?0%锛? author_reward = int(comment.total_investment * 0.2) author_account = UserCreditAccount.query.filter_by(user_id=comment.user_id).first() if author_account: author_account.balance += author_reward transaction = CreditTransaction( user_id=comment.user_id, type='comment_author_bonus', amount=author_reward, balance_after=author_account.balance, description=f'璇勮浣滆€呭鍔?#{comment_id}' ) db.session.add(transaction) db.session.commit() return jsonify({ 'success': True, 'data': { 'comment_id': comment_id, 'verification_result': result, 'total_investment': comment.total_investment } }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/prediction/topics//bid-position', methods=['POST']) @login_required def bid_comment_position(topic_id): """绔炴媿璇勮浣嶇疆锛堥鍙戞潈鎷嶅崠锛?"" try: data = request.json position = data.get('position') # 1/2/3 bid_amount = data.get('bid_amount') if position not in [1, 2, 3]: return jsonify({'success': False, 'error': '鏃犳晥鐨勪綅缃?}), 400 if bid_amount < 500: return jsonify({'success': False, 'error': '鏈€浣庡嚭浠?00绉垎'}), 400 # 鑾峰彇鐢ㄦ埛绉垎璐︽埛 account = UserCreditAccount.query.filter_by(user_id=current_user.id).first() if not account or account.balance < bid_amount: return jsonify({'success': False, 'error': '绉垎涓嶈冻'}), 400 # 妫€鏌ヨ浣嶇疆鐨勫綋鍓嶆渶楂樺嚭浠? current_highest = CommentPositionBid.query.filter_by( topic_id=topic_id, position=position, status='pending' ).order_by(CommentPositionBid.bid_amount.desc()).first() if current_highest and bid_amount <= current_highest.bid_amount: return jsonify({ 'success': False, 'error': f'鍑轰环蹇呴』楂樹簬褰撳墠鏈€楂樹环 {current_highest.bid_amount}' }), 400 # 鎵e噺绉垎锛堝喕缁擄級 account.balance -= bid_amount account.frozen += bid_amount # 濡傛灉鏈変箣鍓嶇殑鍑轰环锛岄€€杩樼Н鍒? 
user_previous_bid = CommentPositionBid.query.filter_by( topic_id=topic_id, position=position, user_id=current_user.id, status='pending' ).first() if user_previous_bid: account.frozen -= user_previous_bid.bid_amount account.balance += user_previous_bid.bid_amount user_previous_bid.status = 'lost' # 鍒涘缓绔炴媿璁板綍 topic = PredictionTopic.query.get_or_404(topic_id) bid = CommentPositionBid( topic_id=topic_id, user_id=current_user.id, position=position, bid_amount=bid_amount, expires_at=topic.deadline # 绔炴媿鎴鏃堕棿涓庤瘽棰樻埅姝㈡椂闂寸浉鍚? ) db.session.add(bid) # 璁板綍绉垎浜ゆ槗 transaction = CreditTransaction( user_id=current_user.id, type='position_bid', amount=-bid_amount, balance_after=account.balance, description=f'绔炴媿璇勮浣嶇疆 #{position} (璇濋#{topic_id})' ) db.session.add(transaction) db.session.commit() return jsonify({ 'success': True, 'data': { 'bid_id': bid.id, 'position': position, 'bid_amount': bid_amount, 'new_balance': account.balance, 'frozen': account.frozen } }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/prediction/topics//position-bids', methods=['GET']) def get_position_bids(topic_id): """鑾峰彇璇濋鐨勪綅缃珵鎷嶅垪琛?"" try: result = {} for position in [1, 2, 3]: bids = CommentPositionBid.query.filter_by( topic_id=topic_id, position=position, status='pending' ).order_by(CommentPositionBid.bid_amount.desc()).limit(5).all() position_bids = [] for bid in bids: user = User.query.get(bid.user_id) position_bids.append({ 'id': bid.id, 'user_id': bid.user_id, 'user_name': user.username if user else '鏈煡鐢ㄦ埛', 'user_avatar': user.avatar if user else None, 'bid_amount': bid.bid_amount, 'created_at': bid.created_at.strftime('%Y-%m-%d %H:%M:%S') }) result[f'position_{position}'] = position_bids return jsonify({ 'success': True, 'data': result }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 # ==================== 鏃堕棿鑳跺泭 API ==================== @app.route('/api/time-capsule/topics', 
methods=['POST']) @login_required def create_time_capsule_topic(): """鍒涘缓鏃堕棿鑳跺泭璇濋""" try: data = request.json title = data.get('title') description = data.get('description', '') encrypted_content = data.get('encrypted_content') encryption_key = data.get('encryption_key') start_year = data.get('start_year') end_year = data.get('end_year') # 楠岃瘉 if not title or not encrypted_content or not encryption_key: return jsonify({'success': False, 'error': '缂哄皯蹇呰鍙傛暟'}), 400 if not start_year or not end_year or end_year <= start_year: return jsonify({'success': False, 'error': '鏃犳晥鐨勬椂闂磋寖鍥?}), 400 # 鑾峰彇鐢ㄦ埛绉垎璐︽埛 account = UserCreditAccount.query.filter_by(user_id=current_user.id).first() if not account or account.balance < 100: return jsonify({'success': False, 'error': '绉垎涓嶈冻锛岄渶瑕?00绉垎'}), 400 # 鎵e噺绉垎 account.balance -= 100 # 鍒涘缓璇濋 topic = TimeCapsuleTopic( user_id=current_user.id, title=title, description=description, encrypted_content=encrypted_content, encryption_key=encryption_key, start_year=start_year, end_year=end_year, total_pool=100 # 鍒涘缓璐圭敤杩涘叆濂栨睜 ) db.session.add(topic) db.session.flush() # 鑾峰彇 topic.id # 鑷姩鍒涘缓鏃堕棿娈碉紙姣忓勾涓€涓椂闂存锛? 
for year in range(start_year, end_year + 1): slot = TimeCapsuleTimeSlot( topic_id=topic.id, year_start=year, year_end=year ) db.session.add(slot) # 璁板綍绉垎浜ゆ槗 transaction = CreditTransaction( user_id=current_user.id, type='time_capsule_create', amount=-100, balance_after=account.balance, description=f'鍒涘缓鏃堕棿鑳跺泭璇濋 #{topic.id}' ) db.session.add(transaction) db.session.commit() return jsonify({ 'success': True, 'data': { 'topic_id': topic.id, 'title': topic.title, 'new_balance': account.balance } }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/time-capsule/topics', methods=['GET']) def get_time_capsule_topics(): """鑾峰彇鏃堕棿鑳跺泭璇濋鍒楄〃""" try: status = request.args.get('status', 'active') query = TimeCapsuleTopic.query.filter_by(status=status) topics = query.order_by(TimeCapsuleTopic.created_at.desc()).all() result = [] for topic in topics: # 鑾峰彇鐢ㄦ埛淇℃伅 user = User.query.get(topic.user_id) # 鑾峰彇鏃堕棿娈电粺璁? slots = TimeCapsuleTimeSlot.query.filter_by(topic_id=topic.id).all() total_slots = len(slots) active_slots = len([s for s in slots if s.status == 'active']) result.append({ 'id': topic.id, 'title': topic.title, 'description': topic.description, 'start_year': topic.start_year, 'end_year': topic.end_year, 'total_pool': topic.total_pool, 'total_slots': total_slots, 'active_slots': active_slots, 'is_decrypted': topic.is_decrypted, 'status': topic.status, 'author_id': topic.user_id, 'author_name': user.username if user else '鏈煡鐢ㄦ埛', 'author_avatar': user.avatar if user else None, 'created_at': topic.created_at.strftime('%Y-%m-%d %H:%M:%S') }) return jsonify({ 'success': True, 'data': result }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/time-capsule/topics/', methods=['GET']) def get_time_capsule_topic(topic_id): """鑾峰彇鏃堕棿鑳跺泭璇濋璇︽儏""" try: topic = TimeCapsuleTopic.query.get_or_404(topic_id) user = User.query.get(topic.user_id) # 鑾峰彇鎵€鏈夋椂闂存 slots = 
TimeCapsuleTimeSlot.query.filter_by(topic_id=topic_id).order_by(TimeCapsuleTimeSlot.year_start).all() slots_data = [] for slot in slots: holder = User.query.get(slot.current_holder_id) if slot.current_holder_id else None slots_data.append({ 'id': slot.id, 'year_start': slot.year_start, 'year_end': slot.year_end, 'current_price': slot.current_price, 'total_bids': slot.total_bids, 'status': slot.status, 'current_holder_id': slot.current_holder_id, 'current_holder_name': holder.username if holder else None, 'current_holder_avatar': holder.avatar if holder else None }) result = { 'id': topic.id, 'title': topic.title, 'description': topic.description, 'start_year': topic.start_year, 'end_year': topic.end_year, 'total_pool': topic.total_pool, 'is_decrypted': topic.is_decrypted, 'decrypted_content': topic.encrypted_content if topic.is_decrypted else None, 'actual_happened_year': topic.actual_happened_year, 'status': topic.status, 'author_id': topic.user_id, 'author_name': user.username if user else '鏈煡鐢ㄦ埛', 'author_avatar': user.avatar if user else None, 'time_slots': slots_data, 'created_at': topic.created_at.strftime('%Y-%m-%d %H:%M:%S') } return jsonify({ 'success': True, 'data': result }) except Exception as e: return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/time-capsule/slots//bid', methods=['POST']) @login_required def bid_time_slot(slot_id): """绔炴媿鏃堕棿娈?"" try: data = request.json bid_amount = data.get('bid_amount') slot = TimeCapsuleTimeSlot.query.get_or_404(slot_id) # 妫€鏌ユ椂闂存鏄惁杩樺湪绔炴媿 if slot.status != 'active': return jsonify({'success': False, 'error': '璇ユ椂闂存宸茬粨鏉熺珵鎷?}), 400 # 妫€鏌ュ嚭浠锋槸鍚﹂珮浜庡綋鍓嶄环鏍? 
min_bid = slot.current_price + 50 # 鑷冲皯姣斿綋鍓嶄环鏍奸珮50绉垎 if bid_amount < min_bid: return jsonify({ 'success': False, 'error': f'鍑轰环蹇呴』鑷冲皯涓?{min_bid} 绉垎' }), 400 # 鑾峰彇鐢ㄦ埛绉垎璐︽埛 account = UserCreditAccount.query.filter_by(user_id=current_user.id).first() if not account or account.balance < bid_amount: return jsonify({'success': False, 'error': '绉垎涓嶈冻'}), 400 # 鎵e噺绉垎 account.balance -= bid_amount # 濡傛灉鏈夊墠浠绘寔鏈夎€咃紝閫€杩樼Н鍒? if slot.current_holder_id: prev_holder_account = UserCreditAccount.query.filter_by(user_id=slot.current_holder_id).first() if prev_holder_account: prev_holder_account.balance += slot.current_price # 鏇存柊鍓嶄换鐨勭珵鎷嶈褰曠姸鎬? prev_bid = TimeSlotBid.query.filter_by( slot_id=slot_id, user_id=slot.current_holder_id, status='holding' ).first() if prev_bid: prev_bid.status = 'outbid' # 鍒涘缓绔炴媿璁板綍 bid = TimeSlotBid( slot_id=slot_id, user_id=current_user.id, bid_amount=bid_amount, status='holding' ) db.session.add(bid) # 鏇存柊鏃堕棿娈? slot.current_holder_id = current_user.id slot.current_price = bid_amount slot.total_bids += 1 # 鏇存柊璇濋濂栨睜 topic = TimeCapsuleTopic.query.get(slot.topic_id) price_increase = bid_amount - (slot.current_price if slot.current_holder_id else 100) topic.total_pool += price_increase # 璁板綍绉垎浜ゆ槗 transaction = CreditTransaction( user_id=current_user.id, type='time_slot_bid', amount=-bid_amount, balance_after=account.balance, description=f'绔炴媿鏃堕棿娈?{slot.year_start}-{slot.year_end}' ) db.session.add(transaction) db.session.commit() return jsonify({ 'success': True, 'data': { 'slot_id': slot_id, 'bid_amount': bid_amount, 'new_balance': account.balance, 'total_pool': topic.total_pool } }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/time-capsule/topics//decrypt', methods=['POST']) @login_required def decrypt_time_capsule(topic_id): """瑙e瘑鏃堕棿鑳跺泭锛堢鐞嗗憳鎴栦綔鑰咃級""" try: topic = TimeCapsuleTopic.query.get_or_404(topic_id) # 妫€鏌ユ潈闄愶紙绠$悊鍛樻垨浣滆€咃級 if current_user.id != 1 and current_user.id != 
topic.user_id: return jsonify({'success': False, 'error': '鏃犳潈闄愭搷浣?}), 403 # 妫€鏌ユ槸鍚﹀凡瑙e瘑 if topic.is_decrypted: return jsonify({'success': False, 'error': '璇ヨ瘽棰樺凡瑙e瘑'}), 400 # 瑙e瘑锛堝墠绔細鐢ㄥ瘑閽ヨВ瀵嗗唴瀹癸級 topic.is_decrypted = True db.session.commit() return jsonify({ 'success': True, 'data': { 'encrypted_content': topic.encrypted_content, 'encryption_key': topic.encryption_key } }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 @app.route('/api/time-capsule/topics//settle', methods=['POST']) @login_required def settle_time_capsule(topic_id): """缁撶畻鏃堕棿鑳跺泭璇濋""" try: # 妫€鏌ョ鐞嗗憳鏉冮檺 if current_user.id != 1: return jsonify({'success': False, 'error': '鏃犳潈闄愭搷浣?}), 403 data = request.json happened_year = data.get('happened_year') topic = TimeCapsuleTopic.query.get_or_404(topic_id) # 妫€鏌ユ槸鍚﹀凡缁撶畻 if topic.status == 'settled': return jsonify({'success': False, 'error': '璇ヨ瘽棰樺凡缁撶畻'}), 400 # 鏇存柊璇濋鐘舵€? topic.status = 'settled' topic.actual_happened_year = happened_year # 鎵惧埌涓鐨勬椂闂存 winning_slot = TimeCapsuleTimeSlot.query.filter_by( topic_id=topic_id, year_start=happened_year ).first() if winning_slot and winning_slot.current_holder_id: # 涓鑰呰幏寰楀叏閮ㄥ姹? winner_account = UserCreditAccount.query.filter_by(user_id=winning_slot.current_holder_id).first() if winner_account: winner_account.balance += topic.total_pool # 璁板綍绉垎浜ゆ槗 transaction = CreditTransaction( user_id=winning_slot.current_holder_id, type='time_capsule_win', amount=topic.total_pool, balance_after=winner_account.balance, description=f'鏃堕棿鑳跺泭涓 #{topic_id}' ) db.session.add(transaction) # 鏇存柊绔炴媿璁板綍 winning_bid = TimeSlotBid.query.filter_by( slot_id=winning_slot.id, user_id=winning_slot.current_holder_id, status='holding' ).first() if winning_bid: winning_bid.status = 'won' # 鏇存柊鏃堕棿娈电姸鎬? winning_slot.status = 'won' # 鍏朵粬鏃堕棿娈佃涓鸿繃鏈? 
other_slots = TimeCapsuleTimeSlot.query.filter( TimeCapsuleTimeSlot.topic_id == topic_id, TimeCapsuleTimeSlot.id != (winning_slot.id if winning_slot else None) ).all() for slot in other_slots: slot.status = 'expired' db.session.commit() return jsonify({ 'success': True, 'data': { 'topic_id': topic_id, 'happened_year': happened_year, 'winner_id': winning_slot.current_holder_id if winning_slot else None, 'prize': topic.total_pool } }) except Exception as e: db.session.rollback() return jsonify({'success': False, 'error': str(e)}), 500 if __name__ == '__main__': # 鍒涘缓鏁版嵁搴撹〃 with app.app_context(): try: db.create_all() # 瀹夊叏鍦板垵濮嬪寲璁㈤槄濂楅 initialize_subscription_plans_safe() except Exception as e: app.logger.error(f"鏁版嵁搴撳垵濮嬪寲澶辫触: {e}") # 鍒濆鍖栦簨浠惰疆璇㈡満鍒讹紙WebSocket 鎺ㄩ€侊級 initialize_event_polling() # 浣跨敤 socketio.run 鏇夸唬 app.run 浠ユ敮鎸?WebSocket socketio.run(app, host='0.0.0.0', port=5001, debug=False, allow_unsafe_werkzeug=True)