#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Concept anomaly real-time detection service
- Extended from concept_quota_realtime.py
- Detects concept-sector anomalies (rapid surges, limit-up increases, rank jumps)
- Records the index level at alert time, for the hot-spot overview chart
"""

import pandas as pd
import numpy as np
from datetime import datetime, timedelta
from sqlalchemy import create_engine, text
from elasticsearch import Elasticsearch
from clickhouse_driver import Client
from collections import deque
import time
import logging
import json
import os
import hashlib
import argparse

# ==================== Configuration ====================

# MySQL
MYSQL_ENGINE = create_engine(
    "mysql+pymysql://root:Zzl5588161!@222.128.1.157:33060/stock",
    echo=False
)

# Elasticsearch
ES_CLIENT = Elasticsearch(['http://222.128.1.157:19200'])
INDEX_NAME = 'concept_library_v3'

# ClickHouse
CLICKHOUSE_CONFIG = {
    'host': '222.128.1.157',
    'port': 18000,
    'user': 'default',
    'password': 'Zzl33818!',
    'database': 'stock'
}

# Concept hierarchy file
HIERARCHY_FILE = 'concept_hierarchy_v3.json'

# ==================== Alert detection thresholds ====================
ALERT_CONFIG = {
    # Surge detection: the change in gain over the last N minutes exceeds the threshold
    'surge': {
        'enabled': True,
        'window_minutes': 5,      # detection window (minutes)
        'threshold_pct': 1.0,     # change-in-gain threshold (%)
        'min_change_pct': 0.5,    # minimum gain required (filters noise from negative moves)
        'cooldown_minutes': 10,   # per-concept cooldown (avoids duplicate alerts)
    },
    # Limit-up count increase detection
    'limit_up': {
        'enabled': True,
        'threshold_count': 1,     # required increase in limit-up count
        'cooldown_minutes': 15,   # cooldown
    },
    # Rank-jump detection
    'rank_jump': {
        'enabled': True,
        'window_minutes': 5,      # detection window
        'threshold_rank': 15,     # required rank improvement
        'max_rank': 50,           # only track moves into the top N
        'cooldown_minutes': 15,
    },
}

# Reference index
REFERENCE_INDEX = '000001.SH'  # Shanghai Composite Index

# ==================== Logging ====================
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler(f'concept_alert_{datetime.now().strftime("%Y%m%d")}.log', encoding='utf-8'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)

# ==================== Global state ====================
ch_client = None

# History cache used by the detectors
# Layout: {concept_id: deque([{timestamp, change_pct, rank, limit_up_count}, ...])}
history_cache = {}
HISTORY_WINDOW = 10  # keep roughly the last 10 minutes

# Cooldown records (avoid duplicate alerts)
# Layout: {(concept_id, alert_type): last_alert_time}
cooldown_cache = {}

# Current ranking cache
current_rankings = {}


def get_ch_client():
    """Get (and lazily create) the ClickHouse client."""
    global ch_client
    if ch_client is None:
        ch_client = Client(**CLICKHOUSE_CONFIG)
    return ch_client


def generate_id(name: str) -> str:
    """Generate a concept ID from its name."""
    return hashlib.md5(name.encode('utf-8')).hexdigest()[:16]


def code_to_ch_format(code: str) -> str:
    """Convert a plain 6-digit stock code to the ClickHouse code format."""
    if not code or len(code) != 6 or not code.isdigit():
        return None
    if code.startswith('6'):
        return f"{code}.SH"
    elif code.startswith('0') or code.startswith('3'):
        return f"{code}.SZ"
    else:
        return f"{code}.BJ"
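
# Illustrative only: a tiny self-check of the exchange-suffix mapping implemented by
# code_to_ch_format() above (6xxxxx -> .SH, 0xxxxx/3xxxxx -> .SZ, anything else -> .BJ).
# It is not called anywhere; run it manually if the mapping ever needs verification.
def _example_code_format_check():
    assert code_to_ch_format('600519') == '600519.SH'   # Shanghai main board
    assert code_to_ch_format('000001') == '000001.SZ'   # Shenzhen main board
    assert code_to_ch_format('300750') == '300750.SZ'   # ChiNext
    assert code_to_ch_format('830799') == '830799.BJ'   # Beijing Stock Exchange
    assert code_to_ch_format('12345') is None            # wrong length is rejected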

# ==================== Concept data loading ====================

def get_all_concepts():
    """Fetch all leaf concepts and their stock lists from Elasticsearch."""
    concepts = []
    query = {
        "query": {"match_all": {}},
        "size": 100,
        "_source": ["concept_id", "concept", "stocks"]
    }
    resp = ES_CLIENT.search(index=INDEX_NAME, body=query, scroll='2m')
    scroll_id = resp['_scroll_id']
    hits = resp['hits']['hits']
    while len(hits) > 0:
        for hit in hits:
            source = hit['_source']
            concept_info = {
                'concept_id': source.get('concept_id'),
                'concept_name': source.get('concept'),
                'stocks': [],
                'concept_type': 'leaf'
            }
            if 'stocks' in source and isinstance(source['stocks'], list):
                for stock in source['stocks']:
                    if isinstance(stock, dict) and 'code' in stock and stock['code']:
                        concept_info['stocks'].append(stock['code'])
            if concept_info['stocks']:
                concepts.append(concept_info)
        resp = ES_CLIENT.scroll(scroll_id=scroll_id, scroll='2m')
        scroll_id = resp['_scroll_id']
        hits = resp['hits']['hits']
    ES_CLIENT.clear_scroll(scroll_id=scroll_id)
    return concepts


def load_hierarchy_concepts(leaf_concepts: list) -> list:
    """Load the hierarchy file and build parent (lv1/lv2/lv3) concepts."""
    hierarchy_path = os.path.join(os.path.dirname(__file__), HIERARCHY_FILE)
    if not os.path.exists(hierarchy_path):
        logger.warning(f"层级文件不存在: {hierarchy_path}")
        return []
    with open(hierarchy_path, 'r', encoding='utf-8') as f:
        hierarchy_data = json.load(f)

    concept_to_stocks = {}
    for c in leaf_concepts:
        concept_to_stocks[c['concept_name']] = set(c['stocks'])

    parent_concepts = []
    for lv1 in hierarchy_data.get('hierarchy', []):
        lv1_name = lv1.get('lv1', '')
        lv1_stocks = set()
        for child in lv1.get('children', []):
            lv2_name = child.get('lv2', '')
            lv2_stocks = set()
            if 'children' in child:
                for lv3_child in child.get('children', []):
                    lv3_name = lv3_child.get('lv3', '')
                    lv3_stocks = set()
                    for concept_name in lv3_child.get('concepts', []):
                        if concept_name in concept_to_stocks:
                            lv3_stocks.update(concept_to_stocks[concept_name])
                    if lv3_stocks:
                        parent_concepts.append({
                            'concept_id': generate_id(f"lv3_{lv3_name}"),
                            'concept_name': f"[三级] {lv3_name}",
                            'stocks': list(lv3_stocks),
                            'concept_type': 'lv3'
                        })
                    lv2_stocks.update(lv3_stocks)
            else:
                for concept_name in child.get('concepts', []):
                    if concept_name in concept_to_stocks:
                        lv2_stocks.update(concept_to_stocks[concept_name])
            if lv2_stocks:
                parent_concepts.append({
                    'concept_id': generate_id(f"lv2_{lv2_name}"),
                    'concept_name': f"[二级] {lv2_name}",
                    'stocks': list(lv2_stocks),
                    'concept_type': 'lv2'
                })
            lv1_stocks.update(lv2_stocks)
        if lv1_stocks:
            parent_concepts.append({
                'concept_id': generate_id(f"lv1_{lv1_name}"),
                'concept_name': f"[一级] {lv1_name}",
                'stocks': list(lv1_stocks),
                'concept_type': 'lv1'
            })
    return parent_concepts
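
# Assumed shape of concept_hierarchy_v3.json, inferred from the parsing logic in
# load_hierarchy_concepts() above (the real file may carry extra fields). An lv2 node either
# nests lv3 children or lists leaf concept names directly:
#
# {
#   "hierarchy": [
#     {"lv1": "一级概念名", "children": [
#       {"lv2": "二级概念名", "children": [
#         {"lv3": "三级概念名", "concepts": ["叶子概念A", "叶子概念B"]}
#       ]},
#       {"lv2": "另一个二级概念", "concepts": ["叶子概念C"]}
#     ]}
#   ]
# }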

# ==================== Price data ====================

def get_base_prices(stock_codes: list, current_date: str) -> dict:
    """Fetch previous-close prices to use as the baseline."""
    if not stock_codes:
        return {}
    valid_codes = [code for code in stock_codes if code and len(code) == 6 and code.isdigit()]
    if not valid_codes:
        return {}
    stock_codes_str = "','".join(valid_codes)
    query = f"""
        SELECT SECCODE, F002N
        FROM ea_trade
        WHERE SECCODE IN ('{stock_codes_str}')
          AND TRADEDATE = (
              SELECT MAX(TRADEDATE) FROM ea_trade WHERE TRADEDATE <= '{current_date}'
          )
          AND F002N IS NOT NULL AND F002N > 0
    """
    try:
        with MYSQL_ENGINE.connect() as conn:
            result = conn.execute(text(query))
            base_prices = {row[0]: float(row[1]) for row in result if row[1] and float(row[1]) > 0}
        return base_prices
    except Exception as e:
        logger.error(f"获取基准价格失败: {e}")
        return {}


def get_latest_prices(stock_codes: list) -> dict:
    """Fetch the latest intraday prices from ClickHouse."""
    if not stock_codes:
        return {}
    client = get_ch_client()

    ch_codes = []
    code_mapping = {}
    for code in stock_codes:
        ch_code = code_to_ch_format(code)
        if ch_code:
            ch_codes.append(ch_code)
            code_mapping[ch_code] = code
    if not ch_codes:
        return {}
    ch_codes_str = "','".join(ch_codes)

    query = f"""
        SELECT code, close, timestamp
        FROM (
            SELECT code, close, timestamp,
                   ROW_NUMBER() OVER (PARTITION BY code ORDER BY timestamp DESC) as rn
            FROM stock_minute
            WHERE code IN ('{ch_codes_str}')
              AND toDate(timestamp) = today()
        )
        WHERE rn = 1
    """
    try:
        result = client.execute(query)
        if not result:
            return {}
        latest_prices = {}
        for row in result:
            ch_code, close, ts = row
            if close and close > 0:
                pure_code = code_mapping.get(ch_code)
                if pure_code:
                    latest_prices[pure_code] = {
                        'close': float(close),
                        'timestamp': ts
                    }
        return latest_prices
    except Exception as e:
        logger.error(f"获取最新价格失败: {e}")
        return {}


def get_index_realtime(index_code: str = REFERENCE_INDEX) -> dict:
    """Fetch real-time data for the reference index."""
    client = get_ch_client()
    try:
        # Latest bar from the index_minute table
        query = f"""
            SELECT close, timestamp
            FROM index_minute
            WHERE code = '{index_code}'
              AND toDate(timestamp) = today()
            ORDER BY timestamp DESC
            LIMIT 1
        """
        result = client.execute(query)
        if not result:
            return None
        close, ts = result[0]

        # Previous close
        prev_close = None
        code_no_suffix = index_code.split('.')[0]
        with MYSQL_ENGINE.connect() as conn:
            prev_result = conn.execute(text("""
                SELECT F006N FROM ea_exchangetrade
                WHERE INDEXCODE = :code AND TRADEDATE < CURDATE()
                ORDER BY TRADEDATE DESC LIMIT 1
            """), {'code': code_no_suffix}).fetchone()
            if prev_result and prev_result[0]:
                prev_close = float(prev_result[0])

        change_pct = None
        if close and prev_close and prev_close > 0:
            change_pct = (float(close) - prev_close) / prev_close * 100

        return {
            'code': index_code,
            'price': float(close),
            'prev_close': prev_close,
            'change_pct': round(change_pct, 4) if change_pct is not None else None,
            'timestamp': ts
        }
    except Exception as e:
        logger.error(f"获取指数数据失败: {e}")
        return None


# ==================== Change-percentage calculation ====================

def calculate_change_pct(base_prices: dict, latest_prices: dict) -> dict:
    """Compute per-stock change percentages against the baseline."""
    changes = {}
    for code, latest in latest_prices.items():
        if code in base_prices and base_prices[code] > 0:
            base = base_prices[code]
            close = latest['close']
            change_pct = (close - base) / base * 100
            changes[code] = {
                'change_pct': round(change_pct, 4),
                'close': close,
                'base': base
            }
    return changes


def calculate_concept_stats(concepts: list, stock_changes: dict) -> list:
    """Compute per-concept statistics (including limit-up counts)."""
    stats = []
    for concept in concepts:
        concept_id = concept['concept_id']
        concept_name = concept['concept_name']
        stock_codes = concept['stocks']
        concept_type = concept.get('concept_type', 'leaf')

        changes = []
        limit_up_count = 0
        limit_up_stocks = []
        for code in stock_codes:
            if code in stock_changes:
                change_info = stock_changes[code]
                change_pct = change_info['change_pct']
                changes.append(change_pct)
                # Count as limit-up when the gain is >= 9.8%
                if change_pct >= 9.8:
                    limit_up_count += 1
                    limit_up_stocks.append(code)

        if not changes:
            continue

        avg_change_pct = round(np.mean(changes), 4)
        stats.append({
            'concept_id': concept_id,
            'concept_name': concept_name,
            'avg_change_pct': avg_change_pct,
            'stock_count': len(changes),
            'concept_type': concept_type,
            'limit_up_count': limit_up_count,
            'limit_up_stocks': limit_up_stocks
        })

    # Sort by average gain and assign ranks
    stats.sort(key=lambda x: x['avg_change_pct'], reverse=True)
    for i, item in enumerate(stats):
        item['rank'] = i + 1
    return stats
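
# Illustrative only (not called anywhere): a worked example of the percentage math used by
# calculate_change_pct() and the 9.8% limit-up threshold used by calculate_concept_stats().
# The prices are made up; they simply demonstrate the formula (close - base) / base * 100.
def _example_change_math():
    base, close = 10.00, 10.35
    change_pct = round((close - base) / base * 100, 4)
    assert change_pct == 3.5                                  # +3.5% versus the previous close
    assert not change_pct >= 9.8                              # below the limit-up threshold
    assert round((11.00 - 10.00) / 10.00 * 100, 4) >= 9.8     # +10% counts as limit-up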

# ==================== Alert detection ====================

def check_cooldown(concept_id: str, alert_type: str, cooldown_minutes: int) -> bool:
    """Return True if this concept/alert type is still inside its cooldown window."""
    key = (concept_id, alert_type)
    if key in cooldown_cache:
        last_alert = cooldown_cache[key]
        if datetime.now() - last_alert < timedelta(minutes=cooldown_minutes):
            return True
    return False


def set_cooldown(concept_id: str, alert_type: str):
    """Record the cooldown start for a concept/alert type."""
    cooldown_cache[(concept_id, alert_type)] = datetime.now()


def update_history(concept_id: str, timestamp: datetime, change_pct: float,
                   rank: int, limit_up_count: int):
    """Append the latest snapshot to the per-concept history cache."""
    if concept_id not in history_cache:
        history_cache[concept_id] = deque(maxlen=HISTORY_WINDOW)
    history_cache[concept_id].append({
        'timestamp': timestamp,
        'change_pct': change_pct,
        'rank': rank,
        'limit_up_count': limit_up_count
    })


def get_history(concept_id: str, minutes_ago: int) -> dict:
    """Get the history record from roughly N minutes ago."""
    if concept_id not in history_cache:
        return None
    history = history_cache[concept_id]
    if not history:
        return None
    target_time = datetime.now() - timedelta(minutes=minutes_ago)
    # Walk backwards so the newest record that is at least N minutes old is returned
    # (mirrors get_history_backtest; iterating forwards would always return the oldest record).
    for record in reversed(history):
        if record['timestamp'] <= target_time:
            return record
    # If there is no record old enough, fall back to the earliest one
    return history[0] if history else None


def detect_alerts(current_stats: list, index_data: dict, trade_date: str) -> list:
    """Detect anomalies across all concepts."""
    alerts = []
    now = datetime.now()

    for stat in current_stats:
        concept_id = stat['concept_id']
        concept_name = stat['concept_name']
        change_pct = stat['avg_change_pct']
        rank = stat['rank']
        limit_up_count = stat['limit_up_count']
        stock_count = stat['stock_count']
        concept_type = stat['concept_type']

        # Update history
        update_history(concept_id, now, change_pct, rank, limit_up_count)

        # 1. Surge detection
        if ALERT_CONFIG['surge']['enabled']:
            cfg = ALERT_CONFIG['surge']
            if change_pct >= cfg['min_change_pct']:  # minimum gain requirement
                if not check_cooldown(concept_id, 'surge', cfg['cooldown_minutes']):
                    prev_data = get_history(concept_id, cfg['window_minutes'])
                    if prev_data:
                        change_delta = change_pct - prev_data['change_pct']
                        if change_delta >= cfg['threshold_pct']:
                            alerts.append({
                                'concept_id': concept_id,
                                'concept_name': concept_name,
                                'alert_type': 'surge',
                                'alert_time': now,
                                'trade_date': trade_date,
                                'change_pct': change_pct,
                                'prev_change_pct': prev_data['change_pct'],
                                'change_delta': round(change_delta, 4),
                                'limit_up_count': limit_up_count,
                                'rank_position': rank,
                                'stock_count': stock_count,
                                'concept_type': concept_type,
                                'index_price': index_data['price'] if index_data else None,
                                'index_change_pct': index_data['change_pct'] if index_data else None,
                            })
                            set_cooldown(concept_id, 'surge')
                            logger.info(f"🔥 急涨异动: {concept_name} 涨幅 {prev_data['change_pct']:.2f}% -> {change_pct:.2f}% (+{change_delta:.2f}%)")

        # 2. Limit-up increase detection
        if ALERT_CONFIG['limit_up']['enabled']:
            cfg = ALERT_CONFIG['limit_up']
            if limit_up_count > 0:
                if not check_cooldown(concept_id, 'limit_up', cfg['cooldown_minutes']):
                    prev_data = get_history(concept_id, 1)  # compare with the previous minute
                    if prev_data:
                        limit_up_delta = limit_up_count - prev_data['limit_up_count']
                        if limit_up_delta >= cfg['threshold_count']:
                            alerts.append({
                                'concept_id': concept_id,
                                'concept_name': concept_name,
                                'alert_type': 'limit_up',
                                'alert_time': now,
                                'trade_date': trade_date,
                                'change_pct': change_pct,
                                'limit_up_count': limit_up_count,
                                'prev_limit_up_count': prev_data['limit_up_count'],
                                'limit_up_delta': limit_up_delta,
                                'rank_position': rank,
                                'stock_count': stock_count,
                                'concept_type': concept_type,
                                'index_price': index_data['price'] if index_data else None,
                                'index_change_pct': index_data['change_pct'] if index_data else None,
                                'extra_info': {'limit_up_stocks': stat.get('limit_up_stocks', [])}
                            })
                            set_cooldown(concept_id, 'limit_up')
                            logger.info(f"🚀 涨停异动: {concept_name} 涨停数 {prev_data['limit_up_count']} -> {limit_up_count} (+{limit_up_delta})")

        # 3. Rank-jump detection
        if ALERT_CONFIG['rank_jump']['enabled']:
            cfg = ALERT_CONFIG['rank_jump']
            if rank <= cfg['max_rank']:  # only the current top N
                if not check_cooldown(concept_id, 'rank_jump', cfg['cooldown_minutes']):
                    prev_data = get_history(concept_id, cfg['window_minutes'])
                    if prev_data and prev_data['rank'] > cfg['max_rank']:  # moved into the top N from outside
                        rank_delta = prev_data['rank'] - rank  # positive means the rank improved
                        if rank_delta >= cfg['threshold_rank']:
                            alerts.append({
                                'concept_id': concept_id,
                                'concept_name': concept_name,
                                'alert_type': 'rank_jump',
                                'alert_time': now,
                                'trade_date': trade_date,
                                'change_pct': change_pct,
                                'rank_position': rank,
                                'prev_rank_position': prev_data['rank'],
                                'rank_delta': -rank_delta,  # stored negative for an upward move
                                'limit_up_count': limit_up_count,
                                'stock_count': stock_count,
                                'concept_type': concept_type,
                                'index_price': index_data['price'] if index_data else None,
                                'index_change_pct': index_data['change_pct'] if index_data else None,
                            })
                            set_cooldown(concept_id, 'rank_jump')
                            logger.info(f"📈 排名跃升: {concept_name} 排名 {prev_data['rank']} -> {rank} (上升{rank_delta}名)")

    return alerts
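
# Worked example (numbers invented) of how the surge rule above fires with the default
# ALERT_CONFIG: window_minutes=5, threshold_pct=1.0, min_change_pct=0.5, cooldown_minutes=10.
#
#   09:35  concept avg gain +0.4%  -> cached only, no alert (below min_change_pct)
#   09:40  concept avg gain +1.6%  -> delta vs 09:35 is +1.2% >= 1.0% -> surge alert, cooldown starts
#   09:44  concept avg gain +2.8%  -> still inside the 10-minute cooldown, suppressed
#   09:51  concept avg gain +4.1%  -> cooldown expired, evaluated again against the 5-minute-old record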

# ==================== Persistence ====================

def save_alerts_to_mysql(alerts: list):
    """Persist alert records to MySQL."""
    if not alerts:
        return 0
    saved = 0
    with MYSQL_ENGINE.begin() as conn:
        for alert in alerts:
            try:
                insert_sql = text("""
                    INSERT INTO concept_minute_alert
                    (concept_id, concept_name, alert_time, alert_type, trade_date,
                     change_pct, prev_change_pct, change_delta,
                     limit_up_count, prev_limit_up_count, limit_up_delta,
                     rank_position, prev_rank_position, rank_delta,
                     index_code, index_price, index_change_pct,
                     stock_count, concept_type, extra_info)
                    VALUES
                    (:concept_id, :concept_name, :alert_time, :alert_type, :trade_date,
                     :change_pct, :prev_change_pct, :change_delta,
                     :limit_up_count, :prev_limit_up_count, :limit_up_delta,
                     :rank_position, :prev_rank_position, :rank_delta,
                     :index_code, :index_price, :index_change_pct,
                     :stock_count, :concept_type, :extra_info)
                """)
                params = {
                    'concept_id': alert['concept_id'],
                    'concept_name': alert['concept_name'],
                    'alert_time': alert['alert_time'],
                    'alert_type': alert['alert_type'],
                    'trade_date': alert['trade_date'],
                    'change_pct': alert.get('change_pct'),
                    'prev_change_pct': alert.get('prev_change_pct'),
                    'change_delta': alert.get('change_delta'),
                    'limit_up_count': alert.get('limit_up_count', 0),
                    'prev_limit_up_count': alert.get('prev_limit_up_count', 0),
                    'limit_up_delta': alert.get('limit_up_delta', 0),
                    'rank_position': alert.get('rank_position'),
                    'prev_rank_position': alert.get('prev_rank_position'),
                    'rank_delta': alert.get('rank_delta'),
                    'index_code': REFERENCE_INDEX,
                    'index_price': alert.get('index_price'),
                    'index_change_pct': alert.get('index_change_pct'),
                    'stock_count': alert.get('stock_count'),
                    'concept_type': alert.get('concept_type', 'leaf'),
                    'extra_info': json.dumps(alert.get('extra_info')) if alert.get('extra_info') else None
                }
                conn.execute(insert_sql, params)
                saved += 1
            except Exception as e:
                logger.error(f"保存异动失败: {alert['concept_name']} - {e}")
    return saved


def save_index_snapshot(index_data: dict, trade_date: str):
    """Persist the index snapshot."""
    if not index_data:
        return
    try:
        with MYSQL_ENGINE.begin() as conn:
            upsert_sql = text("""
                REPLACE INTO index_minute_snapshot
                (index_code, trade_date, snapshot_time, price, prev_close, change_pct)
                VALUES
                (:index_code, :trade_date, :snapshot_time, :price, :prev_close, :change_pct)
            """)
            conn.execute(upsert_sql, {
                'index_code': index_data['code'],
                'trade_date': trade_date,
                'snapshot_time': index_data['timestamp'],
                'price': index_data['price'],
                'prev_close': index_data.get('prev_close'),
                'change_pct': index_data.get('change_pct')
            })
    except Exception as e:
        logger.error(f"保存指数快照失败: {e}")
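
# Example (values invented) of how one rank_jump alert is stored by save_alerts_to_mysql():
# a concept moving from rank 60 to rank 8 within the window is written with
# prev_rank_position=60, rank_position=8 and rank_delta=-52 (negative = upward move),
# while extra_info is only populated for limit_up alerts, e.g.
# '{"limit_up_stocks": ["600519", "300750"]}' serialized via json.dumps.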

# ==================== Trading-hours helpers ====================

def is_trading_time() -> bool:
    """Return True if the current time falls within trading hours."""
    now = datetime.now()
    weekday = now.weekday()
    if weekday >= 5:
        return False
    hour, minute = now.hour, now.minute
    current_time = hour * 60 + minute
    morning_start = 9 * 60 + 30
    morning_end = 11 * 60 + 30
    afternoon_start = 13 * 60
    afternoon_end = 15 * 60
    return (morning_start <= current_time <= morning_end) or \
           (afternoon_start <= current_time <= afternoon_end)


def get_next_update_time() -> int:
    """Return the number of seconds to wait before the next update."""
    now = datetime.now()
    if is_trading_time():
        return 60 - now.second
    else:
        hour, minute = now.hour, now.minute
        if hour < 9 or (hour == 9 and minute < 30):
            target = now.replace(hour=9, minute=30, second=0, microsecond=0)
        elif (hour == 11 and minute >= 30) or hour == 12:
            target = now.replace(hour=13, minute=0, second=0, microsecond=0)
        elif hour >= 15:
            target = (now + timedelta(days=1)).replace(hour=9, minute=30, second=0, microsecond=0)
        else:
            target = now + timedelta(minutes=1)
        wait_seconds = (target - now).total_seconds()
        return max(60, int(wait_seconds))


# ==================== Main run logic ====================

def run_once(concepts: list, all_stocks: list) -> tuple:
    """Run one detection pass; returns (updated concept count, alert count)."""
    now = datetime.now()
    trade_date = now.strftime('%Y-%m-%d')

    # Baseline prices
    base_prices = get_base_prices(all_stocks, trade_date)
    if not base_prices:
        logger.warning("无法获取基准价格")
        return 0, 0

    # Latest prices
    latest_prices = get_latest_prices(all_stocks)
    if not latest_prices:
        logger.warning("无法获取最新价格")
        return 0, 0

    # Index data
    index_data = get_index_realtime(REFERENCE_INDEX)
    if index_data:
        save_index_snapshot(index_data, trade_date)

    # Per-stock change percentages
    stock_changes = calculate_change_pct(base_prices, latest_prices)
    if not stock_changes:
        logger.warning("无涨跌幅数据")
        return 0, 0
    logger.info(f"获取到 {len(stock_changes)} 只股票的涨跌幅")

    # Per-concept statistics
    stats = calculate_concept_stats(concepts, stock_changes)
    logger.info(f"计算了 {len(stats)} 个概念的涨跌幅")

    # Detect alerts
    alerts = detect_alerts(stats, index_data, trade_date)

    # Persist alerts
    if alerts:
        saved = save_alerts_to_mysql(alerts)
        logger.info(f"💾 保存了 {saved} 条异动记录")

    return len(stats), len(alerts)


def run_realtime():
    """Main real-time detection loop."""
    logger.info("=" * 60)
    logger.info("🚀 启动概念异动实时检测服务")
    logger.info("=" * 60)
    logger.info(f"异动配置: {json.dumps(ALERT_CONFIG, indent=2, ensure_ascii=False)}")

    # Load concept data
    logger.info("加载概念数据...")
    leaf_concepts = get_all_concepts()
    logger.info(f"获取到 {len(leaf_concepts)} 个叶子概念")
    parent_concepts = load_hierarchy_concepts(leaf_concepts)
    logger.info(f"生成了 {len(parent_concepts)} 个母概念")
    all_concepts = leaf_concepts + parent_concepts
    logger.info(f"总计 {len(all_concepts)} 个概念")

    # Collect all stock codes
    all_stocks = set()
    for c in all_concepts:
        all_stocks.update(c['stocks'])
    all_stocks = list(all_stocks)
    logger.info(f"监控 {len(all_stocks)} 只股票")

    last_concept_update = datetime.now()
    total_alerts = 0

    while True:
        try:
            now = datetime.now()

            # Reload concept data every hour
            if (now - last_concept_update).total_seconds() > 3600:
                logger.info("重新加载概念数据...")
                leaf_concepts = get_all_concepts()
                parent_concepts = load_hierarchy_concepts(leaf_concepts)
                all_concepts = leaf_concepts + parent_concepts
                all_stocks = set()
                for c in all_concepts:
                    all_stocks.update(c['stocks'])
                all_stocks = list(all_stocks)
                last_concept_update = now
                logger.info(f"更新完成: {len(all_concepts)} 个概念, {len(all_stocks)} 只股票")

            # Outside trading hours: wait and retry
            if not is_trading_time():
                wait_sec = get_next_update_time()
                wait_min = wait_sec // 60
                logger.info(f"⏰ 非交易时间,等待 {wait_min} 分钟后重试...")
                time.sleep(min(wait_sec, 300))
                continue

            # Run one detection pass
            logger.info(f"\n{'=' * 40}")
            logger.info(f"🔍 检测时间: {now.strftime('%Y-%m-%d %H:%M:%S')}")
            updated, alert_count = run_once(all_concepts, all_stocks)
            total_alerts += alert_count
            if alert_count > 0:
                logger.info(f"📊 本次检测到 {alert_count} 条异动,累计 {total_alerts} 条")

            # Wait for the next minute
            sleep_sec = 60 - datetime.now().second
            logger.info(f"⏳ 等待 {sleep_sec} 秒后继续...")
            time.sleep(sleep_sec)

        except KeyboardInterrupt:
            logger.info("\n收到退出信号,停止服务...")
            break
        except Exception as e:
            logger.error(f"发生错误: {e}")
            import traceback
            traceback.print_exc()
            time.sleep(60)
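
# How the scheduling above behaves at a few sample clock times (derived from
# is_trading_time() and get_next_update_time(); times are local exchange time):
#   09:12 on a weekday -> not trading; wait until 09:30 (the loop caps each sleep at 300 s)
#   10:17:42           -> trading; run once, then sleep 18 s to realign with the next whole minute
#   11:45              -> lunch break; wait until 13:00
#   15:20              -> after the close; wait until 09:30 on the next calendar day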

def run_single():
    """Run a single detection pass."""
    logger.info("单次检测模式")
    leaf_concepts = get_all_concepts()
    parent_concepts = load_hierarchy_concepts(leaf_concepts)
    all_concepts = leaf_concepts + parent_concepts

    all_stocks = set()
    for c in all_concepts:
        all_stocks.update(c['stocks'])
    all_stocks = list(all_stocks)

    logger.info(f"概念数: {len(all_concepts)}, 股票数: {len(all_stocks)}")
    updated, alerts = run_once(all_concepts, all_stocks)
    logger.info(f"检测完成: {updated} 个概念, {alerts} 条异动")


def show_status():
    """Print service status and today's alerts."""
    print("\n" + "=" * 60)
    print("概念异动实时检测服务 - 状态")
    print("=" * 60)

    now = datetime.now()
    print(f"\n当前时间: {now.strftime('%Y-%m-%d %H:%M:%S')}")
    print(f"是否交易时间: {'是' if is_trading_time() else '否'}")

    # Today's alert counts by type
    print("\n今日异动统计:")
    try:
        with MYSQL_ENGINE.connect() as conn:
            result = conn.execute(text("""
                SELECT alert_type, COUNT(*) as cnt
                FROM concept_minute_alert
                WHERE trade_date = CURDATE()
                GROUP BY alert_type
            """))
            rows = list(result)
            if rows:
                for row in rows:
                    alert_type_name = {
                        'surge': '急涨',
                        'limit_up': '涨停增加',
                        'rank_jump': '排名跃升'
                    }.get(row[0], row[0])
                    print(f"  {alert_type_name}: {row[1]} 条")
            else:
                print("  今日暂无异动")

            # Latest alerts
            print("\n最新异动 (前10条):")
            result = conn.execute(text("""
                SELECT concept_name, alert_type, alert_time, change_pct, limit_up_count, index_price
                FROM concept_minute_alert
                WHERE trade_date = CURDATE()
                ORDER BY alert_time DESC
                LIMIT 10
            """))
            rows = list(result)
            if rows:
                print(f"  {'概念':<20} | {'类型':<8} | {'时间':<8} | {'涨幅':>6} | {'涨停':>4} | {'指数':>8}")
                print("  " + "-" * 70)
                for row in rows:
                    name = row[0][:18] if len(row[0]) > 18 else row[0]
                    alert_type = {'surge': '急涨', 'limit_up': '涨停', 'rank_jump': '排名'}.get(row[1], row[1])
                    time_str = row[2].strftime('%H:%M') if row[2] else '-'
                    change = f"{row[3]:.2f}%" if row[3] else '-'
                    limit_up = str(row[4]) if row[4] else '-'
                    index_p = f"{row[5]:.2f}" if row[5] else '-'
                    print(f"  {name:<20} | {alert_type:<8} | {time_str:<8} | {change:>6} | {limit_up:>4} | {index_p:>8}")
            else:
                print("  暂无异动记录")
    except Exception as e:
        print(f"  查询失败: {e}")


def init_tables():
    """Initialize the database tables."""
    print("初始化数据库表...")
    sql_file = os.path.join(os.path.dirname(__file__), 'sql', 'concept_minute_alert.sql')
    if not os.path.exists(sql_file):
        print(f"SQL文件不存在: {sql_file}")
        return
    with open(sql_file, 'r', encoding='utf-8') as f:
        sql_content = f.read()

    # Split into individual statements (naive split on ';', skipping '--' comment lines)
    statements = [s.strip() for s in sql_content.split(';') if s.strip() and not s.strip().startswith('--')]
    with MYSQL_ENGINE.begin() as conn:
        for stmt in statements:
            if stmt:
                try:
                    conn.execute(text(stmt))
                    print(f"✅ 执行成功")
                except Exception as e:
                    print(f"❌ 执行失败: {e}")
    print("初始化完成")


# ==================== Backtesting ====================

def get_minute_timestamps(trade_date: str) -> list:
    """Return every minute timestamp available for the given trading day."""
    client = get_ch_client()
    query = f"""
        SELECT DISTINCT timestamp
        FROM stock_minute
        WHERE toDate(timestamp) = '{trade_date}'
        ORDER BY timestamp
    """
    result = client.execute(query)
    return [row[0] for row in result]
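
# For reference: an A-share trading day has two sessions (09:30-11:30 and 13:00-15:00), so the
# query above normally yields roughly 240 one-minute bars; the replay loop in run_backtest()
# below therefore makes on the order of a couple of hundred passes per backtested day.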

def get_prices_at_time(stock_codes: list, timestamp: datetime) -> dict:
    """Get stock prices at a specific point in time.

    Args:
        stock_codes: list of plain 6-digit stock codes
        timestamp: the minute to query

    Returns:
        dict: {6-digit code: {'close': price, 'timestamp': bar time}}
    """
    if not stock_codes:
        return {}
    client = get_ch_client()

    # Convert to the ClickHouse code format
    ch_codes = []
    code_mapping = {}
    for code in stock_codes:
        ch_code = code_to_ch_format(code)
        if ch_code:
            ch_codes.append(ch_code)
            code_mapping[ch_code] = code
    if not ch_codes:
        return {}
    ch_codes_str = "','".join(ch_codes)

    # Rows at the exact timestamp
    query = f"""
        SELECT code, close, timestamp
        FROM stock_minute
        WHERE code IN ('{ch_codes_str}')
          AND timestamp = '{timestamp.strftime('%Y-%m-%d %H:%M:%S')}'
    """
    try:
        result = client.execute(query)
        prices = {}
        for row in result:
            ch_code, close, ts = row
            if close and close > 0:
                pure_code = code_mapping.get(ch_code)
                if pure_code:
                    prices[pure_code] = {
                        'close': float(close),
                        'timestamp': ts
                    }
        return prices
    except Exception as e:
        logger.error(f"获取历史价格失败: {e}")
        return {}


def get_index_at_time(index_code: str, timestamp: datetime, prev_close: float) -> dict:
    """Get index data at a specific point in time."""
    client = get_ch_client()
    query = f"""
        SELECT close, timestamp
        FROM index_minute
        WHERE code = '{index_code}'
          AND timestamp = '{timestamp.strftime('%Y-%m-%d %H:%M:%S')}'
        LIMIT 1
    """
    try:
        result = client.execute(query)
        if not result:
            return None
        close, ts = result[0]
        change_pct = None
        if close and prev_close and prev_close > 0:
            change_pct = (float(close) - prev_close) / prev_close * 100
        return {
            'code': index_code,
            'price': float(close),
            'prev_close': prev_close,
            'change_pct': round(change_pct, 4) if change_pct is not None else None,
            'timestamp': ts
        }
    except Exception as e:
        logger.error(f"获取指数数据失败: {e}")
        return None


def get_index_prev_close(index_code: str, trade_date: str) -> float:
    """Get the index's previous close for a trading day."""
    code_no_suffix = index_code.split('.')[0]
    with MYSQL_ENGINE.connect() as conn:
        result = conn.execute(text("""
            SELECT F006N FROM ea_exchangetrade
            WHERE INDEXCODE = :code AND TRADEDATE < :today
            ORDER BY TRADEDATE DESC LIMIT 1
        """), {
            'code': code_no_suffix,
            'today': trade_date
        }).fetchone()
        if result and result[0]:
            return float(result[0])
    return None
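
# Note on the point-in-time lookups above: both get_prices_at_time() and get_index_at_time()
# match the bar timestamp exactly, so a stock (or the index) with no bar stored for that
# minute simply drops out of that minute's snapshot rather than being forward-filled.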

def run_backtest(trade_date: str, clear_existing: bool = True):
    """
    Replay alert detection for a historical trading day.

    Args:
        trade_date: trading day, format 'YYYY-MM-DD'
        clear_existing: whether to delete that day's existing alert data first
    """
    global history_cache, cooldown_cache

    logger.info("=" * 60)
    logger.info(f"🔄 开始回测: {trade_date}")
    logger.info("=" * 60)

    # Reset caches
    history_cache = {}
    cooldown_cache = {}

    # Remove existing data for the day
    if clear_existing:
        with MYSQL_ENGINE.begin() as conn:
            conn.execute(text("DELETE FROM concept_minute_alert WHERE trade_date = :date"),
                         {'date': trade_date})
            conn.execute(text("DELETE FROM index_minute_snapshot WHERE trade_date = :date"),
                         {'date': trade_date})
        logger.info(f"已清除 {trade_date} 的已有数据")

    # Load concept data
    logger.info("加载概念数据...")
    leaf_concepts = get_all_concepts()
    logger.info(f"获取到 {len(leaf_concepts)} 个叶子概念")
    parent_concepts = load_hierarchy_concepts(leaf_concepts)
    logger.info(f"生成了 {len(parent_concepts)} 个母概念")
    all_concepts = leaf_concepts + parent_concepts
    logger.info(f"总计 {len(all_concepts)} 个概念")

    # Collect all stock codes
    all_stocks = set()
    for c in all_concepts:
        all_stocks.update(c['stocks'])
    all_stocks = list(all_stocks)
    logger.info(f"监控 {len(all_stocks)} 只股票")

    # Baseline (previous-close) prices
    base_prices = get_base_prices(all_stocks, trade_date)
    if not base_prices:
        logger.error("无法获取基准价格,退出回测")
        return
    logger.info(f"获取到 {len(base_prices)} 个基准价格")

    # Index previous close
    index_prev_close = get_index_prev_close(REFERENCE_INDEX, trade_date)
    logger.info(f"指数昨收价: {index_prev_close}")

    # All minute timestamps for the day
    timestamps = get_minute_timestamps(trade_date)
    if not timestamps:
        logger.error(f"未找到 {trade_date} 的分钟数据")
        return
    logger.info(f"找到 {len(timestamps)} 个分钟时间点")

    total_alerts = 0
    processed = 0

    # Replay minute by minute
    for ts in timestamps:
        processed += 1

        # Prices at this minute
        latest_prices = get_prices_at_time(all_stocks, ts)
        if not latest_prices:
            continue

        # Index data at this minute
        index_data = get_index_at_time(REFERENCE_INDEX, ts, index_prev_close)
        if index_data:
            save_index_snapshot(index_data, trade_date)

        # Per-stock change percentages
        stock_changes = calculate_change_pct(base_prices, latest_prices)
        if not stock_changes:
            continue

        # Per-concept statistics
        stats = calculate_concept_stats(all_concepts, stock_changes)

        # Detect alerts (backtest-specific variant)
        alerts = detect_alerts_backtest(stats, index_data, trade_date, ts)

        # Persist alerts
        if alerts:
            saved = save_alerts_to_mysql(alerts)
            total_alerts += saved

        # Progress
        if processed % 30 == 0:
            logger.info(f"进度: {processed}/{len(timestamps)} ({processed*100//len(timestamps)}%), 已检测到 {total_alerts} 条异动")

    logger.info("=" * 60)
    logger.info(f"✅ 回测完成!")
    logger.info(f"  处理分钟数: {processed}")
    logger.info(f"  检测到异动: {total_alerts} 条")
    logger.info("=" * 60)


def detect_alerts_backtest(current_stats: list, index_data: dict, trade_date: str,
                           current_time: datetime) -> list:
    """
    Backtest variant of the alert detection (uses the supplied replay time instead of now()).
    """
    alerts = []

    for stat in current_stats:
        concept_id = stat['concept_id']
        concept_name = stat['concept_name']
        change_pct = stat['avg_change_pct']
        rank = stat['rank']
        limit_up_count = stat['limit_up_count']
        stock_count = stat['stock_count']
        concept_type = stat['concept_type']

        # Update history (with the replay time)
        if concept_id not in history_cache:
            history_cache[concept_id] = deque(maxlen=HISTORY_WINDOW)
        history_cache[concept_id].append({
            'timestamp': current_time,
            'change_pct': change_pct,
            'rank': rank,
            'limit_up_count': limit_up_count
        })

        # History lookup helper (backtest-only)
        def get_history_backtest(concept_id: str, minutes_ago: int):
            if concept_id not in history_cache:
                return None
            history = history_cache[concept_id]
            if not history:
                return None
            target_time = current_time - timedelta(minutes=minutes_ago)
            for record in reversed(list(history)):
                if record['timestamp'] <= target_time:
                    return record
            return None

        # Cooldown check (backtest-only)
        def check_cooldown_backtest(concept_id: str, alert_type: str, cooldown_minutes: int) -> bool:
            key = (concept_id, alert_type)
            if key in cooldown_cache:
                last_alert = cooldown_cache[key]
                if current_time - last_alert < timedelta(minutes=cooldown_minutes):
                    return True
            return False

        def set_cooldown_backtest(concept_id: str, alert_type: str):
            cooldown_cache[(concept_id, alert_type)] = current_time

        # 1. Surge detection
        if ALERT_CONFIG['surge']['enabled']:
            cfg = ALERT_CONFIG['surge']
            if change_pct >= cfg['min_change_pct']:
                if not check_cooldown_backtest(concept_id, 'surge', cfg['cooldown_minutes']):
                    prev_data = get_history_backtest(concept_id, cfg['window_minutes'])
                    if prev_data:
                        change_delta = change_pct - prev_data['change_pct']
                        if change_delta >= cfg['threshold_pct']:
                            alerts.append({
                                'concept_id': concept_id,
                                'concept_name': concept_name,
                                'alert_type': 'surge',
                                'alert_time': current_time,
                                'trade_date': trade_date,
                                'change_pct': change_pct,
                                'prev_change_pct': prev_data['change_pct'],
                                'change_delta': round(change_delta, 4),
                                'limit_up_count': limit_up_count,
                                'rank_position': rank,
                                'stock_count': stock_count,
                                'concept_type': concept_type,
                                'index_price': index_data['price'] if index_data else None,
                                'index_change_pct': index_data['change_pct'] if index_data else None,
                            })
                            set_cooldown_backtest(concept_id, 'surge')
                            logger.debug(f"🔥 急涨: {concept_name} {prev_data['change_pct']:.2f}% -> {change_pct:.2f}%")

        # 2. Limit-up increase detection
        if ALERT_CONFIG['limit_up']['enabled']:
            cfg = ALERT_CONFIG['limit_up']
            if limit_up_count > 0:
                if not check_cooldown_backtest(concept_id, 'limit_up', cfg['cooldown_minutes']):
                    prev_data = get_history_backtest(concept_id, 1)
                    if prev_data:
                        limit_up_delta = limit_up_count - prev_data['limit_up_count']
                        if limit_up_delta >= cfg['threshold_count']:
                            alerts.append({
                                'concept_id': concept_id,
                                'concept_name': concept_name,
                                'alert_type': 'limit_up',
                                'alert_time': current_time,
                                'trade_date': trade_date,
                                'change_pct': change_pct,
                                'limit_up_count': limit_up_count,
                                'prev_limit_up_count': prev_data['limit_up_count'],
                                'limit_up_delta': limit_up_delta,
                                'rank_position': rank,
                                'stock_count': stock_count,
                                'concept_type': concept_type,
                                'index_price': index_data['price'] if index_data else None,
                                'index_change_pct': index_data['change_pct'] if index_data else None,
                                'extra_info': {'limit_up_stocks': stat.get('limit_up_stocks', [])}
                            })
                            set_cooldown_backtest(concept_id, 'limit_up')
                            logger.debug(f"🚀 涨停: {concept_name} 涨停数 +{limit_up_delta}")

        # 3. Rank-jump detection
        if ALERT_CONFIG['rank_jump']['enabled']:
            cfg = ALERT_CONFIG['rank_jump']
            if rank <= cfg['max_rank']:
                if not check_cooldown_backtest(concept_id, 'rank_jump', cfg['cooldown_minutes']):
                    prev_data = get_history_backtest(concept_id, cfg['window_minutes'])
                    if prev_data and prev_data['rank'] > cfg['max_rank']:
                        rank_delta = prev_data['rank'] - rank
                        if rank_delta >= cfg['threshold_rank']:
                            alerts.append({
                                'concept_id': concept_id,
                                'concept_name': concept_name,
                                'alert_type': 'rank_jump',
                                'alert_time': current_time,
                                'trade_date': trade_date,
                                'change_pct': change_pct,
                                'rank_position': rank,
                                'prev_rank_position': prev_data['rank'],
                                'rank_delta': -rank_delta,
                                'limit_up_count': limit_up_count,
                                'stock_count': stock_count,
                                'concept_type': concept_type,
                                'index_price': index_data['price'] if index_data else None,
                                'index_change_pct': index_data['change_pct'] if index_data else None,
                            })
                            set_cooldown_backtest(concept_id, 'rank_jump')
                            logger.debug(f"📈 排名跃升: {concept_name} 排名 {prev_data['rank']} -> {rank}")

    return alerts
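
# Illustrative only: the backtest path can also be driven from Python directly, e.g.
#   run_backtest('2025-01-06')                          # replay a day, wiping that day's rows first
#   run_backtest('2025-01-06', clear_existing=False)    # keep previously stored alerts
# It reuses the same ALERT_CONFIG thresholds as the realtime loop and resets
# history_cache / cooldown_cache before each replay.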

# ==================== Entry point ====================

def main():
    parser = argparse.ArgumentParser(description='概念异动实时检测服务')
    parser.add_argument('command', nargs='?', default='realtime',
                        choices=['realtime', 'once', 'status', 'init', 'backtest'],
                        help='命令: realtime(实时运行), once(单次运行), status(状态查看), init(初始化表), backtest(回测历史)')
    parser.add_argument('--date', '-d', type=str, default=None,
                        help='回测日期,格式: YYYY-MM-DD,默认为今天')
    parser.add_argument('--keep', '-k', action='store_true',
                        help='回测时保留已有数据(默认会清除)')
    args = parser.parse_args()

    if args.command == 'realtime':
        run_realtime()
    elif args.command == 'once':
        run_single()
    elif args.command == 'status':
        show_status()
    elif args.command == 'init':
        init_tables()
    elif args.command == 'backtest':
        # Backtest mode
        trade_date = args.date or datetime.now().strftime('%Y-%m-%d')
        clear_existing = not args.keep
        run_backtest(trade_date, clear_existing)


if __name__ == "__main__":
    main()
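
# Typical invocations (the script file name below is assumed; adjust to the actual path):
#   python concept_alert_realtime.py                                # realtime loop (default command)
#   python concept_alert_realtime.py once                           # single detection pass
#   python concept_alert_realtime.py status                         # show today's alert summary
#   python concept_alert_realtime.py init                           # create tables from sql/concept_minute_alert.sql
#   python concept_alert_realtime.py backtest -d 2025-01-06         # replay a historical day
#   python concept_alert_realtime.py backtest -d 2025-01-06 --keep  # replay without deleting existing rows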