update pay ui

2025-12-14 16:06:06 +08:00
parent b6b9a6b5dd
commit 024a34cdd0
6 changed files with 1034 additions and 50 deletions

.gitignore (vendored, +3)

@@ -57,3 +57,6 @@ Thumbs.db
 docs/
 src/assets/img/original-backup/
+# Limit-up analysis static data (generated by export_zt_data.py; not committed to Git)
+public/data/zt/

export_zt_data.py (new file, +342)

@@ -0,0 +1,342 @@
#!/usr/bin/env python3
"""
涨停分析数据导出脚本
从 Elasticsearch 导出数据到静态 JSON 文件,供前端直接读取
使用方法:
python export_zt_data.py # 导出最近 30 天数据
python export_zt_data.py --days 7 # 导出最近 7 天
python export_zt_data.py --date 20251212 # 导出指定日期
python export_zt_data.py --all # 导出所有数据
输出目录data/zt/
├── dates.json # 可用日期列表
├── daily/
│ └── {date}.json # 每日分析数据
└── stocks.jsonl # 所有股票记录(用于关键词搜索)
"""
import os
import json
import argparse
from datetime import datetime, timedelta
from collections import defaultdict
from elasticsearch import Elasticsearch
import logging
# Configuration
ES_HOST = os.environ.get('ES_HOST', 'http://127.0.0.1:9200')
# Write into the public directory so the frontend can fetch the files directly
OUTPUT_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'public', 'data', 'zt')

# Logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

# ES connection
es = Elasticsearch([ES_HOST], timeout=60, retry_on_timeout=True, max_retries=3)

def ensure_dirs():
    """Make sure the output directories exist."""
    os.makedirs(os.path.join(OUTPUT_DIR, 'daily'), exist_ok=True)
    logger.info(f"Output directory: {OUTPUT_DIR}")

def get_available_dates():
    """Return all available dates, newest first."""
    query = {
        "size": 0,
        "aggs": {
            "dates": {
                "terms": {
                    "field": "date",
                    "size": 10000,
                    "order": {"_key": "desc"}
                },
                "aggs": {
                    "stock_count": {
                        "cardinality": {"field": "scode"}
                    }
                }
            }
        }
    }
    result = es.search(index="zt_stocks", body=query)
    dates = []
    for bucket in result['aggregations']['dates']['buckets']:
        date = bucket['key']
        count = bucket['doc_count']
        # Format the date: YYYYMMDD -> YYYY-MM-DD
        formatted = f"{date[:4]}-{date[4:6]}-{date[6:]}"
        dates.append({
            'date': date,
            'formatted_date': formatted,
            'count': count
        })
    return dates

def get_daily_stats(date):
    """Fetch the precomputed statistics document for a given date."""
    query = {
        "query": {"term": {"date": date}},
        "_source": ["sector_stats", "word_freq", "chart_data"]
    }
    result = es.search(index="zt_daily_stats", body=query, size=1)
    if result['hits']['total']['value'] > 0:
        return result['hits']['hits'][0]['_source']
    return {}

def get_daily_stocks(date):
    """Fetch all limit-up stocks for a given date."""
    query = {
        "query": {"term": {"date": date}},
        "size": 10000,
        "sort": [{"zt_time": "asc"}],
        "_source": {
            "excludes": ["content_embedding"]  # skip the embedding vector field
        }
    }
    result = es.search(index="zt_stocks", body=query)
    stocks = []
    for hit in result['hits']['hits']:
        stock = hit['_source']
        # Format the limit-up timestamp as HH:MM:SS
        if 'zt_time' in stock:
            try:
                zt_time = datetime.fromisoformat(stock['zt_time'].replace('Z', '+00:00'))
                stock['formatted_time'] = zt_time.strftime('%H:%M:%S')
            except Exception:
                stock['formatted_time'] = ''
        stocks.append(stock)
    return stocks

def process_sector_data(sector_stats, stocks):
    """Build the per-sector breakdown."""
    if sector_stats:
        # Use the precomputed sector_stats
        sector_data = {}
        for sector_info in sector_stats:
            sector_name = sector_info['sector_name']
            sector_data[sector_name] = {
                'count': sector_info['count'],
                'stock_codes': sector_info.get('stock_codes', [])
            }
    else:
        # Derive the breakdown from the stock records
        sector_stocks = defaultdict(list)
        sector_counts = defaultdict(int)
        for stock in stocks:
            for sector in stock.get('core_sectors', []):
                sector_counts[sector] += 1
        # Sectors with a single stock get folded into '其他' (Other)
        small_sectors = {s for s, c in sector_counts.items() if c < 2}
        for stock in stocks:
            scode = stock.get('scode', '')
            valid_sectors = [s for s in stock.get('core_sectors', []) if s not in small_sectors]
            if valid_sectors:
                for sector in valid_sectors:
                    sector_stocks[sector].append(scode)
            else:
                sector_stocks['其他'].append(scode)
        sector_data = {
            sector: {'count': len(codes), 'stock_codes': codes}
            for sector, codes in sector_stocks.items()
        }
    # Ordering: '公告' (announcements) first, then by count descending, '其他' (Other) last
    sorted_items = []
    announcement = sector_data.pop('公告', None)
    other = sector_data.pop('其他', None)
    normal_items = sorted(sector_data.items(), key=lambda x: -x[1]['count'])
    if announcement:
        sorted_items.append(('公告', announcement))
    sorted_items.extend(normal_items)
    if other:
        sorted_items.append(('其他', other))
    return dict(sorted_items)

def calculate_sector_relations_top10(stocks):
    """Count sector co-occurrence pairs and return the top 10."""
    relations = defaultdict(int)
    stock_sectors = defaultdict(set)
    for stock in stocks:
        scode = stock['scode']
        for sector in stock.get('core_sectors', []):
            stock_sectors[scode].add(sector)
    for scode, sectors in stock_sectors.items():
        sector_list = list(sectors)
        for i in range(len(sector_list)):
            for j in range(i + 1, len(sector_list)):
                pair = tuple(sorted([sector_list[i], sector_list[j]]))
                relations[pair] += 1
    sorted_relations = sorted(relations.items(), key=lambda x: -x[1])[:10]
    return {
        'labels': [f"{p[0]} - {p[1]}" for p, _ in sorted_relations],
        'counts': [c for _, c in sorted_relations]
    }

def export_daily_analysis(date):
    """Export the analysis data for a single day."""
    logger.info(f"Exporting date: {date}")
    # Fetch the data
    stats = get_daily_stats(date)
    stocks = get_daily_stocks(date)
    if not stocks:
        logger.warning(f"No data for date {date}")
        return None
    # Sector breakdown
    sector_data = process_sector_data(stats.get('sector_stats', []), stocks)
    # Sector co-occurrence
    sector_relations = calculate_sector_relations_top10(stocks)
    # Chart data ('其他' and '公告' are excluded from the chart)
    chart_data = stats.get('chart_data', {
        'labels': [s for s in sector_data.keys() if s not in ['其他', '公告']],
        'counts': [d['count'] for s, d in sector_data.items() if s not in ['其他', '公告']]
    })
    # Assemble the analysis payload
    analysis = {
        'date': date,
        'formatted_date': f"{date[:4]}-{date[4:6]}-{date[6:]}",
        'total_stocks': len(stocks),
        'sector_data': sector_data,
        'chart_data': chart_data,
        'word_freq_data': stats.get('word_freq', []),
        'sector_relations_top10': sector_relations,
        'stocks': stocks  # full stock list included
    }
    # Write the file
    output_path = os.path.join(OUTPUT_DIR, 'daily', f'{date}.json')
    with open(output_path, 'w', encoding='utf-8') as f:
        json.dump(analysis, f, ensure_ascii=False, indent=2)
    logger.info(f"Saved: {output_path} ({len(stocks)} stocks)")
    return analysis

def export_dates_index(dates):
    """Export the date index."""
    output_path = os.path.join(OUTPUT_DIR, 'dates.json')
    with open(output_path, 'w', encoding='utf-8') as f:
        json.dump({
            'dates': dates,
            'total': len(dates),
            'updated_at': datetime.now().isoformat()
        }, f, ensure_ascii=False, indent=2)
    logger.info(f"Saved date index: {output_path} ({len(dates)} dates)")

def export_stocks_for_search(dates_to_export):
    """Export stock records for search (JSONL format)."""
    output_path = os.path.join(OUTPUT_DIR, 'stocks.jsonl')
    total_count = 0
    with open(output_path, 'w', encoding='utf-8') as f:
        for date_info in dates_to_export:
            date = date_info['date']
            stocks = get_daily_stocks(date)
            for stock in stocks:
                # Keep only the fields needed for search
                search_record = {
                    'date': stock.get('date'),
                    'scode': stock.get('scode'),
                    'sname': stock.get('sname'),
                    'brief': stock.get('brief', ''),
                    'core_sectors': stock.get('core_sectors', []),
                    'zt_time': stock.get('zt_time'),
                    'formatted_time': stock.get('formatted_time', ''),
                    'continuous_days': stock.get('continuous_days', '')
                }
                f.write(json.dumps(search_record, ensure_ascii=False) + '\n')
                total_count += 1
    logger.info(f"Saved search data: {output_path} ({total_count} records)")

def main():
    parser = argparse.ArgumentParser(description='Export limit-up analysis data to JSON files')
    parser.add_argument('--days', type=int, default=30, help='export the most recent N days')
    parser.add_argument('--date', type=str, help='export a specific date (YYYYMMDD)')
    parser.add_argument('--all', action='store_true', help='export all data')
    parser.add_argument('--no-search', action='store_true', help='skip exporting search data')
    args = parser.parse_args()

    ensure_dirs()
    # All dates available in ES
    all_dates = get_available_dates()
    logger.info(f"ES holds data for {len(all_dates)} dates")
    if not all_dates:
        logger.error("No data found")
        return
    # Decide which dates to export
    if args.date:
        dates_to_export = [d for d in all_dates if d['date'] == args.date]
        if not dates_to_export:
            logger.error(f"No data found for date {args.date}")
            return
    elif args.all:
        dates_to_export = all_dates
    else:
        # Default: the most recent N days
        dates_to_export = all_dates[:args.days]
    logger.info(f"Exporting {len(dates_to_export)} dates")
    # Per-day analysis files
    for date_info in dates_to_export:
        try:
            export_daily_analysis(date_info['date'])
        except Exception as e:
            logger.error(f"Export failed for {date_info['date']}: {e}")
    # Date index (always built from the full date list)
    export_dates_index(all_dates)
    # Search data
    if not args.no_search:
        export_stocks_for_search(dates_to_export)
    logger.info("Export complete!")

if __name__ == '__main__':
    main()
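
A quick sanity check of the exported files — a minimal sketch that assumes only the output layout documented in the script header:

    import json
    import os

    BASE = os.path.join('public', 'data', 'zt')
    with open(os.path.join(BASE, 'dates.json'), encoding='utf-8') as f:
        index = json.load(f)
    print(f"{index['total']} dates, updated at {index['updated_at']}")

    # dates.json is sorted newest-first, so the first entry is the latest day
    latest = index['dates'][0]['date']
    with open(os.path.join(BASE, 'daily', f"{latest}.json"), encoding='utf-8') as f:
        daily = json.load(f)
    print(latest, daily['total_stocks'], 'stocks,', len(daily['sector_data']), 'sectors')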

src/services/ztStaticService.js (new file, +309)

@@ -0,0 +1,309 @@
/**
 * Static data service for limit-up analysis.
 * Reads pre-generated JSON files from the /data/zt/ directory.
 * No backend API required, so it works for fully static deployments.
 */

// Base path for the data files
const DATA_BASE_URL = '/data/zt';

// In-memory cache
const cache = {
  dates: null,
  daily: new Map(),
};

/**
 * Fetch the list of available dates.
 */
export const fetchAvailableDates = async () => {
  try {
    // Serve from cache when possible
    if (cache.dates) {
      return { success: true, events: cache.dates };
    }
    const response = await fetch(`${DATA_BASE_URL}/dates.json`);
    if (!response.ok) {
      throw new Error(`HTTP ${response.status}`);
    }
    const data = await response.json();
    // Convert to calendar event format
    const events = (data.dates || []).map(d => ({
      title: `${d.count}`,
      start: d.formatted_date,
      end: d.formatted_date,
      className: 'bg-gradient-primary',
      allDay: true,
      date: d.date,
      count: d.count,
    }));
    // Cache the result
    cache.dates = events;
    return { success: true, events, total: events.length };
  } catch (error) {
    console.error('[ztStaticService] fetchAvailableDates error:', error);
    return { success: false, error: error.message, events: [] };
  }
};

/**
 * Fetch the analysis data for a given date.
 */
export const fetchDailyAnalysis = async (date) => {
  try {
    // Serve from cache when possible
    if (cache.daily.has(date)) {
      return { success: true, data: cache.daily.get(date), from_cache: true };
    }
    const response = await fetch(`${DATA_BASE_URL}/daily/${date}.json`);
    if (!response.ok) {
      if (response.status === 404) {
        return { success: false, error: `No data for date ${date}` };
      }
      throw new Error(`HTTP ${response.status}`);
    }
    const data = await response.json();
    // Cache the result
    cache.daily.set(date, data);
    return { success: true, data, from_cache: false };
  } catch (error) {
    console.error('[ztStaticService] fetchDailyAnalysis error:', error);
    return { success: false, error: error.message };
  }
};

/**
 * Fetch word-cloud data.
 * Extracted from the daily analysis data.
 */
export const fetchWordCloudData = async (date) => {
  try {
    const result = await fetchDailyAnalysis(date);
    if (!result.success) {
      return result;
    }
    const wordFreqData = result.data.word_freq_data || [];
    return { success: true, data: wordFreqData };
  } catch (error) {
    console.error('[ztStaticService] fetchWordCloudData error:', error);
    return { success: false, error: error.message, data: [] };
  }
};

/**
 * High-position stock statistics,
 * computed from the stocks in the daily analysis data.
 */
export const fetchHighPositionStocks = async (date) => {
  try {
    const result = await fetchDailyAnalysis(date);
    if (!result.success) {
      return result;
    }
    const stocks = result.data.stocks || [];
    // Keep consecutive limit-up stocks (continuous_days parses to >= 2)
    const highPositionStocks = stocks
      .filter(stock => {
        const days = parseContinuousDays(stock.continuous_days);
        return days >= 2;
      })
      .map(stock => {
        const days = parseContinuousDays(stock.continuous_days);
        return {
          stock_code: stock.scode,
          stock_name: stock.sname,
          price: '-', // no real-time price in the static data
          increase_rate: 10.0, // limit-up is fixed at 10%
          continuous_limit_up: days,
          industry: (stock.core_sectors || [])[0] || 'Unknown',
          turnover_rate: '-', // no turnover rate in the static data
          brief: stock.brief || '',
        };
      })
      .sort((a, b) => b.continuous_limit_up - a.continuous_limit_up);
    // Aggregate statistics
    const totalCount = highPositionStocks.length;
    const maxDays = highPositionStocks.length > 0
      ? Math.max(...highPositionStocks.map(s => s.continuous_limit_up))
      : 0;
    const avgDays = highPositionStocks.length > 0
      ? (highPositionStocks.reduce((sum, s) => sum + s.continuous_limit_up, 0) / totalCount).toFixed(1)
      : 0;
    return {
      success: true,
      data: {
        stocks: highPositionStocks,
        statistics: {
          total_count: totalCount,
          max_continuous_days: maxDays,
          avg_continuous_days: avgDays,
        },
      },
    };
  } catch (error) {
    console.error('[ztStaticService] fetchHighPositionStocks error:', error);
    return { success: false, error: error.message };
  }
};

/**
 * Parse the consecutive limit-up day count,
 * e.g. "2连板" -> 2, "首板" (first board) -> 1.
 */
const parseContinuousDays = (str) => {
  if (!str) return 1;
  const match = str.match(/(\d+)/);
  if (match) {
    return parseInt(match[1], 10);
  }
  // "首板" and any other string without a digit count as 1
  return 1;
};

/**
 * Keyword search over the cached daily data.
 */
export const searchStocks = async (searchParams) => {
  try {
    const { query, date, date_range, page = 1, page_size = 20 } = searchParams;
    if (!query || query.trim() === '') {
      return { success: false, error: 'Search keyword must not be empty' };
    }
    const queryLower = query.toLowerCase().trim();
    let allStocks = [];
    // Work out which dates to search
    let datesToSearch = [];
    if (date) {
      datesToSearch = [date];
    } else if (date_range?.start && date_range?.end) {
      // Filter the cached date list
      const datesResult = await fetchAvailableDates();
      if (datesResult.success) {
        datesToSearch = datesResult.events
          .filter(d => d.date >= date_range.start && d.date <= date_range.end)
          .map(d => d.date);
      }
    } else {
      // Default: search the most recent 30 days
      const datesResult = await fetchAvailableDates();
      if (datesResult.success) {
        datesToSearch = datesResult.events.slice(0, 30).map(d => d.date);
      }
    }
    // Collect stocks from each day's data
    for (const d of datesToSearch) {
      const result = await fetchDailyAnalysis(d);
      if (result.success && result.data.stocks) {
        const stocks = result.data.stocks.map(s => ({ ...s, date: d }));
        allStocks = allStocks.concat(stocks);
      }
    }
    // Keyword matching with tiered scores
    const results = allStocks
      .map(stock => {
        let score = 0;
        if (queryLower === (stock.scode || '').toLowerCase()) {
          score = 100; // exact stock-code match
        } else if (queryLower === (stock.sname || '').toLowerCase()) {
          score = 90; // exact name match
        } else if ((stock.sname || '').toLowerCase().includes(queryLower)) {
          score = 80; // partial name match
        } else if ((stock.core_sectors || []).some(s => s.toLowerCase().includes(queryLower))) {
          score = 70; // sector match
        } else if ((stock.brief || '').toLowerCase().includes(queryLower)) {
          score = 60; // limit-up reason match
        }
        return { ...stock, _score: score };
      })
      .filter(s => s._score > 0)
      .sort((a, b) => b._score - a._score || b.date.localeCompare(a.date));
    // Pagination
    const total = results.length;
    const start = (page - 1) * page_size;
    const pageResults = results.slice(start, start + page_size);
    return {
      success: true,
      data: {
        stocks: pageResults,
        total,
        page,
        page_size,
        total_pages: Math.ceil(total / page_size),
        search_mode: 'keyword',
      },
    };
  } catch (error) {
    console.error('[ztStaticService] searchStocks error:', error);
    return { success: false, error: error.message };
  }
};

/**
 * Batch-fetch stock details for a list of codes on a given date.
 */
export const fetchStocksBatchDetail = async (codes, date) => {
  try {
    const result = await fetchDailyAnalysis(date);
    if (!result.success) {
      return result;
    }
    const stocks = (result.data.stocks || []).filter(s => codes.includes(s.scode));
    return { success: true, data: stocks };
  } catch (error) {
    console.error('[ztStaticService] fetchStocksBatchDetail error:', error);
    return { success: false, error: error.message };
  }
};

/**
 * Clear the in-memory cache.
 */
export const clearCache = () => {
  cache.dates = null;
  cache.daily.clear();
};

export default {
  fetchAvailableDates,
  fetchDailyAnalysis,
  fetchWordCloudData,
  fetchHighPositionStocks,
  searchStocks,
  fetchStocksBatchDetail,
  clearCache,
};
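
The service above runs in the browser; the same static-file contract can also be exercised from Python. A minimal sketch, assuming a dev server (the origin below is a placeholder) is serving the public/ directory:

    import json
    import urllib.request

    BASE = 'http://localhost:3000/data/zt'  # hypothetical dev-server origin

    with urllib.request.urlopen(f'{BASE}/dates.json') as resp:
        dates = json.load(resp)['dates']

    # The first entry is the most recent trading day
    latest = dates[0]['date']
    with urllib.request.urlopen(f'{BASE}/daily/{latest}.json') as resp:
        daily = json.load(resp)
    print(latest, daily['total_stocks'], 'limit-up stocks')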

HighPositionStocks (component, modified)

@@ -28,7 +28,7 @@ import {
 } from '@chakra-ui/react';
 import { StarIcon, TriangleUpIcon } from '@chakra-ui/icons';
 import { logger } from '../../../utils/logger';
-import { getApiBase } from '../../../utils/apiConfig';
+import ztStaticService from '../../../services/ztStaticService';
 const HighPositionStocks = ({ dateStr }) => {
   const [highPositionData, setHighPositionData] = useState(null);
@@ -38,26 +38,26 @@ const HighPositionStocks = ({ dateStr }) => {
   const accentColor = useColorModeValue('blue.500', 'blue.300');
   useEffect(() => {
+    if (dateStr) {
       fetchHighPositionStocks();
+    }
   }, [dateStr]);
   const fetchHighPositionStocks = async () => {
     setLoading(true);
     try {
-      const API_URL = process.env.NODE_ENV === 'production' ? `${getApiBase()}/report-api` : 'http://111.198.58.126:5001';
-      const response = await fetch(`${API_URL}/api/limit-analyse/high-position-stocks?date=${dateStr}`);
-      const data = await response.json();
-      logger.debug('HighPositionStocks', 'API response', {
+      const data = await ztStaticService.fetchHighPositionStocks(dateStr);
+      logger.debug('HighPositionStocks', 'static data response', {
         date: dateStr,
         success: data.success,
-        dataLength: data.data?.length
+        stockCount: data.data?.stocks?.length
      });
      if (data.success) {
        setHighPositionData(data.data);
      } else {
-        logger.warn('HighPositionStocks', 'API returned failure', {
+        logger.warn('HighPositionStocks', 'data fetch failed', {
          date: dateStr,
          error: data.error
        });

LimitAnalyse (page component, modified)

@@ -33,10 +33,8 @@ import {
 // Note: in real use, these components should be split into separate files
 // Here, for demonstration, we assume they are exported correctly
-import { getApiBase } from '../../utils/apiConfig';
-
-// API configuration
-const API_URL = process.env.NODE_ENV === 'production' ? `${getApiBase()}/report-api` : 'http://111.198.58.126:5001';
+// Use the static data service (reads JSON files from /data/zt/)
+import ztStaticService from '../../services/ztStaticService';
 // Imported components (in real use they should come from separate files)
 // Restore the lightweight calendar built into this page
@@ -112,14 +110,13 @@ export default function LimitAnalyse() {
   }
 }, [availableDates]);
-// API call helpers
+// Fetch data via the static data service
 const fetchAvailableDates = async () => {
   try {
-    const response = await fetch(`${API_URL}/api/v1/dates/available`);
-    const data = await response.json();
+    const data = await ztStaticService.fetchAvailableDates();
     if (data.success) {
       setAvailableDates(data.events);
-      logger.debug('LimitAnalyse', 'available dates loaded', {
+      logger.debug('LimitAnalyse', 'available dates loaded (static files)', {
         count: data.events?.length || 0
       });
     }
@@ -131,47 +128,29 @@ export default function LimitAnalyse() {
 const fetchDailyAnalysis = async (date) => {
   setLoading(true);
   try {
-    const response = await fetch(`${API_URL}/api/v1/analysis/daily/${date}`);
-    const data = await response.json();
+    const data = await ztStaticService.fetchDailyAnalysis(date);
     if (data.success) {
       setDailyData(data.data);
       // 🎯 track daily-stats view
       trackDailyStatsViewed(data.data, date);
-      // fetch word-cloud data
-      fetchWordCloudData(date);
-      logger.debug('LimitAnalyse', 'daily analysis loaded', {
+      // word-cloud data is already included in the analysis payload
+      setWordCloudData(data.data.word_freq_data || []);
+      logger.debug('LimitAnalyse', 'daily analysis loaded (static files)', {
         date,
-        totalStocks: data.data?.total_stocks || 0
+        totalStocks: data.data?.total_stocks || 0,
+        fromCache: data.from_cache
       });
-      // ❌ success toast removed (non-critical operation)
     }
   } catch (error) {
     logger.error('LimitAnalyse', 'fetchDailyAnalysis', error, { date });
-    // ❌ failure toast removed (non-critical operation)
   } finally {
     setLoading(false);
   }
 };
-const fetchWordCloudData = async (date) => {
-  try {
-    const response = await fetch(`${API_URL}/api/v1/analysis/wordcloud/${date}`);
-    const data = await response.json();
-    if (data.success) {
-      setWordCloudData(data.data);
-      logger.debug('LimitAnalyse', 'word-cloud data loaded', {
-        date,
-        count: data.data?.length || 0
-      });
-    }
-  } catch (error) {
-    logger.error('LimitAnalyse', 'fetchWordCloudData', error, { date });
-  }
-};
 // Format the date
 const formatDateStr = (date) => {
   const year = date.getFullYear();
@@ -193,27 +172,22 @@ export default function LimitAnalyse() {
   fetchDailyAnalysis(dateString);
 };
-// Handle search
+// Handle search (keyword search over the static data)
 const handleSearch = async (searchParams) => {
   // 🎯 track search initiation
   trackSearchInitiated(
     searchParams.query,
     searchParams.type || 'all',
-    searchParams.mode || 'hybrid'
+    searchParams.mode || 'keyword' // static mode only supports keyword search
   );
   setLoading(true);
   try {
-    const response = await fetch(`${API_URL}/api/v1/stocks/search/hybrid`, {
-      method: 'POST',
-      headers: { 'Content-Type': 'application/json' },
-      body: JSON.stringify(searchParams),
-    });
-    const data = await response.json();
+    const data = await ztStaticService.searchStocks(searchParams);
     if (data.success) {
       setSearchResults(data.data);
       setIsSearchOpen(true);
-      logger.info('LimitAnalyse', 'search completed', {
+      logger.info('LimitAnalyse', 'search completed (static files)', {
         resultCount: data.data?.total || 0,
         searchParams
       });
@@ -223,6 +197,13 @@ export default function LimitAnalyse() {
         status: 'success',
         duration: 3000,
       });
+      } else {
+        toast({
+          title: 'Search failed',
+          description: data.error || 'Please try again later',
+          status: 'error',
+          duration: 3000,
+        });
       }
     } catch (error) {
       logger.error('LimitAnalyse', 'handleSearch', error, { searchParams });

zt_api_static.py (new file, +349)

@@ -0,0 +1,349 @@
#!/usr/bin/env python3
"""
涨停分析 API静态文件版本
从 data/zt/ 目录读取预生成的 JSON 文件,不依赖 Elasticsearch
启动方式:
python zt_api_static.py
端口8800与原 report_zt_api.py 相同,可替换使用)
"""
import os
import json
from flask import Flask, request, jsonify, send_from_directory
from flask_cors import CORS
from datetime import datetime
import logging
app = Flask(__name__)
CORS(app)
# Configuration
DATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data', 'zt')

# Logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# In-memory caches
_dates_cache = None
_stocks_cache = None

def load_dates():
    """Load the date index (cached after the first read)."""
    global _dates_cache
    if _dates_cache is None:
        dates_file = os.path.join(DATA_DIR, 'dates.json')
        if os.path.exists(dates_file):
            with open(dates_file, 'r', encoding='utf-8') as f:
                _dates_cache = json.load(f)
        else:
            _dates_cache = {'dates': [], 'total': 0}
    return _dates_cache

def load_daily_analysis(date):
    """Load the analysis data for a given date."""
    daily_file = os.path.join(DATA_DIR, 'daily', f'{date}.json')
    if os.path.exists(daily_file):
        with open(daily_file, 'r', encoding='utf-8') as f:
            return json.load(f)
    return None

def load_stocks_for_search():
    """Load the stock records used for search (cached after the first read)."""
    global _stocks_cache
    if _stocks_cache is None:
        stocks_file = os.path.join(DATA_DIR, 'stocks.jsonl')
        if os.path.exists(stocks_file):
            _stocks_cache = []
            with open(stocks_file, 'r', encoding='utf-8') as f:
                for line in f:
                    if line.strip():
                        _stocks_cache.append(json.loads(line))
            logger.info(f"Loaded {len(_stocks_cache)} stock records for search")
        else:
            _stocks_cache = []
    return _stocks_cache

# ==================== API routes ====================

@app.route('/api/v1/dates/available', methods=['GET'])
def get_available_dates():
    """Return all available dates."""
    try:
        data = load_dates()
        # Convert to calendar event format
        events = []
        for d in data.get('dates', []):
            events.append({
                'title': f"{d['count']}",
                'start': d['formatted_date'],
                'end': d['formatted_date'],
                'className': 'bg-gradient-primary',
                'allDay': True,
                'date': d['date'],
                'count': d['count']
            })
        return jsonify({
            'success': True,
            'events': events,
            'total': len(events)
        })
    except Exception as e:
        logger.error(f"Failed to list dates: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500

@app.route('/api/v1/analysis/daily/<date>', methods=['GET'])
def get_daily_analysis(date):
    """Return the analysis data for a given date."""
    try:
        data = load_daily_analysis(date)
        if data is None:
            return jsonify({
                'success': False,
                'error': f'No data for date {date}'
            }), 404
        # Response shape is compatible with the original API
        return jsonify({
            'success': True,
            'data': {
                'date': data['date'],
                'formatted_date': data['formatted_date'],
                'total_stocks': data['total_stocks'],
                'sector_data': data['sector_data'],
                'chart_data': data['chart_data'],
                'word_freq_data': data['word_freq_data'],
                'sector_relations_top10': data['sector_relations_top10']
            },
            'from_cache': True,
            'cache_source': 'static_file'
        })
    except Exception as e:
        logger.error(f"Failed to load analysis for date {date}: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500

@app.route('/api/v1/stocks/batch-detail', methods=['POST'])
def get_stocks_batch_detail():
    """Batch-fetch stock details."""
    try:
        data = request.json
        stock_codes = data.get('codes', [])
        date = data.get('date')
        if not stock_codes or not date:
            return jsonify({'success': False, 'error': 'Missing parameters'}), 400
        # Pull stock details from the daily analysis data
        daily_data = load_daily_analysis(date)
        if not daily_data:
            return jsonify({'success': False, 'error': f'No data for date {date}'}), 404
        # Filter down to the requested codes
        stocks = [s for s in daily_data.get('stocks', []) if s.get('scode') in stock_codes]
        return jsonify({
            'success': True,
            'data': stocks
        })
    except Exception as e:
        logger.error(f"Batch stock detail failed: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500

@app.route('/api/v1/stocks/search/hybrid', methods=['POST'])
def search_stocks():
    """
    Keyword search over stocks.
    Matches stock code, stock name, limit-up reason (brief), and sectors.
    """
    try:
        data = request.json
        query = data.get('query', '').strip().lower()
        date = data.get('date')
        date_range = data.get('date_range', {})
        page = data.get('page', 1)
        page_size = data.get('page_size', 20)
        if not query:
            return jsonify({'success': False, 'error': 'Search keyword must not be empty'}), 400
        # Load the search corpus
        all_stocks = load_stocks_for_search()
        # Filter and score
        results = []
        for stock in all_stocks:
            # Date filters
            stock_date = stock.get('date', '')
            if date and stock_date != date:
                continue
            if date_range:
                if date_range.get('start') and stock_date < date_range['start']:
                    continue
                if date_range.get('end') and stock_date > date_range['end']:
                    continue
            # Keyword matching with tiered scores
            match_score = 0
            if query == stock.get('scode', '').lower():
                match_score = 100  # exact stock-code match (highest priority)
            elif query == stock.get('sname', '').lower():
                match_score = 90   # exact name match
            elif query in stock.get('sname', '').lower():
                match_score = 80   # partial name match
            elif any(query in sector.lower() for sector in stock.get('core_sectors', [])):
                match_score = 70   # sector match
            elif query in stock.get('brief', '').lower():
                match_score = 60   # limit-up reason match
            if match_score > 0:
                stock_copy = stock.copy()
                stock_copy['_score'] = match_score
                results.append(stock_copy)
        # Sort by score, then by date (newest first)
        results.sort(key=lambda x: (-x['_score'], -int(x.get('date', '0'))))
        # Pagination
        total = len(results)
        start = (page - 1) * page_size
        end = start + page_size
        page_results = results[start:end]
        return jsonify({
            'success': True,
            'data': {
                'stocks': page_results,
                'total': total,
                'page': page,
                'page_size': page_size,
                'total_pages': (total + page_size - 1) // page_size,
                'search_mode': 'keyword'
            }
        })
    except Exception as e:
        logger.error(f"Search failed: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500

@app.route('/api/v1/init/data', methods=['GET'])
def init_data():
    """Initial payload: the latest day's analysis plus the date list."""
    try:
        dates_data = load_dates()
        dates = dates_data.get('dates', [])
        if not dates:
            return jsonify({'success': False, 'error': 'No data available'}), 404
        latest_date = dates[0]['date']
        analysis = load_daily_analysis(latest_date)
        if not analysis:
            return jsonify({'success': False, 'error': 'Failed to load data'}), 500
        # Convert the dates to calendar event format
        events = [{
            'title': f"{d['count']}",
            'start': d['formatted_date'],
            'end': d['formatted_date'],
            'className': 'bg-gradient-primary',
            'allDay': True,
            'date': d['date'],
            'count': d['count']
        } for d in dates]
        return jsonify({
            'success': True,
            'data': {
                'latest_date': latest_date,
                'formatted_date': analysis['formatted_date'],
                'analysis': {
                    'date': analysis['date'],
                    'formatted_date': analysis['formatted_date'],
                    'total_stocks': analysis['total_stocks'],
                    'sector_data': analysis['sector_data'],
                    'chart_data': analysis['chart_data'],
                    'word_freq_data': analysis['word_freq_data'],
                    'sector_relations_top10': analysis['sector_relations_top10']
                },
                'available_dates': events
            }
        })
    except Exception as e:
        logger.error(f"Init data failed: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500

@app.route('/api/v1/health', methods=['GET'])
def health_check():
    """Health check."""
    dates_data = load_dates()
    return jsonify({
        'success': True,
        'status': 'healthy',
        'mode': 'static_file',
        'data_dir': DATA_DIR,
        'total_dates': dates_data.get('total', 0),
        'updated_at': dates_data.get('updated_at', 'unknown')
    })

@app.route('/api/v1/cache/reload', methods=['POST'])
def reload_cache():
    """Drop and rebuild the in-memory caches."""
    global _dates_cache, _stocks_cache
    _dates_cache = None
    _stocks_cache = None
    # Reload from disk
    load_dates()
    load_stocks_for_search()
    return jsonify({
        'success': True,
        'message': 'Cache reloaded'
    })

# Static file serving (optional, for direct access to the JSON files)
@app.route('/data/zt/<path:filename>')
def serve_data_file(filename):
    """Serve the static JSON files directly."""
    return send_from_directory(DATA_DIR, filename)

if __name__ == '__main__':
    # Pre-load data
    logger.info("Pre-loading data...")
    load_dates()
    load_stocks_for_search()
    logger.info("Data loaded")
    # Note: debug=True is for development only
    app.run(debug=True, host='0.0.0.0', port=8800)
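
A minimal client sketch against this API. It assumes the server is running locally on port 8800 and uses the third-party requests library; the search keyword is a made-up example:

    import requests

    BASE = 'http://localhost:8800'

    # Health check
    print(requests.get(f'{BASE}/api/v1/health').json())

    # Latest analysis bundle
    init = requests.get(f'{BASE}/api/v1/init/data').json()
    print(init['data']['latest_date'], init['data']['analysis']['total_stocks'])

    # Keyword search (the route keeps the /search/hybrid path for frontend compatibility)
    resp = requests.post(
        f'{BASE}/api/v1/stocks/search/hybrid',
        json={'query': '机器人', 'page': 1, 'page_size': 10},  # '机器人' (robotics): example keyword
    )
    print(resp.json()['data']['total'])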