更新Company页面的UI为FUI风格

This commit is contained in:
  app.py (+68 lines)
}), 500
|
||||
|
||||
|
||||
# ==================== Events-list cache ====================
# Redis key namespace for cached /api/events responses.
EVENTS_LIST_CACHE_PREFIX = "vf:events:list:"
# 30-second TTL — the events list changes frequently, so keep it short.
EVENTS_LIST_CACHE_EXPIRE = 30
|
||||
|
||||
def get_events_cache_key(args_dict):
    """Build the Redis key for a cached events-list response.

    The key incorporates every non-empty query parameter, sorted by name
    for a canonical ordering.  The previous whitelist (``page``,
    ``per_page``, ``sort``, ``importance``, ``order``) silently dropped
    parameters such as ``type``, ``status`` and ``include_related_data``
    that do change the response payload but are not rejected by
    ``is_simple_events_query`` — so two different cacheable queries could
    collide on one key and serve each other's cached data.

    Args:
        args_dict: Flat ``{param: value}`` dict of the request's
            query-string parameters (e.g. ``request.args.to_dict()``).

    Returns:
        str: ``EVENTS_LIST_CACHE_PREFIX`` followed by ``&``-joined
        ``key=value`` pairs; just the prefix if no parameter is set.
    """
    cache_parts = [
        f"{key}={val}"
        for key, val in sorted(args_dict.items())
        if val  # skip empty values so absent and blank params key identically
    ]
    return EVENTS_LIST_CACHE_PREFIX + "&".join(cache_parts)
|
||||
|
||||
def is_simple_events_query(args_dict):
    """Decide whether an events-list request is simple enough to cache.

    A request is cacheable only when it is past page 1 and carries none
    of the search/filter parameters below.

    Args:
        args_dict: Flat ``{param: value}`` dict of query-string params.

    Returns:
        bool: True if the query may be served from / written to cache.
    """
    # Page 1 is never cached so freshly pushed events are visible at once.
    # ``page`` comes straight from the query string; a non-numeric value
    # previously raised ValueError and 500'd the endpoint before the
    # tolerant ``request.args.get(..., type=int)`` parse ever ran — treat
    # it as "not cacheable" instead.
    try:
        page = int(args_dict.get('page', 1))
    except (TypeError, ValueError):
        return False
    if page <= 1:
        return False

    # Any search, date-range, industry, tag or return-range filter makes
    # the query too specific to be worth caching.
    complex_params = (
        'q', 'search_query', 'start_date', 'end_date', 'date_range',
        'industry_code', 'tag', 'tags', 'keywords', 'creator_id',
        'min_avg_return', 'max_avg_return', 'min_max_return', 'max_max_return',
        'min_week_return', 'max_week_return', 'min_hot_score', 'max_hot_score',
    )
    return not any(args_dict.get(param) for param in complex_params)
|
||||
|
||||
|
||||
@app.route('/api/events', methods=['GET'], strict_slashes=False)
|
||||
def api_get_events():
|
||||
"""
|
||||
@@ -10543,6 +10576,21 @@ def api_get_events():
|
||||
page = max(1, request.args.get('page', 1, type=int))
|
||||
per_page = min(100, max(1, request.args.get('per_page', 10, type=int)))
|
||||
|
||||
# ==================== 缓存检查 ====================
|
||||
args_dict = request.args.to_dict()
|
||||
use_cache = is_simple_events_query(args_dict)
|
||||
cache_key = None
|
||||
|
||||
if use_cache:
|
||||
cache_key = get_events_cache_key(args_dict)
|
||||
try:
|
||||
cached = redis_client.get(cache_key)
|
||||
if cached:
|
||||
import json
|
||||
return jsonify(json.loads(cached))
|
||||
except Exception as e:
|
||||
print(f"[Events] 缓存读取失败: {e}")
|
||||
|
||||
# 基础筛选参数
|
||||
event_type = request.args.get('type', 'all')
|
||||
event_status = request.args.get('status', 'active')
|
||||
@@ -10592,7 +10640,10 @@ def api_get_events():
|
||||
include_related_data = request.args.get('include_related_data', 'false').lower() == 'true'
|
||||
|
||||
# ==================== 构建查询 ====================
|
||||
query = Event.query
|
||||
from sqlalchemy.orm import joinedload
|
||||
|
||||
# 使用 joinedload 预加载 creator,解决 N+1 查询问题
|
||||
query = Event.query.options(joinedload(Event.creator))
|
||||
|
||||
# 只返回有关联股票的事件(没有关联股票的事件不计入列表)
|
||||
from sqlalchemy import exists
|
||||
@@ -10803,7 +10854,8 @@ def api_get_events():
|
||||
if search_query:
|
||||
applied_filters['search_query'] = search_query
|
||||
applied_filters['search_type'] = search_type
|
||||
return jsonify({
|
||||
|
||||
response_data = {
|
||||
'success': True,
|
||||
'data': {
|
||||
'events': events_data,
|
||||
@@ -10820,7 +10872,17 @@ def api_get_events():
|
||||
'total_count': paginated.total
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
# ==================== 写入缓存 ====================
|
||||
if use_cache and cache_key:
|
||||
try:
|
||||
import json
|
||||
redis_client.setex(cache_key, EVENTS_LIST_CACHE_EXPIRE, json.dumps(response_data))
|
||||
except Exception as e:
|
||||
print(f"[Events] 缓存写入失败: {e}")
|
||||
|
||||
return jsonify(response_data)
|
||||
except Exception as e:
|
||||
app.logger.error(f"获取事件列表出错: {str(e)}", exc_info=True)
|
||||
return jsonify({
|
||||
|
||||
Reference in New Issue
Block a user