update pay ui

2025-12-17 11:30:46 +08:00
parent 9e54dc1f7f
commit b5cd263e5b


@@ -2005,12 +2005,12 @@ class MCPAgentIntegrated:
         # Append the current user question
         messages.append({"role": "user", "content": user_query})
-        # Use the Kimi thinking model
+        # Use the Kimi thinking model (actually deepseek; max_tokens capped at 8192)
         response = self.kimi_client.chat.completions.create(
             model=self.kimi_model,
             messages=messages,
-            temperature=1.0,  # recommended by Kimi
-            max_tokens=32768,  # large enough to hold reasoning_content
+            temperature=1.0,
+            max_tokens=8192,
         )
         choice = response.choices[0]
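
For context, a minimal sketch of how a response from such a thinking model is typically unpacked, assuming the endpoint follows the DeepSeek-style OpenAI-compatible convention of exposing the trace as a separate reasoning_content attribute next to content (the attribute name comes from the deleted comment above; the API key, base URL, model id, and prompt below are placeholders, not values from this repository):

    from openai import OpenAI

    client = OpenAI(api_key="sk-...", base_url="https://api.example.com/v1")  # placeholder credentials/endpoint

    response = client.chat.completions.create(
        model="reasoning-model",  # placeholder model id
        messages=[{"role": "user", "content": "user question"}],
        temperature=1.0,
        max_tokens=8192,  # mirrors the new cap introduced by this commit
    )

    choice = response.choices[0]
    # Thinking models following the DeepSeek convention return the chain of thought
    # separately from the final answer; fall back gracefully if the field is absent.
    reasoning = getattr(choice.message, "reasoning_content", None)
    answer = choice.message.content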
@@ -2085,7 +2085,7 @@ class MCPAgentIntegrated:
             model=self.deepmoney_model,
             messages=messages,
             temperature=0.7,
-            max_tokens=32784,
+            max_tokens=8192,
         )
         summary = response.choices[0].message.content
@@ -2279,7 +2279,7 @@ class MCPAgentIntegrated:
             model="kimi-k2-turbo-preview",  # use the non-thinking model, which is faster
             messages=messages,
             temperature=0.7,
-            max_tokens=32768,  # raised token limit to support chart configuration
+            max_tokens=8192,
         )
         summary = response.choices[0].message.content
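
The deleted comment had raised the limit specifically so large chart configurations would fit; with the cap lowered to 8192 it may be worth detecting truncation. A hedged sketch using the standard finish_reason field of the OpenAI-compatible response (the function and log message are illustrative, not part of this commit):

    import logging

    def extract_summary(response) -> str:
        """Pull the summary text out of a chat.completions response and warn on truncation."""
        choice = response.choices[0]
        # finish_reason == "length" means generation stopped at max_tokens,
        # which would leave an embedded chart configuration incomplete.
        if choice.finish_reason == "length":
            logging.warning("completion truncated at max_tokens=8192; chart config may be cut off")
        return choice.message.content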
@@ -2366,7 +2366,7 @@ class MCPAgentIntegrated:
             model=self.deepmoney_model,
             messages=messages,
             temperature=0.3,
-            max_tokens=32768,
+            max_tokens=8192,
         )
         title = response.choices[0].message.content.strip()
@@ -2507,7 +2507,7 @@ class MCPAgentIntegrated:
             model=self.deepmoney_model,
             messages=messages,
             temperature=0.7,
-            max_tokens=32768,
+            max_tokens=8192,
         )
         plan_content = fallback_response.choices[0].message.content
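
Since the same 8192 literal now appears at every call site touched by this commit, one possible follow-up cleanup (hypothetical, not part of this change) is hoisting it into a single module-level constant so a future adjustment touches one line:

    # Hypothetical refactor: a single source of truth for the completion-token
    # budget shared by the chat.completions.create calls shown in this diff.
    MAX_COMPLETION_TOKENS = 8192

    def completion_kwargs(temperature: float) -> dict:
        """Shared keyword arguments for the LLM calls in MCPAgentIntegrated."""
        return {"temperature": temperature, "max_tokens": MAX_COMPLETION_TOKENS}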
@@ -2703,7 +2703,7 @@ class MCPAgentIntegrated:
             model="kimi-k2-turbo-preview",
             messages=messages,
             temperature=0.7,
-            max_tokens=32768,
+            max_tokens=8192,
             stream=True,  # enable streaming output
         )
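
With stream=True the OpenAI-compatible client returns an iterator of chunks rather than a single response object. A minimal consumption sketch in the standard OpenAI Python SDK pattern (the base URL, key, and prompt are placeholders; only the model id comes from the diff above):

    from openai import OpenAI

    client = OpenAI(api_key="sk-...", base_url="https://api.example.com/v1")  # placeholder credentials/endpoint
    messages = [{"role": "user", "content": "Summarize the results."}]  # placeholder prompt

    stream = client.chat.completions.create(
        model="kimi-k2-turbo-preview",
        messages=messages,
        temperature=0.7,
        max_tokens=8192,
        stream=True,
    )

    summary_parts = []
    for chunk in stream:
        # Each streamed chunk carries an incremental delta; content may be None
        # for role-only or final chunks, so guard before appending.
        delta = chunk.choices[0].delta
        if delta.content:
            summary_parts.append(delta.content)
    summary = "".join(summary_parts)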