Import 9 alphaear finance skills
- alphaear-deepear-lite: DeepEar Lite API integration - alphaear-logic-visualizer: Draw.io XML finance diagrams - alphaear-news: Real-time finance news (10+ sources) - alphaear-predictor: Kronos time-series forecasting - alphaear-reporter: Professional financial reports - alphaear-search: Web search + local RAG - alphaear-sentiment: FinBERT/LLM sentiment analysis - alphaear-signal-tracker: Signal evolution tracking - alphaear-stock: A-Share/HK/US stock data Updates: - All scripts updated to use universal .env path - Added JINA_API_KEY, LLM_*, DEEPSEEK_API_KEY to .env.example - Updated load_dotenv() to use ~/.config/opencode/.env
This commit is contained in:
81
skills/alphaear-predictor/scripts/utils/llm/router.py
Normal file
81
skills/alphaear-predictor/scripts/utils/llm/router.py
Normal file
@@ -0,0 +1,81 @@
|
||||
import os
|
||||
from typing import Optional, List, Dict, Any, Union
|
||||
from agno.models.base import Model
|
||||
from loguru import logger
|
||||
from dotenv import load_dotenv
|
||||
from ..llm.factory import get_model
|
||||
from ..llm.capability import ModelCapabilityRegistry
|
||||
|
||||
# Load environment variables from the universal opencode .env file.
# expanduser resolves "~" so the path works regardless of CWD; variables
# already present in the process environment are NOT overridden by default.
load_dotenv(os.path.expanduser("~/.config/opencode/.env"))
|
||||
|
||||
|
||||
class ModelRouter:
    """Route between a reasoning/writing model and a tool-calling model.

    Responsibilities:
    1. Manage a "reasoning model" (long-form reasoning/writing) and a
       "tool model" (native tool/function calling) as separately
       configured backends.
    2. Pick the appropriate model for an agent depending on whether the
       agent uses tools (see ``get_model_for_agent``).

    Configuration is read from environment variables once, at construction
    time: ``REASONING_MODEL_{PROVIDER,ID,HOST}`` and
    ``TOOL_MODEL_{PROVIDER,ID,HOST}``, falling back to the generic
    ``LLM_PROVIDER`` / ``LLM_MODEL`` / ``LLM_HOST`` settings, then to the
    hard-coded defaults ("openai" / "gpt-4o").
    """

    def __init__(self):
        # Reasoning model: dedicated REASONING_* vars first, then the
        # generic LLM_* vars, then hard-coded defaults.
        self.reasoning_provider = os.getenv(
            "REASONING_MODEL_PROVIDER", os.getenv("LLM_PROVIDER", "openai")
        )
        self.reasoning_id = os.getenv(
            "REASONING_MODEL_ID", os.getenv("LLM_MODEL", "gpt-4o")
        )
        self.reasoning_host = os.getenv("REASONING_MODEL_HOST", os.getenv("LLM_HOST"))

        # Tool model: defaults to the reasoning model's settings when no
        # TOOL_MODEL_* overrides are present.
        self.tool_provider = os.getenv("TOOL_MODEL_PROVIDER", self.reasoning_provider)
        self.tool_id = os.getenv("TOOL_MODEL_ID", self.reasoning_id)
        self.tool_host = os.getenv("TOOL_MODEL_HOST", self.reasoning_host)

        # Lazily-built model instances, created on first access and cached.
        self._reasoning_model = None
        self._tool_model = None

        logger.info(
            f"🤖 ModelRouter initialized: Reasoning={self.reasoning_id} ({self.reasoning_host or 'default'}), Tool={self.tool_id} ({self.tool_host or 'default'})"
        )

    def get_reasoning_model(self, **kwargs) -> Model:
        """Return the reasoning model, building and caching it on first call.

        NOTE: ``kwargs`` only take effect on the first (cache-filling)
        call; subsequent calls return the cached instance unchanged.
        """
        # `is None` (not truthiness): a falsy-but-valid cached model must
        # not trigger a rebuild.
        if self._reasoning_model is None:
            # Prefer the host configured on the router unless the caller
            # explicitly supplied one.
            if self.reasoning_host and "host" not in kwargs:
                kwargs["host"] = self.reasoning_host
            self._reasoning_model = get_model(
                self.reasoning_provider, self.reasoning_id, **kwargs
            )
        return self._reasoning_model

    def get_tool_model(self, **kwargs) -> Model:
        """Return the tool-calling model, building and caching it on first call.

        Logs a warning when the configured model does not advertise native
        tool-call support (per ``ModelCapabilityRegistry``). NOTE: ``kwargs``
        only take effect on the first (cache-filling) call.
        """
        if self._tool_model is None:
            # Prefer the host configured on the router unless the caller
            # explicitly supplied one.
            if self.tool_host and "host" not in kwargs:
                kwargs["host"] = self.tool_host

            # Verify the configured tool model actually supports native
            # tool calls before handing it out.
            caps = ModelCapabilityRegistry.get_capabilities(
                self.tool_provider, self.tool_id, **kwargs
            )
            if not caps["supports_tool_call"]:
                logger.warning(
                    f"⚠️ Configured tool model {self.tool_id} might not support native tool calls! Consider using ReAct mode or a different model."
                )

            self._tool_model = get_model(self.tool_provider, self.tool_id, **kwargs)
        return self._tool_model

    def get_model_for_agent(self, has_tools: bool = False, **kwargs) -> Model:
        """Return the tool model when the agent has tools, else the reasoning model."""
        if has_tools:
            return self.get_tool_model(**kwargs)
        return self.get_reasoning_model(**kwargs)
|
||||
|
||||
|
||||
# Module-level singleton: import `router` to share one ModelRouter (and its
# cached model instances) across the process.
router = ModelRouter()
|
||||
Reference in New Issue
Block a user