"""Shared LLM model registry for agents.

Configures multiple model backends (OpenAI, OpenRouter, Ollama-compatible)
and exposes typed model objects for use with the Agents SDK.

Environment variables:

- ``OPENAI_API_KEY``
- ``OPENROUTER_API_KEY``

:ivar AGENT_MODEL: Default chat model for text agents.
:ivar MULTIMODAL_MODEL: Default model for multimodal agents.
"""

import os
from typing import Optional, TYPE_CHECKING

from dotenv import load_dotenv

if TYPE_CHECKING:
    from agents import OpenAIChatCompletionsModel
    from openai import AsyncOpenAI

load_dotenv(override=True)

# Lazy client initialization — all backends are created on first use so that
# importing this module stays cheap and does not require credentials.
_openai_client: Optional["AsyncOpenAI"] = None
_ollama_client: Optional["AsyncOpenAI"] = None
_open_router: Optional["AsyncOpenAI"] = None
_agent_model: Optional["OpenAIChatCompletionsModel"] = None
_multimodal_model: Optional["OpenAIChatCompletionsModel"] = None


def _make_client(base_url: str, api_key: Optional[str]) -> "AsyncOpenAI":
    """Build an ``AsyncOpenAI`` client bound to *base_url*.

    The ``openai`` import is deferred so that merely importing this module
    does not require the SDK to be installed.

    :param base_url: Root URL of the OpenAI-compatible API.
    :param api_key: API key for the backend (may be ``None`` if the
        corresponding environment variable is unset).
    :returns: A configured ``AsyncOpenAI`` client.
    """
    from openai import AsyncOpenAI

    return AsyncOpenAI(base_url=base_url, api_key=api_key)


def _get_openai_client() -> "AsyncOpenAI":
    """Lazy initialization of the OpenAI client."""
    global _openai_client
    if _openai_client is None:
        _openai_client = _make_client(
            "https://api.openai.com/v1", os.getenv("OPENAI_API_KEY")
        )
    return _openai_client


def _get_ollama_client() -> "AsyncOpenAI":
    """Lazy initialization of the Ollama client (local OpenAI-compatible API)."""
    global _ollama_client
    if _ollama_client is None:
        # Ollama ignores the key, but the client requires a non-empty value.
        _ollama_client = _make_client("http://localhost:11434/v1", "ollama")
    return _ollama_client


def _get_open_router() -> "AsyncOpenAI":
    """Lazy initialization of the OpenRouter client."""
    global _open_router
    if _open_router is None:
        _open_router = _make_client(
            "https://openrouter.ai/api/v1", os.getenv("OPENROUTER_API_KEY")
        )
    return _open_router
def get_agent_model() -> "OpenAIChatCompletionsModel":
    """Return the default agent model, creating it on first use."""
    global _agent_model
    if _agent_model is not None:
        return _agent_model

    from agents import OpenAIChatCompletionsModel

    _agent_model = OpenAIChatCompletionsModel(
        model="deepseek/deepseek-chat-v3-0324:free",
        openai_client=_get_open_router(),
    )
    return _agent_model
def get_multimodal_model() -> "OpenAIChatCompletionsModel":
    """Return the default multimodal model, creating it on first use."""
    global _multimodal_model
    if _multimodal_model is not None:
        return _multimodal_model

    from agents import OpenAIChatCompletionsModel

    _multimodal_model = OpenAIChatCompletionsModel(
        model="gpt-4o-mini",
        openai_client=_get_openai_client(),
    )
    return _multimodal_model
# For backward compatibilityAGENT_MODEL=property(lambdaself:get_agent_model())MULTIMODAL_MODEL=property(lambdaself:get_multimodal_model())