All models can be called directly through the OpenAI-compatible ChatOpenAI class.
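
The snippet below reads its connection settings from a .env file via python-dotenv. As a rough sketch, the .env might contain entries like the following (the values are placeholders, not real credentials; only the variable names OPENAI_BASE_URL, OPENAI_API_KEY, and OPENAI_MODEL are taken from the code):

OPENAI_BASE_URL=https://api.openai.com/v1
OPENAI_API_KEY=sk-your-key-here
OPENAI_MODEL=gpt-4.1-nano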

from langchain_openai import ChatOpenAI
from dotenv import load_dotenv
import os
load_dotenv()
llm = ChatOpenAI(
    base_url=os.getenv('OPENAI_BASE_URL'),
    api_key=os.getenv('OPENAI_API_KEY'),
    model=os.getenv('OPENAI_MODEL'),
    temperature=0.5
)

res = llm.invoke('你好')
print(type(res))
print(res)
# Streaming
for chunk in llm.stream('你好'):
    print(chunk, '--', type(chunk))


Output:

<class 'langchain_core.messages.ai.AIMessage'>
content='你好!有什么我可以帮您的吗?' additional_kwargs={'refusal': None} response_metadata={'token_usage': {'completion_tokens': 9, 'prompt_tokens': 8, 'total_tokens': 17, 'completion_tokens_details': {'accepted_prediction_tokens': 0, 'audio_tokens': 0, 'reasoning_tokens': 0, 'rejected_prediction_tokens': 0}, 'prompt_tokens_details': {'audio_tokens': 0, 'cached_tokens': 0}}, 'model_provider': 'openai', 'model_name': 'gpt-4.1-nano-2025-04-14', 'system_fingerprint': 'fp_eb30fd4545', 'id': 'chatcmpl-CgSjOLntiTNwPC6ur3uFKQ8J41b2j', 'service_tier': 'default', 'finish_reason': 'stop', 'logprobs': None} id='lc_run--f52b4ee2-598d-429d-aeff-52b9c85d1893-0' usage_metadata={'input_tokens': 8, 'output_tokens': 9, 'total_tokens': 17, 'input_token_details': {'audio': 0, 'cache_read': 0}, 'output_token_details': {'audio': 0, 'reasoning': 0}}
-----------
content='' additional_kwargs={} response_metadata={'model_provider': 'openai'} id='lc_run--cb8806d7-3a2a-490b-ba41-31d627e8225f' -- <class 'langchain_core.messages.ai.AIMessageChunk'>
content='你好' additional_kwargs={} response_metadata={'model_provider': 'openai'} id='lc_run--cb8806d7-3a2a-490b-ba41-31d627e8225f' -- <class 'langchain_core.messages.ai.AIMessageChunk'>
content='!' additional_kwargs={} response_metadata={'model_provider': 'openai'} id='lc_run--cb8806d7-3a2a-490b-ba41-31d627e8225f' -- <class 'langchain_core.messages.ai.AIMessageChunk'>
content='有什么' additional_kwargs={} response_metadata={'model_provider': 'openai'} id='lc_run--cb8806d7-3a2a-490b-ba41-31d627e8225f' -- <class 'langchain_core.messages.ai.AIMessageChunk'>
content='我' additional_kwargs={} response_metadata={'model_provider': 'openai'} id='lc_run--cb8806d7-3a2a-490b-ba41-31d627e8225f' -- <class 'langchain_core.messages.ai.AIMessageChunk'>
content='可以' additional_kwargs={} response_metadata={'model_provider': 'openai'} id='lc_run--cb8806d7-3a2a-490b-ba41-31d627e8225f' -- <class 'langchain_core.messages.ai.AIMessageChunk'>
content='帮' additional_kwargs={} response_metadata={'model_provider': 'openai'} id='lc_run--cb8806d7-3a2a-490b-ba41-31d627e8225f' -- <class 'langchain_core.messages.ai.AIMessageChunk'>
content='你' additional_kwargs={} response_metadata={'model_provider': 'openai'} id='lc_run--cb8806d7-3a2a-490b-ba41-31d627e8225f' -- <class 'langchain_core.messages.ai.AIMessageChunk'>
content='的吗' additional_kwargs={} response_metadata={'model_provider': 'openai'} id='lc_run--cb8806d7-3a2a-490b-ba41-31d627e8225f' -- <class 'langchain_core.messages.ai.AIMessageChunk'>
content='?' additional_kwargs={} response_metadata={'model_provider': 'openai'} id='lc_run--cb8806d7-3a2a-490b-ba41-31d627e8225f' -- <class 'langchain_core.messages.ai.AIMessageChunk'>
content='' additional_kwargs={} response_metadata={'finish_reason': 'stop', 'model_name': 'gpt-4.1-nano-2025-04-14', 'system_fingerprint': 'fp_ef015fa747', 'service_tier': 'default', 'model_provider': 'openai'} id='lc_run--cb8806d7-3a2a-490b-ba41-31d627e8225f' chunk_position='last' -- <class 'langchain_core.messages.ai.AIMessageChunk'>
content='' additional_kwargs={} response_metadata={} id='lc_run--cb8806d7-3a2a-490b-ba41-31d627e8225f' usage_metadata={'input_tokens': 8, 'output_tokens': 9, 'total_tokens': 17, 'input_token_details': {'audio': 0, 'cache_read': 0}, 'output_token_details': {'audio': 0, 'reasoning': 0}} -- <class 'langchain_core.messages.ai.AIMessageChunk'>
content='' additional_kwargs={} response_metadata={} id='lc_run--cb8806d7-3a2a-490b-ba41-31d627e8225f' chunk_position='last' -- <class 'langchain_core.messages.ai.AIMessageChunk'>
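
Printing the raw chunk objects is handy for inspecting metadata, but in practice you usually only want the incremental text. A minimal sketch (reusing the same llm instance as above) that prints just each chunk's content as a continuous stream:

# Print only the incremental text of each chunk on a single line
for chunk in llm.stream('你好'):
    print(chunk.content, end='', flush=True)
print()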