
"""Wrapper around Moonshot chat models."""

from typing import Dict

from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init

from langchain_community.chat_models import ChatOpenAI
from langchain_community.llms.moonshot import MOONSHOT_SERVICE_URL_BASE, MoonshotCommon


class MoonshotChat(MoonshotCommon, ChatOpenAI):
    """Moonshot chat model integration.

Setup:
    Install ``openai`` and set the environment variable ``MOONSHOT_API_KEY``.

    .. code-block:: bash

        pip install openai
        export MOONSHOT_API_KEY="your-api-key"

Key init args — completion params:
    model: str
        Name of Moonshot model to use.
    temperature: float
        Sampling temperature.
    max_tokens: Optional[int]
        Max number of tokens to generate.

Key init args — client params:
    api_key: Optional[str]
        Moonshot API key. If not passed in, it is read from the env var
        ``MOONSHOT_API_KEY`` (see the env-var sketch under Instantiate).
    api_base: Optional[str]
        Base URL for API requests.

See full list of supported init args and their descriptions in the params section.

Instantiate:
    .. code-block:: python

        from langchain_community.chat_models import MoonshotChat

        chat = MoonshotChat(
            temperature=0.5,
            api_key="your-api-key",
            model="moonshot-v1-8k",
            # api_base="...",
            # other params...
        )
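
    The key can also be supplied via the environment alone. A minimal sketch,
    assuming ``MOONSHOT_API_KEY`` is exported as shown in Setup:

    .. code-block:: python

        # api_key is omitted and read from the MOONSHOT_API_KEY env var
        chat = MoonshotChat(model="moonshot-v1-8k")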

Invoke:
    .. code-block:: python

        # System prompt (Chinese): "You are a professional translator;
        # translate the user's Chinese into English."
        # Human message (Chinese): "I like programming."
        messages = [
            ("system", "你是一名专业的翻译家，可以将用户的中文翻译为英文。"),
            ("human", "我喜欢编程。"),
        ]
        chat.invoke(messages)

    .. code-block:: python

        AIMessage(
            content='I like programming.',
            additional_kwargs={},
            response_metadata={
                'token_usage': {
                    'completion_tokens': 5,
                    'prompt_tokens': 27,
                    'total_tokens': 32
                },
                'model_name': 'moonshot-v1-8k',
                'system_fingerprint': None,
                'finish_reason': 'stop',
                'logprobs': None
            },
            id='run-71c03f4e-6628-41d5-beb6-d2559ae68266-0'
        )

Stream:
    .. code-block:: python

        for chunk in chat.stream(messages):
            print(chunk)

    .. code-block:: python

        content='' additional_kwargs={} response_metadata={} id='run-80d77096-8b83-4c39-a84d-71d9c746da92'
        content='I' additional_kwargs={} response_metadata={} id='run-80d77096-8b83-4c39-a84d-71d9c746da92'
        content=' like' additional_kwargs={} response_metadata={} id='run-80d77096-8b83-4c39-a84d-71d9c746da92'
        content=' programming' additional_kwargs={} response_metadata={} id='run-80d77096-8b83-4c39-a84d-71d9c746da92'
        content='.' additional_kwargs={} response_metadata={} id='run-80d77096-8b83-4c39-a84d-71d9c746da92'
        content='' additional_kwargs={} response_metadata={'finish_reason': 'stop'} id='run-80d77096-8b83-4c39-a84d-71d9c746da92'

    .. code-block:: python

        stream = chat.stream(messages)
        full = next(stream)
        for chunk in stream:
            full += chunk
        full

    .. code-block:: python

        AIMessageChunk(
            content='I like programming.',
            additional_kwargs={},
            response_metadata={'finish_reason': 'stop'},
            id='run-10c80976-7aa5-4ff7-ba3e-1251665557ef'
        )

Async:
    .. code-block:: python

        await chat.ainvoke(messages)

        # stream:
        # async for chunk in chat.astream(messages):
        #    print(chunk)

        # batch:
        # await chat.abatch([messages])

    .. code-block:: python

        [AIMessage(content='I like programming.', additional_kwargs={}, response_metadata={'token_usage': {'completion_tokens': 5, 'prompt_tokens': 27, 'total_tokens': 32}, 'model_name': 'moonshot-v1-8k', 'system_fingerprint': None, 'finish_reason': 'stop', 'logprobs': None}, id='run-2938b005-9204-4b9f-b273-1c3272fce9e5-0')]

Response metadata:
    .. code-block:: python

        ai_msg = chat.invoke(messages)
        ai_msg.response_metadata

    .. code-block:: python

        {
            'token_usage': {
                'completion_tokens': 5,
                'prompt_tokens': 27,
                'total_tokens': 32
            },
            'model_name': 'moonshot-v1-8k',
            'system_fingerprint': None,
            'finish_reason': 'stop',
            'logprobs': None
        }
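
    Token usage can be read directly from this metadata. A small usage sketch
    based on the example output above:

    .. code-block:: python

        ai_msg.response_metadata["token_usage"]["total_tokens"]

    .. code-block:: python

        32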

valuesreturnc                    [        [        U/ SQS5      5      US'    SSKnUS   R	                  5       SU;   a  US   O[
        S.nUR                  S	5      (       d)  UR                  " S0 UD6R                  R                  US	'   UR                  S
5      (       d)  UR                  " S0 UD6R                  R                  US
'   U$ ! [         a    [        S5      ef = f)z2Validate that the environment is set up correctly.)moonshot_api_keyapi_keyopenai_api_keyMOONSHOT_API_KEYr   r   NzTCould not import openai python package. Please install it with `pip install openai`.base_url)r   r   clientasync_client )r   r   openaiImportErrorget_secret_valuer   getOpenAIchatcompletionsAsyncOpenAI)clsr   r   client_paramss       `/var/www/html/shao/venv/lib/python3.13/site-packages/langchain_community/chat_models/moonshot.pyvalidate_environment!MoonshotChat.validate_environment   s     &; A"&
!"	 01BBDV# z**	
 zz(##%}}=}=BBNNF8zz.))%+%7%7 &&d;; >" )  	? 	s   C Cr   N)	__name__
__module____qualname____firstlineno____doc__r   r   r#   __static_attributes__r       r"   r   r      s+    GR !$ !4 ! !r+   r   N)r)   typingr   langchain_core.utilsr   r   r   langchain_community.chat_modelsr   !langchain_community.llms.moonshotr   r	   r   r   r+   r"   <module>r0      s-    *   7 Wl>: lr+   