
"""Chain that combines documents by stuffing into context."""

from typing import Any, Optional

from langchain_core._api import deprecated
from langchain_core.callbacks import Callbacks
from langchain_core.documents import Document
from langchain_core.language_models import LanguageModelLike
from langchain_core.output_parsers import BaseOutputParser, StrOutputParser
from langchain_core.prompts import BasePromptTemplate, format_document
from langchain_core.runnables import Runnable, RunnablePassthrough
from pydantic import ConfigDict, Field, model_validator

from langchain.chains.combine_documents.base import (
    DEFAULT_DOCUMENT_PROMPT,
    DEFAULT_DOCUMENT_SEPARATOR,
    DOCUMENTS_KEY,
    BaseCombineDocumentsChain,
    _validate_prompt,
)
from langchain.chains.llm import LLMChain


def create_stuff_documents_chain(
    llm: LanguageModelLike,
    prompt: BasePromptTemplate,
    *,
    output_parser: Optional[BaseOutputParser] = None,
    document_prompt: Optional[BasePromptTemplate] = None,
    document_separator: str = DEFAULT_DOCUMENT_SEPARATOR,
    document_variable_name: str = DOCUMENTS_KEY,
) -> Runnable[dict[str, Any], Any]:
    """Create a chain for passing a list of Documents to a model.

    Args:
        llm: Language model.
        prompt: Prompt template. Must contain input variable "context" (override by
            setting document_variable_name), which will be used for passing in the
            formatted documents.
        output_parser: Output parser. Defaults to StrOutputParser.
        document_prompt: Prompt used for formatting each document into a string. Input
            variables can be "page_content" or any metadata keys that are in all
            documents. "page_content" will automatically retrieve the
            `Document.page_content`, and all other input variables will be
            automatically retrieved from the `Document.metadata` dictionary. Defaults
            to a prompt that only contains `Document.page_content`.
        document_separator: String separator to use between formatted document strings.
        document_variable_name: Variable name to use for the formatted documents in the
            prompt. Defaults to "context".

    Returns:
        An LCEL Runnable. The input is a dictionary that must have a "context" key that
        maps to a List[Document], and any other input variables expected in the prompt.
        The Runnable return type depends on the output_parser used.

    Example:
        .. code-block:: python

            # pip install -U langchain langchain-community

            from langchain_community.chat_models import ChatOpenAI
            from langchain_core.documents import Document
            from langchain_core.prompts import ChatPromptTemplate
            from langchain.chains.combine_documents import create_stuff_documents_chain

            prompt = ChatPromptTemplate.from_messages(
                [("system", "What are everyone's favorite colors:\n\n{context}")]
            )
            llm = ChatOpenAI(model="gpt-3.5-turbo")
            chain = create_stuff_documents_chain(llm, prompt)

            docs = [
                Document(page_content="Jesse loves red but not yellow"),
                Document(page_content="Jamal loves green but not as much as he loves orange"),
            ]

            chain.invoke({"context": docs})
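
        The returned object is a standard LCEL Runnable, so the same chain can also
        be streamed or batched. A minimal sketch, reusing `chain` and `docs` from the
        example above (the exact chunking of the streamed output depends on the model):

        .. code-block:: python

            for chunk in chain.stream({"context": docs}):
                print(chunk, end="", flush=True)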
    """
    _validate_prompt(prompt, document_variable_name)
    _document_prompt = document_prompt or DEFAULT_DOCUMENT_PROMPT
    _output_parser = output_parser or StrOutputParser()

    def format_docs(inputs: dict) -> str:
        return document_separator.join(
            format_document(doc, _document_prompt)
            for doc in inputs[document_variable_name]
        )

    return (
        RunnablePassthrough.assign(**{document_variable_name: format_docs}).with_config(
            run_name="format_inputs"
        )
        | prompt
        | llm
        | _output_parser
    ).with_config(run_name="stuff_documents_chain")


@deprecated(
    since="0.2.13",
    removal="1.0",
    message=(
        "This class is deprecated. Use the `create_stuff_documents_chain` constructor "
        "instead. See migration guide here: "
        "https://python.langchain.com/docs/versions/migrating_chains/stuff_docs_chain/"
    ),
)
class StuffDocumentsChain(BaseCombineDocumentsChain):
    """Chain that combines documents by stuffing into context.

    This chain takes a list of documents and first combines them into a single string.
    It does this by formatting each document into a string with the `document_prompt`
    and then joining them together with `document_separator`. It then adds that new
    string to the inputs with the variable name set by `document_variable_name`.
    Those inputs are then passed to the `llm_chain`.

    Example:
        .. code-block:: python

            from langchain.chains import StuffDocumentsChain, LLMChain
            from langchain_core.prompts import PromptTemplate
            from langchain_community.llms import OpenAI

            # This controls how each document will be formatted. Specifically,
            # it will be passed to `format_document` - see that function for more
            # details.
            document_prompt = PromptTemplate(
                input_variables=["page_content"],
                template="{page_content}"
            )
            document_variable_name = "context"
            llm = OpenAI()
            # The prompt here should take as an input variable the
            # `document_variable_name`
            prompt = PromptTemplate.from_template(
                "Summarize this content: {context}"
            )
            llm_chain = LLMChain(llm=llm, prompt=prompt)
            chain = StuffDocumentsChain(
                llm_chain=llm_chain,
                document_prompt=document_prompt,
                document_variable_name=document_variable_name
            )
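
        Once constructed, the chain is invoked like any other chain. A brief sketch,
        assuming `docs` is a list of `Document` objects (the `input_documents` and
        `output_text` keys come from the combine-documents base chain):

        .. code-block:: python

            result = chain.invoke({"input_documents": docs})
            summary = result["output_text"]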
    """

    llm_chain: LLMChain
    """LLM chain which is called with the formatted document string,
    along with any other inputs."""
    document_prompt: BasePromptTemplate = Field(
        default_factory=lambda: DEFAULT_DOCUMENT_PROMPT
    )
    """Prompt to use to format each document, gets passed to `format_document`."""
    document_variable_name: str
    """The variable name in the llm_chain to put the documents in.
    If only one variable in the llm_chain, this need not be provided."""
    document_separator: str = "\n\n"
    """The string with which to join the formatted documents"""

    model_config = ConfigDict(
        arbitrary_types_allowed=True,
        extra="forbid",
    )

    @model_validator(mode="before")
    @classmethod
    def get_default_document_variable_name(cls, values: dict) -> Any:
        """Get default document variable name, if not provided.

        If only one variable is present in the llm_chain.prompt,
        we can infer that the formatted documents should be passed in
        with this variable name.
        """
        llm_chain_variables = values["llm_chain"].prompt.input_variables
        if "document_variable_name" not in values:
            if len(llm_chain_variables) == 1:
                values["document_variable_name"] = llm_chain_variables[0]
            else:
                msg = (
                    "document_variable_name must be provided if there are "
                    "multiple llm_chain_variables"
                )
                raise ValueError(msg)
        elif values["document_variable_name"] not in llm_chain_variables:
            msg = (
                f"document_variable_name {values['document_variable_name']} was "
                f"not found in llm_chain input_variables: {llm_chain_variables}"
            )
            raise ValueError(msg)
        return values

    @property
    def input_keys(self) -> list[str]:
        extra_keys = [
            k for k in self.llm_chain.input_keys if k != self.document_variable_name
        ]
        return super().input_keys + extra_keys

    def _get_inputs(self, docs: list[Document], **kwargs: Any) -> dict:
        """Construct inputs from kwargs and docs.

        Format and then join all the documents together into one input with name
        `self.document_variable_name`. Also pluck any additional variables
        from **kwargs.

        Args:
            docs: List of documents to format and then join into single input
            **kwargs: additional inputs to chain, will pluck any other required
                arguments from here.

        Returns:
            dictionary of inputs to LLMChain
        """
        # Format each document according to the document prompt.
        doc_strings = [format_document(doc, self.document_prompt) for doc in docs]
        # Keep only the kwargs that the llm_chain prompt actually expects.
        inputs = {
            k: v
            for k, v in kwargs.items()
            if k in self.llm_chain.prompt.input_variables
        }
        # Join the formatted documents and place them in the document variable.
        inputs[self.document_variable_name] = self.document_separator.join(doc_strings)
        return inputs

    def prompt_length(self, docs: list[Document], **kwargs: Any) -> Optional[int]:
        """Return the prompt length given the documents passed in.

        This can be used by a caller to determine whether passing in a list
        of documents would exceed a certain prompt length. This is useful when
        trying to ensure that the size of a prompt remains below a certain
        context limit.

        Args:
            docs: List[Document], a list of documents to use to calculate the
                total prompt length.

        Returns:
            Returns None if the method does not depend on the prompt length,
            otherwise the length of the prompt in tokens.
        """
        inputs = self._get_inputs(docs, **kwargs)
        prompt = self.llm_chain.prompt.format(**inputs)
        return self.llm_chain._get_num_tokens(prompt)

    def combine_docs(
        self, docs: list[Document], callbacks: Callbacks = None, **kwargs: Any
    ) -> tuple[str, dict]:
        """Stuff all documents into one prompt and pass to LLM.

        Args:
            docs: List of documents to join together into one variable
            callbacks: Optional callbacks to pass along
            **kwargs: additional parameters to use to get inputs to LLMChain.

        Returns:
            The first element returned is the single string output. The second
            element returned is a dictionary of other keys to return.
        """
        inputs = self._get_inputs(docs, **kwargs)
        # Call predict on the LLM.
        return self.llm_chain.predict(callbacks=callbacks, **inputs), {}

    async def acombine_docs(
        self, docs: list[Document], callbacks: Callbacks = None, **kwargs: Any
    ) -> tuple[str, dict]:
        """Async stuff all documents into one prompt and pass to LLM.

        Args:
            docs: List of documents to join together into one variable
            callbacks: Optional callbacks to pass along
            **kwargs: additional parameters to use to get inputs to LLMChain.

        Returns:
            The first element returned is the single string output. The second
            element returned is a dictionary of other keys to return.
        """
        inputs = self._get_inputs(docs, **kwargs)
        # Call apredict on the LLM.
        return await self.llm_chain.apredict(callbacks=callbacks, **inputs), {}

    @property
    def _chain_type(self) -> str:
        return "stuff_documents_chain"