"""Base classes for LLM-powered router chains."""

from __future__ import annotations

from typing import Any, Dict, List, Optional, Type, cast

from langchain_core._api import deprecated
from langchain_core.callbacks import (
    AsyncCallbackManagerForChainRun,
    CallbackManagerForChainRun,
)
from langchain_core.exceptions import OutputParserException
from langchain_core.language_models import BaseLanguageModel
from langchain_core.output_parsers import BaseOutputParser
from langchain_core.prompts import BasePromptTemplate
from langchain_core.utils.json import parse_and_check_json_markdown
from pydantic import model_validator
from typing_extensions import Self

from langchain.chains import LLMChain
from langchain.chains.router.base import RouterChain


@deprecated(
    since="0.2.12",
    removal="1.0",
    message=(
        "Use RunnableLambda to select from multiple prompt templates. See example "
        "in API reference: "
        "https://api.python.langchain.com/en/latest/chains/langchain.chains.router.llm_router.LLMRouterChain.html"
    ),
)
class LLMRouterChain(RouterChain):
    """A router chain that uses an LLM chain to perform routing.

    This class is deprecated. See below for a replacement, which offers several
    benefits, including streaming and batch support.

    Below is an example implementation:

        .. code-block:: python

            from operator import itemgetter
            from typing import Literal
            from typing_extensions import TypedDict

            from langchain_core.output_parsers import StrOutputParser
            from langchain_core.prompts import ChatPromptTemplate
            from langchain_core.runnables import RunnableLambda, RunnablePassthrough
            from langchain_openai import ChatOpenAI

            llm = ChatOpenAI(model="gpt-4o-mini")

            prompt_1 = ChatPromptTemplate.from_messages(
                [
                    ("system", "You are an expert on animals."),
                    ("human", "{query}"),
                ]
            )
            prompt_2 = ChatPromptTemplate.from_messages(
                [
                    ("system", "You are an expert on vegetables."),
                    ("human", "{query}"),
                ]
            )

            chain_1 = prompt_1 | llm | StrOutputParser()
            chain_2 = prompt_2 | llm | StrOutputParser()

            route_system = "Route the user's query to either the animal or vegetable expert."
            route_prompt = ChatPromptTemplate.from_messages(
                [
                    ("system", route_system),
                    ("human", "{query}"),
                ]
            )


            class RouteQuery(TypedDict):
                """Route query to destination."""
                destination: Literal["animal", "vegetable"]


            route_chain = (
                route_prompt
                | llm.with_structured_output(RouteQuery)
                | itemgetter("destination")
            )

            chain = {
                "destination": route_chain,  # "animal" or "vegetable"
                "query": lambda x: x["query"],  # pass through input query
            } | RunnableLambda(
                # if animal, chain_1. otherwise, chain_2.
                lambda x: chain_1 if x["destination"] == "animal" else chain_2,
            )

            chain.invoke({"query": "what color are carrots"})
    """

    llm_chain: LLMChain
    """LLM chain used to perform routing."""

    @model_validator(mode="after")
    def validate_prompt(self) -> Self:
        prompt = self.llm_chain.prompt
        if prompt.output_parser is None:
            raise ValueError(
                "LLMRouterChain requires base llm_chain prompt to have an output"
                " parser that converts LLM text output to a dictionary with keys"
                " 'destination' and 'next_inputs'. Received a prompt with no output"
                " parser."
            )
        return self

    @property
    def input_keys(self) -> List[str]:
        """Will be whatever keys the LLM chain prompt expects.

        :meta private:
        """
        return self.llm_chain.input_keys

    def _validate_outputs(self, outputs: Dict[str, Any]) -> None:
        super()._validate_outputs(outputs)
        if not isinstance(outputs["next_inputs"], dict):
            raise ValueError

    def _call(
        self,
        inputs: Dict[str, Any],
        run_manager: Optional[CallbackManagerForChainRun] = None,
    ) -> Dict[str, Any]:
        _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager()
        callbacks = _run_manager.get_child()
        prediction = self.llm_chain.predict(callbacks=callbacks, **inputs)
        output = cast(
            Dict[str, Any],
            self.llm_chain.prompt.output_parser.parse(prediction),
        )
        return output

    async def _acall(
        self,
        inputs: Dict[str, Any],
        run_manager: Optional[AsyncCallbackManagerForChainRun] = None,
    ) -> Dict[str, Any]:
        _run_manager = (
            run_manager or AsyncCallbackManagerForChainRun.get_noop_manager()
        )
        callbacks = _run_manager.get_child()
        output = cast(
            Dict[str, Any],
            await self.llm_chain.apredict_and_parse(callbacks=callbacks, **inputs),
        )
        return output

    @classmethod
    def from_llm(
        cls, llm: BaseLanguageModel, prompt: BasePromptTemplate, **kwargs: Any
    ) -> LLMRouterChain:
        """Convenience constructor."""
        llm_chain = LLMChain(llm=llm, prompt=prompt)
        return cls(llm_chain=llm_chain, **kwargs)


class RouterOutputParser(BaseOutputParser[Dict[str, str]]):
    """Parser for output of router chain in the multi-prompt chain."""

    default_destination: str = "DEFAULT"
    """Destination value that maps to the default (no specific) route."""
    next_inputs_type: Type = str
    """Expected type of the parsed 'next_inputs' value."""
    next_inputs_inner_key: str = "input"
    """Key under which 'next_inputs' is nested in the parsed output."""

    def parse(self, text: str) -> Dict[str, Any]:
        try:
            expected_keys = ["destination", "next_inputs"]
            parsed = parse_and_check_json_markdown(text, expected_keys)
            if not isinstance(parsed["destination"], str):
                raise ValueError("Expected 'destination' to be a string.")
            if not isinstance(parsed["next_inputs"], self.next_inputs_type):
                raise ValueError(
                    f"Expected 'next_inputs' to be {self.next_inputs_type}."
                )
            parsed["next_inputs"] = {self.next_inputs_inner_key: parsed["next_inputs"]}
            if (
                parsed["destination"].strip().lower()
                == self.default_destination.lower()
            ):
                parsed["destination"] = None
            else:
                parsed["destination"] = parsed["destination"].strip()
            return parsed
        except Exception as e:
            raise OutputParserException(
                f"Parsing text\n{text}\n raised following error:\n{e}"
            )
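

# Illustrative usage sketch (a hedged example, not part of the upstream module): it
# shows the JSON-markdown payload RouterOutputParser expects from the routing LLM and
# the shape of the parsed result. The sample query and the "animal" destination are
# hypothetical values chosen to mirror the docstring example above.
if __name__ == "__main__":
    _sample_llm_output = (
        "```json\n"
        '{"destination": "animal", "next_inputs": "what color are carrots"}\n'
        "```"
    )
    _parsed = RouterOutputParser().parse(_sample_llm_output)
    # A non-default destination is kept (stripped); "DEFAULT" would be mapped to None.
    # The raw next_inputs string is nested under the parser's next_inputs_inner_key.
    print(_parsed)
    # {'destination': 'animal', 'next_inputs': {'input': 'what color are carrots'}}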