from typing import Any, AsyncIterator, Dict, Iterator, List, Optional, cast

from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.language_models.chat_models import (
    BaseChatModel,
    agenerate_from_stream,
    generate_from_stream,
)
from langchain_core.messages import (
    AIMessage,
    AIMessageChunk,
    BaseMessage,
    ChatMessage,
    HumanMessage,
    SystemMessage,
)
from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
from langchain_core.prompt_values import PromptValue
from pydantic import ConfigDict

from langchain_community.llms.anthropic import _AnthropicCommon


def _convert_one_message_to_text(
    message: BaseMessage,
    human_prompt: str,
    ai_prompt: str,
) -> str:
    content = cast(str, message.content)
    if isinstance(message, ChatMessage):
        message_text = f"\n\n{message.role.capitalize()}: {content}"
    elif isinstance(message, HumanMessage):
        message_text = f"{human_prompt} {content}"
    elif isinstance(message, AIMessage):
        message_text = f"{ai_prompt} {content}"
    elif isinstance(message, SystemMessage):
        message_text = content
    else:
        raise ValueError(f"Got unknown type {message}")
    return message_text


def convert_messages_to_prompt_anthropic(
    messages: List[BaseMessage],
    *,
    human_prompt: str = "\n\nHuman:",
    ai_prompt: str = "\n\nAssistant:",
) -> str:
    """Format a list of messages into a full prompt for the Anthropic model.

    Args:
        messages (List[BaseMessage]): List of BaseMessage to combine.
        human_prompt (str, optional): Human prompt tag. Defaults to "\n\nHuman:".
        ai_prompt (str, optional): AI prompt tag. Defaults to "\n\nAssistant:".

    Returns:
        str: Combined string with necessary human_prompt and ai_prompt tags.
    """
    messages = messages.copy()  # don't mutate the original list
    if not isinstance(messages[-1], AIMessage):
        # The completion-style prompt must end with the AI tag for the model to answer.
        messages.append(AIMessage(content=""))

    text = "".join(
        _convert_one_message_to_text(message, human_prompt, ai_prompt)
        for message in messages
    )

    # Trim off the trailing " " left by the empty "Assistant: " turn.
    return text.rstrip()


@deprecated(
    since="0.0.28",
    removal="1.0",
    alternative_import="langchain_anthropic.ChatAnthropic",
)
class ChatAnthropic(BaseChatModel, _AnthropicCommon):
    """`Anthropic` chat large language models.

    To use, you should have the ``anthropic`` python package installed, and the
    environment variable ``ANTHROPIC_API_KEY`` set with your API key, or pass
    it as a named parameter to the constructor.

    Example:
        .. code-block:: python

            import anthropic
            from langchain_community.chat_models import ChatAnthropic
            model = ChatAnthropic(model="<model_name>", anthropic_api_key="my-api-key")
    """

    model_config = ConfigDict(
        populate_by_name=True,
        arbitrary_types_allowed=True,
    )

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {"anthropic_api_key": "ANTHROPIC_API_KEY"}

    @property
    def _llm_type(self) -> str:
        """Return type of chat model."""
        return "anthropic-chat"

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return whether this model can be serialized by Langchain."""
        return True

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "chat_models", "anthropic"]

    def _convert_messages_to_prompt(self, messages: List[BaseMessage]) -> str:
        """Format a list of messages into a full prompt for the Anthropic model.

        Args:
            messages (List[BaseMessage]): List of BaseMessage to combine.

        Returns:
            str: Combined string with necessary HUMAN_PROMPT and AI_PROMPT tags.
        """
        prompt_params = {}
        if self.HUMAN_PROMPT:
            prompt_params["human_prompt"] = self.HUMAN_PROMPT
        if self.AI_PROMPT:
            prompt_params["ai_prompt"] = self.AI_PROMPT
        return convert_messages_to_prompt_anthropic(messages=messages, **prompt_params)

    def convert_prompt(self, prompt: PromptValue) -> str:
        return self._convert_messages_to_prompt(prompt.to_messages())

    def _stream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        prompt = self._convert_messages_to_prompt(messages)
        params: Dict[str, Any] = {"prompt": prompt, **self._default_params, **kwargs}
        if stop:
            params["stop_sequences"] = stop

        # Stream raw completion deltas and surface them as chat generation chunks.
        stream_resp = self.client.completions.create(**params, stream=True)
        for data in stream_resp:
            delta = data.completion
            chunk = ChatGenerationChunk(message=AIMessageChunk(content=delta))
            if run_manager:
                run_manager.on_llm_new_token(delta, chunk=chunk)
            yield chunk

    async def _astream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> AsyncIterator[ChatGenerationChunk]:
        prompt = self._convert_messages_to_prompt(messages)
        params: Dict[str, Any] = {"prompt": prompt, **self._default_params, **kwargs}
        if stop:
            params["stop_sequences"] = stop

        stream_resp = await self.async_client.completions.create(**params, stream=True)
        async for data in stream_resp:
            delta = data.completion
            chunk = ChatGenerationChunk(message=AIMessageChunk(content=delta))
            if run_manager:
                await run_manager.on_llm_new_token(delta, chunk=chunk)
            yield chunk

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        if self.streaming:
            # When streaming is enabled, aggregate the streamed chunks into one result.
            stream_iter = self._stream(
                messages, stop=stop, run_manager=run_manager, **kwargs
            )
            return generate_from_stream(stream_iter)
        prompt = self._convert_messages_to_prompt(messages)
        params: Dict[str, Any] = {"prompt": prompt, **self._default_params, **kwargs}
        if stop:
            params["stop_sequences"] = stop
        response = self.client.completions.create(**params)
        completion = response.completion
        message = AIMessage(content=completion)
        return ChatResult(generations=[ChatGeneration(message=message)])

    async def _agenerate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        if self.streaming:
            stream_iter = self._astream(
                messages, stop=stop, run_manager=run_manager, **kwargs
            )
            return await agenerate_from_stream(stream_iter)
        prompt = self._convert_messages_to_prompt(messages)
        params: Dict[str, Any] = {"prompt": prompt, **self._default_params, **kwargs}
        if stop:
            params["stop_sequences"] = stop
        response = await self.async_client.completions.create(**params)
        completion = response.completion
        message = AIMessage(content=completion)
        return ChatResult(generations=[ChatGeneration(message=message)])

    def get_num_tokens(self, text: str) -> int:
        """Calculate number of tokens."""
        if not self.count_tokens:
            raise NameError("Please ensure the anthropic package is loaded")
        return self.count_tokens(text)
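
# --- Usage sketch (illustrative; not part of the upstream module) ---------------
# A minimal example of the prompt-formatting helper defined above. The message
# contents below are invented for demonstration only; no Anthropic API key or
# network access is needed, because only the pure string-formatting path runs.
if __name__ == "__main__":
    demo_messages = [
        SystemMessage(content="You are a terse assistant."),
        HumanMessage(content="What is the capital of France?"),
    ]
    # Expected (roughly): the system text, then "\n\nHuman: ...", then a trailing
    # "\n\nAssistant:" tag, appended because the list does not end with an AIMessage.
    print(convert_messages_to_prompt_anthropic(demo_messages))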