import re
import warnings
from typing import (
    Any,
    AsyncIterator,
    Callable,
    Dict,
    Iterator,
    List,
    Mapping,
    Optional,
)

from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.language_models import BaseLanguageModel
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.prompt_values import PromptValue
from langchain_core.utils import (
    check_package_version,
    get_from_dict_or_env,
    get_pydantic_field_names,
    pre_init,
)
from langchain_core.utils.utils import _build_model_kwargs, convert_to_secret_str
from pydantic import ConfigDict, Field, SecretStr, model_validator


class _AnthropicCommon(BaseLanguageModel):
    client: Any = None  #: :meta private:
    async_client: Any = None  #: :meta private:

    model: str = Field(default="claude-2", alias="model_name")
    """Model name to use."""

    max_tokens_to_sample: int = Field(default=1024, alias="max_tokens")
    """Denotes the number of tokens to predict per generation."""

    temperature: Optional[float] = None
    """A non-negative float that tunes the degree of randomness in generation."""

    top_k: Optional[int] = None
    """Number of most likely tokens to consider at each step."""

    top_p: Optional[float] = None
    """Total probability mass of tokens to consider at each step."""

    streaming: bool = False
    """Whether to stream the results."""

    default_request_timeout: Optional[float] = None
    """Timeout for requests to the Anthropic Completion API."""

    max_retries: int = 2
    """Number of retries allowed for requests sent to the Anthropic Completion API."""

    anthropic_api_url: Optional[str] = None

    anthropic_api_key: Optional[SecretStr] = None

    HUMAN_PROMPT: Optional[str] = None
    AI_PROMPT: Optional[str] = None
    count_tokens: Optional[Callable[[str], int]] = None
    model_kwargs: Dict[str, Any] = Field(default_factory=dict)

    @model_validator(mode="before")
    @classmethod
    def build_extra(cls, values: Dict) -> Any:
        # Collect any extra keyword arguments into `model_kwargs`.
        all_required_field_names = get_pydantic_field_names(cls)
        values = _build_model_kwargs(values, all_required_field_names)
        return values

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that the api key and python package exist in the environment."""
        values["anthropic_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "anthropic_api_key", "ANTHROPIC_API_KEY")
        )
        # Get a custom api url from the environment if one is set.
        values["anthropic_api_url"] = get_from_dict_or_env(
            values,
            "anthropic_api_url",
            "ANTHROPIC_API_URL",
            default="https://api.anthropic.com",
        )

        try:
            import anthropic

            check_package_version("anthropic", gte_version="0.3")
            values["client"] = anthropic.Anthropic(
                base_url=values["anthropic_api_url"],
                api_key=values["anthropic_api_key"].get_secret_value(),
                timeout=values["default_request_timeout"],
                max_retries=values["max_retries"],
            )
            values["async_client"] = anthropic.AsyncAnthropic(
                base_url=values["anthropic_api_url"],
                api_key=values["anthropic_api_key"].get_secret_value(),
                timeout=values["default_request_timeout"],
                max_retries=values["max_retries"],
            )
            values["HUMAN_PROMPT"] = anthropic.HUMAN_PROMPT
            values["AI_PROMPT"] = anthropic.AI_PROMPT
            values["count_tokens"] = values["client"].count_tokens
        except ImportError:
            raise ImportError(
                "Could not import anthropic python package. "
                "Please install it with `pip install anthropic`."
            )
        return values

    @property
    def _default_params(self) -> Mapping[str, Any]:
        """Get the default parameters for calling the Anthropic API."""
        d = {
            "max_tokens_to_sample": self.max_tokens_to_sample,
            "model": self.model,
        }
        if self.temperature is not None:
            d["temperature"] = self.temperature
        if self.top_k is not None:
            d["top_k"] = self.top_k
        if self.top_p is not None:
            d["top_p"] = self.top_p
        return {**d, **self.model_kwargs}

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {**{}, **self._default_params}

    def _get_anthropic_stop(self, stop: Optional[List[str]] = None) -> List[str]:
        if not self.HUMAN_PROMPT or not self.AI_PROMPT:
            raise NameError("Please ensure the anthropic package is loaded")

        if stop is None:
            stop = []

        # Never want the model to invent new turns of Human / Assistant dialog.
        stop.extend([self.HUMAN_PROMPT])

        return stop


@deprecated(
    since="0.0.28",
    removal="1.0",
    alternative_import="langchain_anthropic.AnthropicLLM",
)
class Anthropic(LLM, _AnthropicCommon):
    """Anthropic large language models.

    To use, you should have the ``anthropic`` python package installed, and the
    environment variable ``ANTHROPIC_API_KEY`` set with your API key, or pass
    it as a named parameter to the constructor.

    Example:
        .. code-block:: python

            import anthropic
            from langchain_community.llms import Anthropic

            model = Anthropic(model="<model_name>", anthropic_api_key="my-api-key")

            # Simplest invocation, automatically wrapped with HUMAN_PROMPT
            # and AI_PROMPT.
            response = model.invoke("What are the biggest risks facing humanity?")

            # Or if you want to use the chat mode, build a few-shot-prompt, or
            # put words in the Assistant's mouth, use HUMAN_PROMPT and AI_PROMPT:
            raw_prompt = "What are the biggest risks facing humanity?"
            prompt = f"{anthropic.HUMAN_PROMPT} {raw_prompt}{anthropic.AI_PROMPT}"
            response = model.invoke(prompt)
    """

    model_config = ConfigDict(
        populate_by_name=True,
        arbitrary_types_allowed=True,
    )

    @pre_init
    def raise_warning(cls, values: Dict) -> Dict:
        """Raise a warning that this class is deprecated."""
        warnings.warn(
            "This Anthropic LLM is deprecated. Please use "
            "`from langchain_community.chat_models import ChatAnthropic` instead"
        )
        return values

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "anthropic-llm"

    def _wrap_prompt(self, prompt: str) -> str:
        if not self.HUMAN_PROMPT or not self.AI_PROMPT:
            raise NameError("Please ensure the anthropic package is loaded")

        if prompt.startswith(self.HUMAN_PROMPT):
            return prompt  # Already wrapped.

        # Guard against a common error of writing "Human:" without the
        # human-prompt prefix the API expects.
        corrected_prompt, n_subs = re.subn(r"^\n*Human:", self.HUMAN_PROMPT, prompt)
        if n_subs == 1:
            return corrected_prompt

        # As a last resort, wrap the prompt ourselves to emulate instruct-style.
        return f"{self.HUMAN_PROMPT} {prompt}{self.AI_PROMPT} Sure, here you go:\n"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        r"""Call out to Anthropic's completion endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                prompt = "What are the biggest risks facing humanity?"
                prompt = f"\n\nHuman: {prompt}\n\nAssistant:"
                response = model.invoke(prompt)
        """
        if self.streaming:
            completion = ""
            for chunk in self._stream(
                prompt=prompt, stop=stop, run_manager=run_manager, **kwargs
            ):
                completion += chunk.text
            return completion

        stop = self._get_anthropic_stop(stop)
        params = {**self._default_params, **kwargs}
        response = self.client.completions.create(
            prompt=self._wrap_prompt(prompt),
            stop_sequences=stop,
            **params,
        )
        return response.completion

    def convert_prompt(self, prompt: PromptValue) -> str:
        return self._wrap_prompt(prompt.to_string())

    async def _acall(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call out to Anthropic's completion endpoint asynchronously."""
        if self.streaming:
            completion = ""
            async for chunk in self._astream(
                prompt=prompt, stop=stop, run_manager=run_manager, **kwargs
            ):
                completion += chunk.text
            return completion

        stop = self._get_anthropic_stop(stop)
        params = {**self._default_params, **kwargs}
        response = await self.async_client.completions.create(
            prompt=self._wrap_prompt(prompt),
            stop_sequences=stop,
            **params,
        )
        return response.completion

    def _stream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[GenerationChunk]:
        r"""Call Anthropic completion_stream and return the resulting generator.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            A generator representing the stream of tokens from Anthropic.

        Example:
            .. code-block:: python

                prompt = "Write a poem about a stream."
                prompt = f"\n\nHuman: {prompt}\n\nAssistant:"
                generator = anthropic.stream(prompt)
                for token in generator:
                    yield token
        """
        stop = self._get_anthropic_stop(stop)
        params = {**self._default_params, **kwargs}

        for token in self.client.completions.create(
            prompt=self._wrap_prompt(prompt),
            stop_sequences=stop,
            stream=True,
            **params,
        ):
            chunk = GenerationChunk(text=token.completion)

            if run_manager:
                run_manager.on_llm_new_token(chunk.text, chunk=chunk)
            yield chunk

    async def _astream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> AsyncIterator[GenerationChunk]:
        r"""Call Anthropic completion_stream and return the resulting generator.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            A generator representing the stream of tokens from Anthropic.

        Example:
            .. code-block:: python

                prompt = "Write a poem about a stream."
                prompt = f"\n\nHuman: {prompt}\n\nAssistant:"
                generator = anthropic.stream(prompt)
                for token in generator:
                    yield token
        """
        stop = self._get_anthropic_stop(stop)
        params = {**self._default_params, **kwargs}

        async for token in await self.async_client.completions.create(
            prompt=self._wrap_prompt(prompt),
            stop_sequences=stop,
            stream=True,
            **params,
        ):
            chunk = GenerationChunk(text=token.completion)
            if run_manager:
                await run_manager.on_llm_new_token(chunk.text, chunk=chunk)
            yield chunk

    def get_num_tokens(self, text: str) -> int:
        """Calculate number of tokens."""
        if not self.count_tokens:
            raise NameError("Please ensure the anthropic package is loaded")
        return self.count_tokens(text)