"""EverlyAI Endpoints chat wrapper. Relies heavily on ChatOpenAI."""

from __future__ import annotations

import logging
import sys
import warnings
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    Optional,
    Sequence,
    Set,
    Type,
    Union,
)

from langchain_core.messages import BaseMessage
from langchain_core.tools import BaseTool
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from pydantic import Field, model_validator

from langchain_community.adapters.openai import convert_message_to_dict
from langchain_community.chat_models.openai import ChatOpenAI, _import_tiktoken

if TYPE_CHECKING:
    import tiktoken

logger = logging.getLogger(__name__)

DEFAULT_API_BASE = "https://everlyai.xyz/hosted"
DEFAULT_MODEL = "meta-llama/Llama-2-7b-chat-hf"


class ChatEverlyAI(ChatOpenAI):
    """`EverlyAI` Chat large language models.

    To use, you should have the ``openai`` python package installed, and the
    environment variable ``EVERLYAI_API_KEY`` set with your API key.
    Alternatively, you can use the everlyai_api_key keyword argument.

    Any parameters that are valid to be passed to the `openai.create` call
    can be passed in, even if not explicitly saved on this class.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import ChatEverlyAI
            chat = ChatEverlyAI(model_name="meta-llama/Llama-2-7b-chat-hf")
    """

    @property
    def _llm_type(self) -> str:
        """Return type of chat model."""
        return "everlyai-chat"

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {"everlyai_api_key": "EVERLYAI_API_KEY"}

    @classmethod
    def is_lc_serializable(cls) -> bool:
        return False

    everlyai_api_key: Optional[str] = None
    """EverlyAI Endpoints API key."""
    model_name: str = Field(default=DEFAULT_MODEL, alias="model")
    """Model name to use."""
    everlyai_api_base: str = DEFAULT_API_BASE
    """Base URL path for API requests."""
    available_models: Optional[Set[str]] = None
    """Models available from the EverlyAI API."""

    @staticmethod
    def get_available_models() -> Set[str]:
        """Get available models from EverlyAI API."""
        # EverlyAI does not expose a model-listing endpoint, so the supported
        # models are hard-coded here.
        return set(
            [
                "meta-llama/Llama-2-7b-chat-hf",
                "meta-llama/Llama-2-13b-chat-hf-quantized",
            ]
        )

    @model_validator(mode="before")
    @classmethod
    def validate_environment_override(cls, values: dict) -> Any:
        """Validate that api key and python package exists in environment."""
        values["openai_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "everlyai_api_key", "EVERLYAI_API_KEY")
        )
        values["openai_api_base"] = DEFAULT_API_BASE
        try:
            import openai
        except ImportError as e:
            raise ImportError(
                "Could not import openai python package. "
                "Please install it with `pip install openai`."
            ) from e
        try:
            values["client"] = openai.ChatCompletion
        except AttributeError as exc:
            raise ValueError(
                "`openai` has no `ChatCompletion` attribute, this is likely "
                "due to an old version of the openai package. Try upgrading it "
                "with `pip install --upgrade openai`."
            ) from exc

        if "model_name" not in values.keys():
            values["model_name"] = DEFAULT_MODEL

        model_name = values["model_name"]
        available_models = cls.get_available_models()

        if model_name not in available_models:
            raise ValueError(
                f"Model name {model_name} not found in available models: "
                f"{available_models}."
            )

        values["available_models"] = available_models

        return values

    def _get_encoding_model(self) -> tuple[str, tiktoken.Encoding]:
        tiktoken_ = _import_tiktoken()
        if self.tiktoken_model_name is not None:
            model = self.tiktoken_model_name
        else:
            model = self.model_name
        try:
            encoding = tiktoken_.encoding_for_model(model)
        except KeyError:
            logger.warning("Warning: model not found. Using cl100k_base encoding.")
            model = "cl100k_base"
            encoding = tiktoken_.get_encoding(model)
        return model, encoding

    def get_num_tokens_from_messages(
        self,
        messages: list[BaseMessage],
        tools: Optional[
            Sequence[Union[Dict[str, Any], Type, Callable, BaseTool]]
        ] = None,
    ) -> int:
        """Calculate num tokens with tiktoken package.

        Official documentation: https://github.com/openai/openai-cookbook/blob/
        main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb"""
        if tools is not None:
            warnings.warn(
                "Counting tokens in tool schemas is not yet supported. "
                "Ignoring tools."
            )
        if sys.version_info[1] <= 7:
            return super().get_num_tokens_from_messages(messages)
        model, encoding = self._get_encoding_model()
        tokens_per_message = 3
        tokens_per_name = 1
        num_tokens = 0
        messages_dict = [convert_message_to_dict(m) for m in messages]
        for message in messages_dict:
            num_tokens += tokens_per_message
            for key, value in message.items():
                # Cast to str in case the value is not a string (e.g. function
                # messages can carry structured content).
                num_tokens += len(encoding.encode(str(value)))
                if key == "name":
                    num_tokens += tokens_per_name
        # Account for the tokens that prime the assistant's reply.
        num_tokens += 3
        return num_tokens
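
# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the library module): assumes
# the `openai` and `tiktoken` packages are installed and that a real key is
# available via the EVERLYAI_API_KEY environment variable. Message contents
# below are placeholders.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from langchain_core.messages import HumanMessage, SystemMessage

    chat = ChatEverlyAI(model_name="meta-llama/Llama-2-7b-chat-hf")

    messages = [
        SystemMessage(content="You are a helpful assistant."),
        HumanMessage(content="Summarize tokenization in one sentence."),
    ]

    # Token accounting follows the OpenAI cookbook formula used above:
    # 3 tokens of per-message overhead, plus the encoded length of each field,
    # plus 1 token when a `name` field is present, plus 3 tokens that prime
    # the assistant's reply.
    print(chat.get_num_tokens_from_messages(messages))

    # An actual completion requires valid EverlyAI credentials and network
    # access, so it is left commented out here.
    # print(chat.invoke(messages).content)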