"""Wrapper around Anyscale Endpoint."""

from typing import Any, Dict, List, Mapping, Optional, Set

from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from pydantic import Field, SecretStr

from langchain_community.llms.openai import (
    BaseOpenAI,
    acompletion_with_retry,
    completion_with_retry,
)
from langchain_community.utils.openai import is_openai_v1

DEFAULT_BASE_URL = "https://api.endpoints.anyscale.com/v1"
DEFAULT_MODEL = "mistralai/Mixtral-8x7B-Instruct-v0.1"


def update_token_usage(
    keys: Set[str], response: Dict[str, Any], token_usage: Dict[str, Any]
) -> None:
    """Update token usage."""
    _keys_to_use = keys.intersection(response["usage"])
    for _key in _keys_to_use:
        if _key not in token_usage:
            token_usage[_key] = response["usage"][_key]
        else:
            token_usage[_key] += response["usage"][_key]


def create_llm_result(
    choices: Any, prompts: List[str], token_usage: Dict[str, int], model_name: str
) -> LLMResult:
    """Create the LLMResult from the choices and prompts."""
    generations = []
    for i, _ in enumerate(prompts):
        choice = choices[i]
        generations.append(
            [
                Generation(
                    text=choice["message"]["content"],
                    generation_info=dict(
                        finish_reason=choice.get("finish_reason"),
                        logprobs=choice.get("logprobs"),
                    ),
                )
            ]
        )
    llm_output = {"token_usage": token_usage, "model_name": model_name}
    return LLMResult(generations=generations, llm_output=llm_output)
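

# Illustrative sketch (an editor's addition, not part of the upstream module):
# `update_token_usage` folds the "usage" block of one API response into a
# running tally, so calling it once per sub-prompt batch accumulates counts.
#
#     usage: Dict[str, Any] = {}
#     update_token_usage({"total_tokens"}, {"usage": {"total_tokens": 3}}, usage)
#     update_token_usage({"total_tokens"}, {"usage": {"total_tokens": 4}}, usage)
#     assert usage == {"total_tokens": 7}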


class Anyscale(BaseOpenAI):
    """Anyscale large language models.

    To use, you should have the environment variable ``ANYSCALE_API_KEY`` set with your
    Anyscale Endpoint, or pass it as a named parameter to the constructor.
    To use with Anyscale Private Endpoint, please also set ``ANYSCALE_API_BASE``.

    Example:
        .. code-block:: python

            from langchain.llms import Anyscale
            anyscalellm = Anyscale(anyscale_api_key="ANYSCALE_API_KEY")
            # To leverage Ray for parallel processing
            @ray.remote(num_cpus=1)
            def send_query(llm, text):
                resp = llm.invoke(text)
                return resp
            futures = [send_query.remote(anyscalellm, text) for text in texts]
            results = ray.get(futures)
    """

    anyscale_api_base: str = Field(default=DEFAULT_BASE_URL)
    anyscale_api_key: SecretStr = Field(default=None)
    model_name: str = Field(default=DEFAULT_MODEL)
    prefix_messages: List = Field(default_factory=list)

    @classmethod
    def is_lc_serializable(cls) -> bool:
        return False

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
        values["anyscale_api_base"] = get_from_dict_or_env(
            values, "anyscale_api_base", "ANYSCALE_API_BASE", default=DEFAULT_BASE_URL
        )
        values["anyscale_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "anyscale_api_key", "ANYSCALE_API_KEY")
        )
        values["model_name"] = get_from_dict_or_env(
            values, "model_name", "MODEL_NAME", default=DEFAULT_MODEL
        )
        try:
            import openai

            if is_openai_v1():
                client_params = {
                    "api_key": values["anyscale_api_key"].get_secret_value(),
                    "base_url": values["anyscale_api_base"],
                }
                if not values.get("client"):
                    values["client"] = openai.OpenAI(**client_params).completions
                if not values.get("async_client"):
                    values["async_client"] = openai.AsyncOpenAI(
                        **client_params
                    ).completions
            else:
                values["openai_api_base"] = values["anyscale_api_base"]
                values["openai_api_key"] = values[
                    "anyscale_api_key"
                ].get_secret_value()
                values["client"] = openai.Completion
        except ImportError:
            raise ImportError(
                "Could not import openai python package. "
                "Please install it with `pip install openai`."
            )

        if values["streaming"] and values["n"] > 1:
            raise ValueError("Cannot stream results when n > 1.")
        if values["streaming"] and values["best_of"] > 1:
            raise ValueError("Cannot stream results when best_of > 1.")
        return values

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {
            **{"model_name": self.model_name},
            **super()._identifying_params,
        }

    @property
    def _invocation_params(self) -> Dict[str, Any]:
        """Get the parameters used to invoke the model."""
        openai_creds: Dict[str, Any] = {"model": self.model_name}
        if not is_openai_v1():
            openai_creds.update(
                {
                    "api_key": self.anyscale_api_key.get_secret_value(),
                    "api_base": self.anyscale_api_base,
                }
            )
        return {**openai_creds, **super()._invocation_params}

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "Anyscale LLM"

    def _generate(
        self,
        prompts: List[str],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> LLMResult:
        """Call out to the Anyscale endpoint with k unique prompts.

        Args:
            prompts: The prompts to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The full LLM output.

        Example:
            .. code-block:: python

                response = anyscalellm.generate(["Tell me a joke."])
        """
        params = self._invocation_params
        params = {**params, **kwargs}
        sub_prompts = self.get_sub_prompts(params, prompts, stop)
        choices = []
        token_usage: Dict[str, int] = {}
        # Token usage is aggregated across sub-prompt batches; these are the
        # keys pulled from each response's "usage" block.
        _keys = {"completion_tokens", "prompt_tokens", "total_tokens"}
        system_fingerprint: Optional[str] = None
        for _prompts in sub_prompts:
            if self.streaming:
                if len(_prompts) > 1:
                    raise ValueError("Cannot stream results with multiple prompts.")
                generation: Optional[GenerationChunk] = None
                for chunk in self._stream(_prompts[0], stop, run_manager, **kwargs):
                    if generation is None:
                        generation = chunk
                    else:
                        generation += chunk
                assert generation is not None
                choices.append(
                    {
                        "message": {"content": generation.text},
                        "finish_reason": (
                            generation.generation_info.get("finish_reason")
                            if generation.generation_info
                            else None
                        ),
                        "logprobs": (
                            generation.generation_info.get("logprobs")
                            if generation.generation_info
                            else None
                        ),
                    }
                )
            else:
                response = completion_with_retry(
                    self, prompt=_prompts, run_manager=run_manager, **params
                )
                if not isinstance(response, dict):
                    # The v1 client returns a Pydantic object instead of a
                    # dict; convert it for uniform handling below.
                    response = response.dict()
                choices.extend(response["choices"])
                update_token_usage(_keys, response, token_usage)
                if not system_fingerprint:
                    system_fingerprint = response.get("system_fingerprint")
        return self.create_llm_result(
            choices,
            prompts,
            params,
            token_usage,
            system_fingerprint=system_fingerprint,
        )

    async def _agenerate(
        self,
        prompts: List[str],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> LLMResult:
        """Call out to the Anyscale endpoint async with k unique prompts."""
        params = self._invocation_params
        params = {**params, **kwargs}
        sub_prompts = self.get_sub_prompts(params, prompts, stop)
        choices = []
        token_usage: Dict[str, int] = {}
        _keys = {"completion_tokens", "prompt_tokens", "total_tokens"}
        system_fingerprint: Optional[str] = None
        for _prompts in sub_prompts:
            if self.streaming:
                if len(_prompts) > 1:
                    raise ValueError("Cannot stream results with multiple prompts.")
                generation: Optional[GenerationChunk] = None
                async for chunk in self._astream(
                    _prompts[0], stop, run_manager, **kwargs
                ):
                    if generation is None:
                        generation = chunk
                    else:
                        generation += chunk
                assert generation is not None
                choices.append(
                    {
                        "message": {"content": generation.text},
                        "finish_reason": (
                            generation.generation_info.get("finish_reason")
                            if generation.generation_info
                            else None
                        ),
                        "logprobs": (
                            generation.generation_info.get("logprobs")
                            if generation.generation_info
                            else None
                        ),
                    }
                )
            else:
                response = await acompletion_with_retry(
                    self, prompt=_prompts, run_manager=run_manager, **params
                )
                if not isinstance(response, dict):
                    response = response.dict()
                choices.extend(response["choices"])
                update_token_usage(_keys, response, token_usage)
                if not system_fingerprint:
                    system_fingerprint = response.get("system_fingerprint")
        return self.create_llm_result(
            choices,
            prompts,
            params,
            token_usage,
            system_fingerprint=system_fingerprint,
        )
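

# Minimal usage sketch (an editor's addition, not part of the upstream module).
# Assumes the `openai` package is installed and ANYSCALE_API_KEY is set in the
# environment; the model and base URL fall back to DEFAULT_MODEL and
# DEFAULT_BASE_URL, and the prompts are arbitrary.
if __name__ == "__main__":
    llm = Anyscale()  # credentials and model resolved by validate_environment
    result = llm.generate(["Tell me a joke.", "Tell me a fact."])
    for gens in result.generations:
        print(gens[0].text)
    # Per-request usage was aggregated across batches by update_token_usage.
    print(result.llm_output["token_usage"])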