"""Hugging Face Chat Wrapper."""

from typing import Any, AsyncIterator, Iterator, List, Optional

from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks.manager import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.language_models.chat_models import (
    BaseChatModel,
    agenerate_from_stream,
    generate_from_stream,
)
from langchain_core.messages import (
    AIMessage,
    AIMessageChunk,
    BaseMessage,
    HumanMessage,
    SystemMessage,
)
from langchain_core.outputs import (
    ChatGeneration,
    ChatGenerationChunk,
    ChatResult,
    LLMResult,
)
from pydantic import model_validator
from typing_extensions import Self

from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint
from langchain_community.llms.huggingface_hub import HuggingFaceHub
from langchain_community.llms.huggingface_text_gen_inference import (
    HuggingFaceTextGenInference,
)

DEFAULT_SYSTEM_PROMPT = "You are a helpful, respectful, and honest assistant."


@deprecated(
    since="0.0.37",
    removal="1.0",
    alternative_import="langchain_huggingface.ChatHuggingFace",
)
class ChatHuggingFace(BaseChatModel):
    """
    Wrapper for using Hugging Face LLMs as ChatModels.

    Works with `HuggingFaceTextGenInference`, `HuggingFaceEndpoint`,
    and `HuggingFaceHub` LLMs.

    Upon instantiating this class, the model_id is resolved from the url
    provided to the LLM, and the appropriate tokenizer is loaded from
    the HuggingFace Hub.

    Adapted from: https://python.langchain.com/docs/integrations/chat/llama2_chat
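
    Example (an illustrative usage sketch only; the ``repo_id`` below is a
    placeholder and a Hugging Face API token is assumed to be configured in
    the environment):
        .. code-block:: python

            from langchain_community.chat_models import ChatHuggingFace
            from langchain_community.llms import HuggingFaceEndpoint
            from langchain_core.messages import HumanMessage

            llm = HuggingFaceEndpoint(
                repo_id="HuggingFaceH4/zephyr-7b-beta",
                task="text-generation",
                max_new_tokens=512,
            )
            chat_model = ChatHuggingFace(llm=llm)
            chat_model.invoke([HumanMessage(content="What is the capital of France?")])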
    """

    llm: Any
    system_message: SystemMessage = SystemMessage(content=DEFAULT_SYSTEM_PROMPT)
    tokenizer: Any = None
    model_id: Optional[str] = None
    streaming: bool = False

    def __init__(self, **kwargs: Any):
        super().__init__(**kwargs)

        from transformers import AutoTokenizer

        self._resolve_model_id()

        self.tokenizer = (
            AutoTokenizer.from_pretrained(self.model_id)
            if self.tokenizer is None
            else self.tokenizer
        )

    @model_validator(mode="after")
    def validate_llm(self) -> Self:
        if not isinstance(
            self.llm,
            (HuggingFaceTextGenInference, HuggingFaceEndpoint, HuggingFaceHub),
        ):
            raise TypeError(
                "Expected llm to be one of HuggingFaceTextGenInference, "
                "HuggingFaceEndpoint, HuggingFaceHub, received "
                f"{type(self.llm)}"
            )
        return self

    def _stream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        request = self._to_chat_prompt(messages)

        for data in self.llm.stream(request, **kwargs):
            delta = data
            chunk = ChatGenerationChunk(message=AIMessageChunk(content=delta))
            if run_manager:
                run_manager.on_llm_new_token(delta, chunk=chunk)
            yield chunk

    async def _astream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> AsyncIterator[ChatGenerationChunk]:
        request = self._to_chat_prompt(messages)
        async for data in self.llm.astream(request, **kwargs):
            delta = data
            chunk = ChatGenerationChunk(message=AIMessageChunk(content=delta))
            if run_manager:
                await run_manager.on_llm_new_token(delta, chunk=chunk)
            yield chunk

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        if self.streaming:
            stream_iter = self._stream(
                messages, stop=stop, run_manager=run_manager, **kwargs
            )
            return generate_from_stream(stream_iter)

        llm_input = self._to_chat_prompt(messages)
        llm_result = self.llm._generate(
            prompts=[llm_input], stop=stop, run_manager=run_manager, **kwargs
        )
        return self._to_chat_result(llm_result)

    async def _agenerate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        if self.streaming:
            stream_iter = self._astream(
                messages, stop=stop, run_manager=run_manager, **kwargs
            )
            return await agenerate_from_stream(stream_iter)

        llm_input = self._to_chat_prompt(messages)
        llm_result = await self.llm._agenerate(
            prompts=[llm_input], stop=stop, run_manager=run_manager, **kwargs
        )
        return self._to_chat_result(llm_result)

    def _to_chat_prompt(
        self,
        messages: List[BaseMessage],
    ) -> str:
        """Convert a list of messages into a prompt format expected by wrapped LLM."""
        if not messages:
            raise ValueError("At least one HumanMessage must be provided!")

        if not isinstance(messages[-1], HumanMessage):
            raise ValueError("Last message must be a HumanMessage!")

        messages_dicts = [self._to_chatml_format(m) for m in messages]

        return self.tokenizer.apply_chat_template(
            messages_dicts, tokenize=False, add_generation_prompt=True
        )

    def _to_chatml_format(self, message: BaseMessage) -> dict:
        """Convert LangChain message to ChatML format."""
        if isinstance(message, SystemMessage):
            role = "system"
        elif isinstance(message, AIMessage):
            role = "assistant"
        elif isinstance(message, HumanMessage):
            role = "user"
        else:
            raise ValueError(f"Unknown message type: {type(message)}")

        return {"role": role, "content": message.content}

    @staticmethod
    def _to_chat_result(llm_result: LLMResult) -> ChatResult:
        chat_generations = []

        for g in llm_result.generations[0]:
            chat_generation = ChatGeneration(
                message=AIMessage(content=g.text),
                generation_info=g.generation_info,
            )
            chat_generations.append(chat_generation)

        return ChatResult(
            generations=chat_generations, llm_output=llm_result.llm_output
        )

    def _resolve_model_id(self) -> None:
        """Resolve the model_id from the LLM's inference_server_url"""
        from huggingface_hub import list_inference_endpoints

        available_endpoints = list_inference_endpoints("*")
        if isinstance(self.llm, HuggingFaceHub) or (
            hasattr(self.llm, "repo_id") and self.llm.repo_id
        ):
            self.model_id = self.llm.repo_id
            return
        elif isinstance(self.llm, HuggingFaceTextGenInference):
            endpoint_url = self.llm.inference_server_url
        else:
            endpoint_url = self.llm.endpoint_url

        for endpoint in available_endpoints:
            if endpoint.url == endpoint_url:
                self.model_id = endpoint.repository

        if not self.model_id:
            raise ValueError(
                "Failed to resolve model_id:"
                f"Could not find model id for inference server: {endpoint_url}"
                "Make sure that your Hugging Face token has access to the endpoint."
            )

    @property
    def _llm_type(self) -> str:
        return "huggingface-chat-wrapper"
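
# A minimal sketch of the prompt-construction flow above, for illustration only
# ("chat_model" is a hypothetical, already-constructed ChatHuggingFace instance
# whose tokenizer defines a chat template; nothing here runs on import):
#
#     messages = [
#         SystemMessage(content=DEFAULT_SYSTEM_PROMPT),
#         HumanMessage(content="What is the capital of France?"),
#     ]
#     # _to_chatml_format maps each message to a ChatML dict, e.g.
#     # {"role": "user", "content": "What is the capital of France?"};
#     # _to_chat_prompt then feeds the list through
#     # tokenizer.apply_chat_template(..., tokenize=False, add_generation_prompt=True)
#     # to build the single string prompt handed to the wrapped LLM.
#     prompt = chat_model._to_chat_prompt(messages)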