from typing import Any, Dict, List, Mapping, Optional

import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from pydantic import ConfigDict, SecretStr, model_validator

from langchain_community.llms.utils import enforce_stop_tokens


class ForefrontAI(LLM):
    """ForefrontAI large language models.

    To use, you should have the environment variable ``FOREFRONTAI_API_KEY``
    set with your API key.

    Example:
        .. code-block:: python

            from langchain_community.llms import ForefrontAI
            forefrontai = ForefrontAI(endpoint_url="")
    """

    endpoint_url: str = ""
    temperature: float = 0.7
    length: int = 256
    top_p: float = 1.0
    top_k: int = 40
    repetition_penalty: int = 1
    forefrontai_api_key: SecretStr
    base_url: Optional[str] = None

    model_config = ConfigDict(extra="forbid")

    @model_validator(mode="before")
    @classmethod
    def validate_environment(cls, values: Dict) -> Any:
        """Validate that api key exists in environment."""
        values["forefrontai_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "forefrontai_api_key", "FOREFRONTAI_API_KEY")
        )
        return values

    @property
    def _default_params(self) -> Mapping[str, Any]:
        """Get the default parameters for calling ForefrontAI API."""
        return {
            "temperature": self.temperature,
            "length": self.length,
            "top_p": self.top_p,
            "top_k": self.top_k,
            "repetition_penalty": self.repetition_penalty,
        }

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {**{"endpoint_url": self.endpoint_url}, **self._default_params}

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "forefrontai"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call out to ForefrontAI's complete endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = ForefrontAI("Tell me a joke.")
        """
        auth_value = f"Bearer {self.forefrontai_api_key.get_secret_value()}"
        response = requests.post(
            url=self.endpoint_url,
            headers={"Authorization": auth_value, "Content-Type": "application/json"},
            json={"text": prompt, **self._default_params, **kwargs},
        )
        response_json = response.json()
        text = response_json["result"][0]["completion"]
        if stop is not None:
            # Stop sequences are not enforced by the API parameters, so trim them here.
            text = enforce_stop_tokens(text, stop)
        return text
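# ---------------------------------------------------------------------------
# Usage sketch (not part of the original module): a minimal, hedged example of
# driving the class above. The endpoint URL below is a placeholder -- replace
# it with the URL of your own deployed ForefrontAI model -- and the
# FOREFRONTAI_API_KEY environment variable is assumed to be set so the
# ``validate_environment`` validator can pick it up.
#
#     from langchain_community.llms import ForefrontAI
#
#     llm = ForefrontAI(
#         endpoint_url="https://example-forefront-endpoint/completions",  # placeholder
#         temperature=0.2,
#         length=128,
#     )
#     print(llm.invoke("Tell me a joke."))
# ---------------------------------------------------------------------------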