from __future__ import annotations

from typing import Any, Dict, Iterator, List, Optional

from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import LanguageModelInput
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.utils import get_from_dict_or_env, pre_init
from pydantic import BaseModel, SecretStr

from langchain_community.llms import BaseLLM
from langchain_community.utilities.vertexai import create_retry_decorator


def completion_with_retry(
    llm: GooglePalm,
    prompt: LanguageModelInput,
    is_gemini: bool = False,
    stream: bool = False,
    run_manager: Optional[CallbackManagerForLLMRun] = None,
    **kwargs: Any,
) -> Any:
    """Use tenacity to retry the completion call."""
    retry_decorator = create_retry_decorator(
        llm, max_retries=llm.max_retries, run_manager=run_manager
    )

    @retry_decorator
    def _completion_with_retry(
        prompt: LanguageModelInput, is_gemini: bool, stream: bool, **kwargs: Any
    ) -> Any:
        generation_config = kwargs.get("generation_config", {})
        if is_gemini:
            return llm.client.generate_content(
                contents=prompt, stream=stream, generation_config=generation_config
            )
        return llm.client.generate_text(prompt=prompt, **kwargs)

    return _completion_with_retry(
        prompt=prompt, is_gemini=is_gemini, stream=stream, **kwargs
    )


def _is_gemini_model(model_name: str) -> bool:
    return "gemini" in model_name


def _strip_erroneous_leading_spaces(text: str) -> str:
    """Strip erroneous leading spaces from text.

    The PaLM API will sometimes erroneously return a single leading space in all
    lines > 1. This function strips that space.
    """
    has_leading_space = all(
        not line or line[0] == " " for line in text.split("\n")[1:]
    )
    if has_leading_space:
        return text.replace("\n ", "\n")
    else:
        return text


@deprecated(
    "0.0.12", alternative_import="langchain_google_genai.GoogleGenerativeAI"
)
class GooglePalm(BaseLLM, BaseModel):
    """
    DEPRECATED: Use `langchain_google_genai.GoogleGenerativeAI` instead.

    Google PaLM models.
    """

    client: Any
    google_api_key: Optional[SecretStr]
    model_name: str = "models/text-bison-001"
    temperature: float = 0.7
    top_p: Optional[float] = None
    top_k: Optional[int] = None
    max_output_tokens: Optional[int] = None
    n: int = 1
    max_retries: int = 6

    @property
    def is_gemini(self) -> bool:
        """Returns whether a model belongs to the Gemini family or not."""
        return _is_gemini_model(self.model_name)

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {"google_api_key": "GOOGLE_API_KEY"}

    @classmethod
    def is_lc_serializable(cls) -> bool:
        return True

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "llms", "google_palm"]

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate api key, python package exists."""
        google_api_key = get_from_dict_or_env(
            values, "google_api_key", "GOOGLE_API_KEY"
        )
        model_name = values["model_name"]
        try:
            import google.generativeai as genai

            if isinstance(google_api_key, SecretStr):
                google_api_key = google_api_key.get_secret_value()

            genai.configure(api_key=google_api_key)

            if _is_gemini_model(model_name):
                values["client"] = genai.GenerativeModel(model_name=model_name)
            else:
                values["client"] = genai
        except ImportError:
            raise ImportError(
                "Could not import google-generativeai python package. "
                "Please install it with `pip install google-generativeai`."
            )

        if values["temperature"] is not None and not 0 <= values["temperature"] <= 1:
            raise ValueError("temperature must be in the range [0.0, 1.0]")

        if values["top_p"] is not None and not 0 <= values["top_p"] <= 1:
            raise ValueError("top_p must be in the range [0.0, 1.0]")

        if values["top_k"] is not None and values["top_k"] <= 0:
            raise ValueError("top_k must be positive")

        if values["max_output_tokens"] is not None and values["max_output_tokens"] <= 0:
            raise ValueError("max_output_tokens must be greater than zero")

        return values

    def _generate(
        self,
        prompts: List[str],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> LLMResult:
        generations: List[List[Generation]] = []
        generation_config = {
            "stop_sequences": stop,
            "temperature": self.temperature,
            "top_p": self.top_p,
            "top_k": self.top_k,
            "max_output_tokens": self.max_output_tokens,
            "candidate_count": self.n,
        }
        for prompt in prompts:
            if self.is_gemini:
                res = completion_with_retry(
                    self,
                    prompt=prompt,
                    stream=False,
                    is_gemini=True,
                    run_manager=run_manager,
                    generation_config=generation_config,
                )
                candidates = [
                    "".join([p.text for p in c.content.parts])
                    for c in res.candidates
                ]
                generations.append([Generation(text=c) for c in candidates])
            else:
                res = completion_with_retry(
                    self,
                    model=self.model_name,
                    prompt=prompt,
                    stream=False,
                    is_gemini=False,
                    run_manager=run_manager,
                    **generation_config,
                )
                prompt_generations = []
                for candidate in res.candidates:
                    raw_text = candidate["output"]
                    stripped_text = _strip_erroneous_leading_spaces(raw_text)
                    prompt_generations.append(Generation(text=stripped_text))
                generations.append(prompt_generations)

        return LLMResult(generations=generations)

    def _stream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[GenerationChunk]:
        generation_config = kwargs.get("generation_config", {})
        if stop:
            generation_config["stop_sequences"] = stop
        for stream_resp in completion_with_retry(
            self,
            prompt,
            stream=True,
            is_gemini=True,
            run_manager=run_manager,
            generation_config=generation_config,
            **kwargs,
        ):
            chunk = GenerationChunk(text=stream_resp.text)
            if run_manager:
                run_manager.on_llm_new_token(
                    stream_resp.text,
                    chunk=chunk,
                    verbose=self.verbose,
                )
            yield chunk

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "google_palm"

    def get_num_tokens(self, text: str) -> int:
        """Get the number of tokens present in the text.

        Useful for checking if an input will fit in a model's context window.

        Args:
            text: The string input to tokenize.

        Returns:
            The integer number of tokens in the text.
        """
        if self.is_gemini:
            raise ValueError("Counting tokens is not yet supported!")
        result = self.client.count_text_tokens(model=self.model_name, prompt=text)
        return result["token_count"]
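
# --- Usage sketch (illustrative only, not part of the library module) ---
# A minimal example of how this deprecated wrapper could be instantiated and
# called, assuming the `google-generativeai` package is installed and a valid
# GOOGLE_API_KEY is available in the environment. The model name and prompt
# strings below are placeholders; as the deprecation notice above indicates,
# new code should prefer `langchain_google_genai.GoogleGenerativeAI`.
#
#     from langchain_community.llms.google_palm import GooglePalm
#
#     llm = GooglePalm(model_name="models/text-bison-001", temperature=0.2)
#     print(llm.get_num_tokens("How long is this prompt?"))  # PaLM models only
#     print(llm.invoke("Write one sentence about retry logic."))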