from __future__ import annotations

import asyncio
import json
from json import JSONDecodeError
from time import sleep
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    List,
    Optional,
    Sequence,
    Tuple,
    Type,
    Union,
)

from langchain_core.agents import AgentAction, AgentFinish
from langchain_core.callbacks import CallbackManager
from langchain_core.load import dumpd
from langchain_core.runnables import (
    RunnableConfig,
    RunnableSerializable,
    ensure_config,
)
from langchain_core.tools import BaseTool
from langchain_core.utils.function_calling import convert_to_openai_tool
from pydantic import BaseModel, Field, model_validator
from typing_extensions import Self

if TYPE_CHECKING:
    import openai
    from openai.types.beta.threads import ThreadMessage
    from openai.types.beta.threads.required_action_function_tool_call import (
        RequiredActionFunctionToolCall,
    )
ddZd	S )OpenAIAssistantFinishzuAgentFinish with run and thread metadata.

    Parameters:
        run_id: Run id.
        thread_id: Thread id.
    strrun_id	thread_idreturnboolc                 C     dS z]Check if the class is serializable by LangChain.

        Returns:
            False
        F clsr&   r&   ]/var/www/html/lang_env/lib/python3.10/site-packages/langchain/agents/openai_assistant/base.pyis_lc_serializable0      z(OpenAIAssistantFinish.is_lc_serializableNr"   r#   __name__
__module____qualname____doc____annotations__classmethodr*   r&   r&   r&   r)   r   %   s   
class OpenAIAssistantAction(AgentAction):
    """AgentAction with info needed to submit custom tool output to existing run.

    Parameters:
        tool_call_id: Tool call id.
        run_id: Run id.
        thread_id: Thread id.
    """

    tool_call_id: str
    run_id: str
    thread_id: str

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Check if the class is serializable by LangChain.

        Returns:
            False
        """
        return False

def _get_openai_client() -> openai.OpenAI:
    try:
        import openai

        return openai.OpenAI()
    except ImportError as e:
        raise ImportError(
            "Unable to import openai, please install with `pip install openai`."
        ) from e
    except AttributeError as e:
        raise AttributeError(
            "Please make sure you are using a v1.1-compatible version of openai. You "
            'can install with `pip install "openai>=1.1"`.'
        ) from e

def _get_openai_async_client() -> openai.AsyncOpenAI:
    try:
        import openai

        return openai.AsyncOpenAI()
    except ImportError as e:
        raise ImportError(
            "Unable to import openai, please install with `pip install openai`."
        ) from e
    except AttributeError as e:
        raise AttributeError(
            "Please make sure you are using a v1.1-compatible version of openai. You "
            'can install with `pip install "openai>=1.1"`.'
        ) from e


def _is_assistants_builtin_tool(
    tool: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool],
) -> bool:
    """Determine if tool corresponds to OpenAI Assistants built-in."""
    assistants_builtin_tools = ("code_interpreter", "file_search")
    return (
        isinstance(tool, dict)
        and ("type" in tool)
        and (tool["type"] in assistants_builtin_tools)
    )

def _get_assistants_tool(
    tool: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool],
) -> Dict[str, Any]:
    """Convert a raw function/class to an OpenAI tool.

    Note that OpenAI assistants supports several built-in tools,
    such as "code_interpreter" and "file_search".
    """
    if _is_assistants_builtin_tool(tool):
        return tool
    else:
        return convert_to_openai_tool(tool)


OutputType = Union[
    List[OpenAIAssistantAction],
    OpenAIAssistantFinish,
    List["ThreadMessage"],
    List["RequiredActionFunctionToolCall"],
]

ed< 	 dZ
ded< 	 edddFddZedddGddZ	dHdId$d%Zedd&dJd(d)Z	dHdId*d+ZdKd.d/ZdLd0d1ZdMd3d4ZdNd6d7ZdOd:d;ZdKd<d=ZdLd>d?ZdMd@dAZdNdBdCZdOdDdEZdS )POpenAIAssistantRunnablea  Run an OpenAI Assistant.

    Example using OpenAI tools:
        .. code-block:: python

            from langchain_experimental.openai_assistant import OpenAIAssistantRunnable

            interpreter_assistant = OpenAIAssistantRunnable.create_assistant(
                name="langchain assistant",
                instructions="You are a personal math tutor. Write and run code to answer math questions.",
                tools=[{"type": "code_interpreter"}],
                model="gpt-4-1106-preview"
            )
            output = interpreter_assistant.invoke({"content": "What's 10 - 4 raised to the 2.7"})
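
    Example continuing the conversation on the same thread (a minimal sketch based
    on the ``thread_id`` input documented on ``invoke``; it assumes the messages
    returned above expose a ``thread_id`` attribute):
        .. code-block:: python

            followup = interpreter_assistant.invoke(
                {"content": "now add 17.241", "thread_id": output[0].thread_id}
            )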

    Example using custom tools and AgentExecutor:
        .. code-block:: python

            from langchain_experimental.openai_assistant import OpenAIAssistantRunnable
            from langchain.agents import AgentExecutor
            from langchain.tools import E2BDataAnalysisTool


            tools = [E2BDataAnalysisTool(api_key="...")]
            agent = OpenAIAssistantRunnable.create_assistant(
                name="langchain assistant e2b tool",
                instructions="You are a personal math tutor. Write and run code to answer math questions.",
                tools=tools,
                model="gpt-4-1106-preview",
                as_agent=True
            )

            agent_executor = AgentExecutor(agent=agent, tools=tools)
            agent_executor.invoke({"content": "What's 10 - 4 raised to the 2.7"})


    Example using custom tools and custom execution:
        .. code-block:: python

            from langchain_experimental.openai_assistant import OpenAIAssistantRunnable
            from langchain.agents import AgentExecutor
            from langchain_core.agents import AgentFinish
            from langchain.tools import E2BDataAnalysisTool


            tools = [E2BDataAnalysisTool(api_key="...")]
            agent = OpenAIAssistantRunnable.create_assistant(
                name="langchain assistant e2b tool",
                instructions="You are a personal math tutor. Write and run code to answer math questions.",
                tools=tools,
                model="gpt-4-1106-preview",
                as_agent=True
            )

            def execute_agent(agent, tools, input):
                tool_map = {tool.name: tool for tool in tools}
                response = agent.invoke(input)
                while not isinstance(response, AgentFinish):
                    tool_outputs = []
                    for action in response:
                        tool_output = tool_map[action.tool].invoke(action.tool_input)
                        tool_outputs.append({"output": tool_output, "tool_call_id": action.tool_call_id})
                    response = agent.invoke(
                        {
                            "tool_outputs": tool_outputs,
                            "run_id": action.run_id,
                            "thread_id": action.thread_id
                        }
                    )

                return response

            response = execute_agent(agent, tools, {"content": "What's 10 - 4 raised to the 2.7"})
            next_response = execute_agent(agent, tools, {"content": "now add 17.241", "thread_id": response.thread_id})

    """

    client: Any = Field(default_factory=_get_openai_client)
    """OpenAI or AzureOpenAI client."""
    async_client: Any = None
    """OpenAI or AzureOpenAI async client."""
    assistant_id: str
    """OpenAI assistant id."""
    check_every_ms: float = 1_000.0
    """Frequency with which to check run progress in ms."""
    as_agent: bool = False
    """Use as a LangChain agent, compatible with the AgentExecutor."""

    @model_validator(mode="after")
    def validate_async_client(self) -> Self:
        if self.async_client is None:
            import openai

            api_key = self.client.api_key
            self.async_client = openai.AsyncOpenAI(api_key=api_key)
        return self
    @classmethod
    def create_assistant(
        cls,
        name: str,
        instructions: str,
        tools: Sequence[Union[BaseTool, dict]],
        model: str,
        *,
        client: Optional[Union[openai.OpenAI, openai.AzureOpenAI]] = None,
        **kwargs: Any,
    ) -> OpenAIAssistantRunnable:
        """Create an OpenAI Assistant and instantiate the Runnable.

        Args:
            name: Assistant name.
            instructions: Assistant instructions.
            tools: Assistant tools. Can be passed in OpenAI format or as BaseTools.
            model: Assistant model to use.
            client: OpenAI or AzureOpenAI client.
                Will create a default OpenAI client if not specified.
            kwargs: Additional arguments.

        Returns:
            OpenAIAssistantRunnable configured to run using the created assistant.
        """
        client = client or _get_openai_client()
        assistant = client.beta.assistants.create(
            name=name,
            instructions=instructions,
            tools=[_get_assistants_tool(tool) for tool in tools],
            model=model,
        )
        return cls(assistant_id=assistant.id, client=client, **kwargs)

    def invoke(
        self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
    ) -> OutputType:
        """Invoke assistant.

        Args:
            input: Runnable input dict that can have:
                content: User message when starting a new run.
                thread_id: Existing thread to use.
                run_id: Existing run to use. Should only be supplied when providing
                    the tool output for a required action after an initial invocation.
                message_metadata: Metadata to associate with new message.
                thread_metadata: Metadata to associate with new thread. Only relevant
                    when new thread being created.
                instructions: Additional run instructions.
                model: Override Assistant model for this run.
                tools: Override Assistant tools for this run.
                parallel_tool_calls: Allow Assistant to set parallel_tool_calls
                    for this run.
                top_p: Override Assistant top_p for this run.
                temperature: Override Assistant temperature for this run.
                max_completion_tokens: Allow setting max_completion_tokens for this run.
                max_prompt_tokens: Allow setting max_prompt_tokens for this run.
                run_metadata: Metadata to associate with new run.
                attachments: A list of files attached to the message, and the
                    tools they should be added to.
            config: Runnable config. Defaults to None.

        Return:
            If self.as_agent, will return
                Union[List[OpenAIAssistantAction], OpenAIAssistantFinish].
                Otherwise, will return OpenAI types
                Union[List[ThreadMessage], List[RequiredActionFunctionToolCall]].
        """
        config = ensure_config(config)
        callback_manager = CallbackManager.configure(
            inheritable_callbacks=config.get("callbacks"),
            inheritable_tags=config.get("tags"),
            inheritable_metadata=config.get("metadata"),
        )
        run_manager = callback_manager.on_chain_start(
            dumpd(self), input, name=config.get("run_name") or self.get_name()
        )
        try:
            # Being run within an AgentExecutor and there are tool outputs to submit.
            if self.as_agent and input.get("intermediate_steps"):
                tool_outputs = self._parse_intermediate_steps(
                    input["intermediate_steps"]
                )
                run = self.client.beta.threads.runs.submit_tool_outputs(**tool_outputs)
            # Starting a new thread and a new run.
            elif "thread_id" not in input:
                thread = {
                    "messages": [
                        {
                            "role": "user",
                            "content": input["content"],
                            "metadata": input.get("message_metadata"),
                            "attachments": input.get("attachments"),
                        }
                    ],
                    "metadata": input.get("thread_metadata"),
                }
                run = self._create_thread_and_run(input, thread)
            # Starting a new run in an existing thread.
            elif "run_id" not in input:
                _ = self.client.beta.threads.messages.create(
                    input["thread_id"],
                    content=input["content"],
                    role="user",
                    metadata=input.get("message_metadata"),
                )
                run = self._create_run(input)
            # Submitting tool outputs to an existing run, outside the AgentExecutor
            # framework.
            else:
                run = self.client.beta.threads.runs.submit_tool_outputs(**input)
            run = self._wait_for_run(run.id, run.thread_id)
        except BaseException as e:
            run_manager.on_chain_error(e)
            raise e
        try:
            response = self._get_response(run)
        except BaseException as e:
            run_manager.on_chain_error(e, metadata=run.dict())
            raise e
        else:
            run_manager.on_chain_end(response)
            return response

    @classmethod
    async def acreate_assistant(
        cls,
        name: str,
        instructions: str,
        tools: Sequence[Union[BaseTool, dict]],
        model: str,
        *,
        async_client: Optional[
            Union[openai.AsyncOpenAI, openai.AsyncAzureOpenAI]
        ] = None,
        **kwargs: Any,
    ) -> OpenAIAssistantRunnable:
        """Async create an AsyncOpenAI Assistant and instantiate the Runnable.

        Args:
            name: Assistant name.
            instructions: Assistant instructions.
            tools: Assistant tools. Can be passed in OpenAI format or as BaseTools.
            model: Assistant model to use.
            async_client: AsyncOpenAI client.
                Will create default async_client if not specified.

        Returns:
            AsyncOpenAIAssistantRunnable configured to run using the created assistant.
        """
        async_client = async_client or _get_openai_async_client()
        openai_tools = [_get_assistants_tool(tool) for tool in tools]
        assistant = await async_client.beta.assistants.create(
            name=name,
            instructions=instructions,
            tools=openai_tools,
            model=model,
        )
        return cls(assistant_id=assistant.id, async_client=async_client, **kwargs)

    async def ainvoke(
        self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
    ) -> OutputType:
        """Async invoke assistant.

        Args:
            input: Runnable input dict that can have:
                content: User message when starting a new run.
                thread_id: Existing thread to use.
                run_id: Existing run to use. Should only be supplied when providing
                    the tool output for a required action after an initial invocation.
                message_metadata: Metadata to associate with a new message.
                thread_metadata: Metadata to associate with new thread. Only relevant
                    when a new thread is created.
                instructions: Overrides the instructions of the assistant.
                additional_instructions: Appends additional instructions.
                model: Override Assistant model for this run.
                tools: Override Assistant tools for this run.
                parallel_tool_calls: Allow Assistant to set parallel_tool_calls
                    for this run.
                top_p: Override Assistant top_p for this run.
                temperature: Override Assistant temperature for this run.
                max_completion_tokens: Allow setting max_completion_tokens for this run.
                max_prompt_tokens: Allow setting max_prompt_tokens for this run.
                run_metadata: Metadata to associate with new run.
            config: Runnable config. Defaults to None.
            kwargs: Additional arguments.

        Return:
            If self.as_agent, will return
                Union[List[OpenAIAssistantAction], OpenAIAssistantFinish].
                Otherwise, will return OpenAI types
                Union[List[ThreadMessage], List[RequiredActionFunctionToolCall]].
        """
        config = config or {}
        callback_manager = CallbackManager.configure(
            inheritable_callbacks=config.get("callbacks"),
            inheritable_tags=config.get("tags"),
            inheritable_metadata=config.get("metadata"),
        )
        run_manager = callback_manager.on_chain_start(
            dumpd(self), input, name=config.get("run_name") or self.get_name()
        )
        try:
            # Being run within an AgentExecutor and there are tool outputs to submit.
            if self.as_agent and input.get("intermediate_steps"):
                tool_outputs = await self._aparse_intermediate_steps(
                    input["intermediate_steps"]
                )
                run = await self.async_client.beta.threads.runs.submit_tool_outputs(
                    **tool_outputs
                )
            # Starting a new thread and a new run.
            elif "thread_id" not in input:
                thread = {
                    "messages": [
                        {
                            "role": "user",
                            "content": input["content"],
                            "metadata": input.get("message_metadata"),
                        }
                    ],
                    "metadata": input.get("thread_metadata"),
                }
                run = await self._acreate_thread_and_run(input, thread)
            # Starting a new run in an existing thread.
            elif "run_id" not in input:
                _ = await self.async_client.beta.threads.messages.create(
                    input["thread_id"],
                    content=input["content"],
                    role="user",
                    metadata=input.get("message_metadata"),
                )
                run = await self._acreate_run(input)
            # Submitting tool outputs to an existing run, outside the AgentExecutor
            # framework.
            else:
                run = await self.async_client.beta.threads.runs.submit_tool_outputs(
                    **input
                )
            run = await self._await_for_run(run.id, run.thread_id)
        except BaseException as e:
            run_manager.on_chain_error(e)
            raise e
        try:
            response = await self._aget_response(run)
        except BaseException as e:
            run_manager.on_chain_error(e, metadata=run.dict())
            raise e
        else:
            run_manager.on_chain_end(response)
            return response

    def _parse_intermediate_steps(
        self, intermediate_steps: List[Tuple[OpenAIAssistantAction, str]]
    ) -> dict:
        last_action, last_output = intermediate_steps[-1]
        run = self._wait_for_run(last_action.run_id, last_action.thread_id)
        required_tool_call_ids = set()
        if run.required_action:
            required_tool_call_ids = {
                tc.id for tc in run.required_action.submit_tool_outputs.tool_calls
            }
        tool_outputs = [
            {"output": str(output), "tool_call_id": action.tool_call_id}
            for action, output in intermediate_steps
            if action.tool_call_id in required_tool_call_ids
        ]
        submit_tool_outputs = {
            "tool_outputs": tool_outputs,
            "run_id": last_action.run_id,
            "thread_id": last_action.thread_id,
        }
        return submit_tool_outputs

    def _create_run(self, input: dict) -> Any:
        params = {
            k: v
            for k, v in input.items()
            if k
            in (
                "instructions",
                "model",
                "tools",
                "additional_instructions",
                "parallel_tool_calls",
                "top_p",
                "temperature",
                "max_completion_tokens",
                "max_prompt_tokens",
                "run_metadata",
            )
        }
        return self.client.beta.threads.runs.create(
            input["thread_id"],
            assistant_id=self.assistant_id,
            **params,
        )

    def _create_thread_and_run(self, input: dict, thread: dict) -> Any:
        params = {
            k: v
            for k, v in input.items()
            if k
            in (
                "instructions",
                "model",
                "tools",
                "parallel_tool_calls",
                "top_p",
                "temperature",
                "max_completion_tokens",
                "max_prompt_tokens",
                "run_metadata",
            )
        }
        run = self.client.beta.threads.create_and_run(
            assistant_id=self.assistant_id,
            thread=thread,
            **params,
        )
        return run

    def _get_response(self, run: Any) -> Any:
        # TODO: Pagination
        import openai

        major_version = int(openai.version.VERSION.split(".")[0])
        minor_version = int(openai.version.VERSION.split(".")[1])
        version_gte_1_14 = (major_version > 1) or (
            major_version == 1 and minor_version >= 14
        )

        if run.status == "completed":
            messages = self.client.beta.threads.messages.list(
                run.thread_id, order="asc"
            )
            new_messages = [msg for msg in messages if msg.run_id == run.id]
            if not self.as_agent:
                return new_messages
            answer: Any = [
                msg_content for msg in new_messages for msg_content in msg.content
            ]
            if all(
                (
                    isinstance(content, openai.types.beta.threads.TextContentBlock)
                    if version_gte_1_14
                    else isinstance(
                        content, openai.types.beta.threads.MessageContentText
                    )
                )
                for content in answer
            ):
                answer = "\n".join(content.text.value for content in answer)
            return OpenAIAssistantFinish(
                return_values={
                    "output": answer,
                    "thread_id": run.thread_id,
                    "run_id": run.id,
                },
                log="",
                run_id=run.id,
                thread_id=run.thread_id,
            )
        elif run.status == "requires_action":
            if not self.as_agent:
                return run.required_action.submit_tool_outputs.tool_calls
            actions = []
            for tool_call in run.required_action.submit_tool_outputs.tool_calls:
                function = tool_call.function
                try:
                    args = json.loads(function.arguments, strict=False)
                except JSONDecodeError as e:
                    raise ValueError(
                        f"Received invalid JSON function arguments: "
                        f"{function.arguments} for function {function.name}"
                    ) from e
                if len(args) == 1 and "__arg1" in args:
                    args = args["__arg1"]
                actions.append(
                    OpenAIAssistantAction(
                        tool=function.name,
                        tool_input=args,
                        tool_call_id=tool_call.id,
                        log="",
                        run_id=run.id,
                        thread_id=run.thread_id,
                    )
                )
            return actions
        else:
            run_info = json.dumps(run.dict(), indent=2)
            raise ValueError(
                f"Unexpected run status: {run.status}. Full run info:\n\n{run_info}"
            )

    def _wait_for_run(self, run_id: str, thread_id: str) -> Any:
        in_progress = True
        while in_progress:
            run = self.client.beta.threads.runs.retrieve(run_id, thread_id=thread_id)
            in_progress = run.status in ("in_progress", "queued")
            if in_progress:
                sleep(self.check_every_ms / 1000)
        return run

    async def _aparse_intermediate_steps(
        self, intermediate_steps: List[Tuple[OpenAIAssistantAction, str]]
    ) -> dict:
        last_action, last_output = intermediate_steps[-1]
        run = await self._await_for_run(last_action.run_id, last_action.thread_id)
        required_tool_call_ids = set()
        if run.required_action:
            required_tool_call_ids = {
                tc.id for tc in run.required_action.submit_tool_outputs.tool_calls
            }
        tool_outputs = [
            {"output": str(output), "tool_call_id": action.tool_call_id}
            for action, output in intermediate_steps
            if action.tool_call_id in required_tool_call_ids
        ]
        submit_tool_outputs = {
            "tool_outputs": tool_outputs,
            "run_id": last_action.run_id,
            "thread_id": last_action.thread_id,
        }
        return submit_tool_outputs

    async def _acreate_run(self, input: dict) -> Any:
        params = {
            k: v
            for k, v in input.items()
            if k
            in (
                "instructions",
                "model",
                "tools",
                "additional_instructions",
                "parallel_tool_calls",
                "top_p",
                "temperature",
                "max_completion_tokens",
                "max_prompt_tokens",
                "run_metadata",
            )
        }
        return await self.async_client.beta.threads.runs.create(
            input["thread_id"],
            assistant_id=self.assistant_id,
            **params,
        )

    async def _acreate_thread_and_run(self, input: dict, thread: dict) -> Any:
        params = {
            k: v
            for k, v in input.items()
            if k
            in (
                "instructions",
                "model",
                "tools",
                "parallel_tool_calls",
                "top_p",
                "temperature",
                "max_completion_tokens",
                "max_prompt_tokens",
                "run_metadata",
            )
        }
        run = await self.async_client.beta.threads.create_and_run(
            assistant_id=self.assistant_id,
            thread=thread,
            **params,
        )
        return run

    async def _aget_response(self, run: Any) -> Any:
        # TODO: Pagination
        import openai

        major_version = int(openai.version.VERSION.split(".")[0])
        minor_version = int(openai.version.VERSION.split(".")[1])
        version_gte_1_14 = (major_version > 1) or (
            major_version == 1 and minor_version >= 14
        )

        if run.status == "completed":
            messages = await self.async_client.beta.threads.messages.list(
                run.thread_id, order="asc"
            )
            new_messages = [msg for msg in messages if msg.run_id == run.id]
            if not self.as_agent:
                return new_messages
            answer: Any = [
                msg_content for msg in new_messages for msg_content in msg.content
            ]
            if all(
                (
                    isinstance(content, openai.types.beta.threads.TextContentBlock)
                    if version_gte_1_14
                    else isinstance(
                        content, openai.types.beta.threads.MessageContentText
                    )
                )
                for content in answer
            ):
                answer = "\n".join(content.text.value for content in answer)
            return OpenAIAssistantFinish(
                return_values={
                    "output": answer,
                    "thread_id": run.thread_id,
                    "run_id": run.id,
                },
                log="",
                run_id=run.id,
                thread_id=run.thread_id,
            )
        elif run.status == "requires_action":
            if not self.as_agent:
                return run.required_action.submit_tool_outputs.tool_calls
            actions = []
            for tool_call in run.required_action.submit_tool_outputs.tool_calls:
                function = tool_call.function
                try:
                    args = json.loads(function.arguments, strict=False)
                except JSONDecodeError as e:
                    raise ValueError(
                        f"Received invalid JSON function arguments: "
                        f"{function.arguments} for function {function.name}"
                    ) from e
                if len(args) == 1 and "__arg1" in args:
                    args = args["__arg1"]
                actions.append(
                    OpenAIAssistantAction(
                        tool=function.name,
                        tool_input=args,
                        tool_call_id=tool_call.id,
                        log="",
                        run_id=run.id,
                        thread_id=run.thread_id,
                    )
                )
            return actions
        else:
            run_info = json.dumps(run.dict(), indent=2)
            raise ValueError(
                f"Unexpected run status: {run.status}. Full run info:\n\n{run_info}"
            )

    async def _await_for_run(self, run_id: str, thread_id: str) -> Any:
        in_progress = True
        while in_progress:
            run = await self.async_client.beta.threads.runs.retrieve(
                run_id, thread_id=thread_id
            )
            in_progress = run.status in ("in_progress", "queued")
            if in_progress:
                await asyncio.sleep(self.check_every_ms / 1000)
        return run