"""Git LFS related type definitions and utilities"""
import inspect
import io
import os
import re
import warnings
from contextlib import AbstractContextManager
from dataclasses import dataclass
from math import ceil
from os.path import getsize
from pathlib import Path
from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, Optional, Tuple, TypedDict
from urllib.parse import unquote

from huggingface_hub.constants import ENDPOINT, HF_HUB_ENABLE_HF_TRANSFER, REPO_TYPES_URL_PREFIXES

from .utils import (
    build_hf_headers,
    get_session,
    hf_raise_for_status,
    http_backoff,
    logging,
    tqdm,
    validate_hf_hub_args,
)
from .utils.sha import sha256, sha_fileobj


if TYPE_CHECKING:
    from ._commit_api import CommitOperationAdd

logger = logging.get_logger(__name__)

OID_REGEX = re.compile(r"^[0-9a-f]{40}$")

LFS_MULTIPART_UPLOAD_COMMAND = "lfs-multipart-upload"

LFS_HEADERS = {
    "Accept": "application/vnd.git-lfs+json",
    "Content-Type": "application/vnd.git-lfs+json",
}


@dataclass
class UploadInfo:
    """
    Dataclass holding required information to determine whether a blob
    should be uploaded to the hub using the LFS protocol or the regular protocol

    Args:
        sha256 (`bytes`):
            SHA256 hash of the blob
        size (`int`):
            Size in bytes of the blob
        sample (`bytes`):
            First 512 bytes of the blob
    """

    sha256: bytes
    size: int
    sample: bytes

    @classmethod
    def from_path(cls, path: str):
        size = getsize(path)
        with io.open(path, "rb") as file:
            sample = file.peek(512)[:512]
            sha = sha_fileobj(file)
        return cls(size=size, sha256=sha, sample=sample)

    @classmethod
    def from_bytes(cls, data: bytes):
        sha = sha256(data).digest()
        return cls(size=len(data), sample=data[:512], sha256=sha)

    @classmethod
    def from_fileobj(cls, fileobj: BinaryIO):
        sample = fileobj.read(512)
        fileobj.seek(0, io.SEEK_SET)
        sha = sha_fileobj(fileobj)
        size = fileobj.tell()
        fileobj.seek(0, io.SEEK_SET)
        return cls(size=size, sha256=sha, sample=sample)


@validate_hf_hub_args
def post_lfs_batch_info(
    upload_infos: Iterable[UploadInfo],
    token: Optional[str],
    repo_type: str,
    repo_id: str,
    revision: Optional[str] = None,
    endpoint: Optional[str] = None,
    headers: Optional[Dict[str, str]] = None,
) -> Tuple[List[dict], List[dict]]:
    """
    Requests the LFS batch endpoint to retrieve upload instructions

    Learn more: https://github.com/git-lfs/git-lfs/blob/main/docs/api/batch.md

    Args:
        upload_infos (`Iterable` of `UploadInfo`):
            `UploadInfo` for the files that are being uploaded, typically obtained
            from `CommitOperationAdd.upload_info`
        repo_type (`str`):
            Type of the repo to upload to: `"model"`, `"dataset"` or `"space"`.
        repo_id (`str`):
            A namespace (user or an organization) and a repo name separated
            by a `/`.
        revision (`str`, *optional*):
            The git revision to upload to.
        headers (`dict`, *optional*):
            Additional headers to include in the request

    Returns:
        `LfsBatchInfo`: 2-tuple:
            - First element is the list of upload instructions from the server
            - Second element is a list of errors, if any

    Raises:
        `ValueError`: If an argument is invalid or the server response is malformed

        `HTTPError`: If the server returned an error
    """
    endpoint = endpoint if endpoint is not None else ENDPOINT
    url_prefix = ""
    if repo_type in REPO_TYPES_URL_PREFIXES:
        url_prefix = REPO_TYPES_URL_PREFIXES[repo_type]
    batch_url = f"{endpoint}/{url_prefix}{repo_id}.git/info/lfs/objects/batch"
    payload = {
        "operation": "upload",
        "transfers": ["basic", "multipart"],
        "objects": [
            {
                "oid": upload.sha256.hex(),
                "size": upload.size,
            }
            for upload in upload_infos
        ],
        "hash_algo": "sha256",
    }
    if revision is not None:
        payload["ref"] = {"name": unquote(revision)}  # revision has been previously 'quoted'

    headers = {
        **LFS_HEADERS,
        **build_hf_headers(token=token),
        **(headers or {}),
    }
    resp = get_session().post(batch_url, headers=headers, json=payload)
    hf_raise_for_status(resp)
    batch_info = resp.json()

    objects = batch_info.get("objects", None)
    if not isinstance(objects, list):
        raise ValueError("Malformed response from server")

    return (
        [_validate_batch_actions(obj) for obj in objects if "error" not in obj],
        [_validate_batch_error(obj) for obj in objects if "error" in obj],
    )


class PayloadPartT(TypedDict):
    partNumber: int
    etag: str


class CompletionPayloadT(TypedDict):
    """Payload that will be sent to the Hub when uploading multi-part."""

    oid: str
    parts: List[PayloadPartT]
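# Illustrative sketch (not part of the original module; etags and oid are hypothetical values):
# a `CompletionPayloadT`, as assembled by `_get_completion_payload` further below, looks like:
#
#     {
#         "oid": "a3f2...",  # hex-encoded SHA256 of the uploaded blob
#         "parts": [
#             {"partNumber": 1, "etag": '"9b2cf535f27731c974343645a3985328"'},
#             {"partNumber": 2, "etag": '"6805f2cfc46c0f04559748bb039d69ae"'},
#         ],
#     }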
def lfs_upload(
    operation: "CommitOperationAdd",
    lfs_batch_action: Dict,
    token: Optional[str] = None,
    headers: Optional[Dict[str, str]] = None,
) -> None:
    """
    Handles uploading a given object to the Hub with the LFS protocol.

    Can be a No-op if the content of the file is already present on the hub large file storage.

    Args:
        operation (`CommitOperationAdd`):
            The add operation triggering this upload.
        lfs_batch_action (`dict`):
            Upload instructions from the LFS batch endpoint for this object. See [`~utils.lfs.post_lfs_batch_info`] for
            more details.
        headers (`dict`, *optional*):
            Headers to include in the request, including authentication and user agent headers.

    Raises:
        - `ValueError` if `lfs_batch_action` is improperly formatted
        - `HTTPError` if the upload resulted in an error
    """
    # 0. If the LFS file is already present upstream, skip the upload
    actions = lfs_batch_action.get("actions")
    if actions is None:
        logger.debug(f"Content of file {operation.path_in_repo} is already present upstream - skipping upload")
        return

    # 1. Validate server response (check required keys are present)
    upload_action = _validate_lfs_action(lfs_batch_action["actions"]["upload"])
    verify_action = lfs_batch_action["actions"].get("verify")
    if verify_action is not None:
        verify_action = _validate_lfs_action(verify_action)

    # 2. Upload file (single part or multi-part, depending on whether a `chunk_size` is returned)
    header = upload_action.get("header", {})
    chunk_size = header.get("chunk_size")
    upload_url = upload_action["href"]
    if chunk_size is not None:
        try:
            chunk_size = int(chunk_size)
        except (ValueError, TypeError):
            raise ValueError(
                f"Malformed response from LFS batch endpoint: `chunk_size` should be an integer. Got '{chunk_size}'."
            )
        _upload_multi_part(operation=operation, header=header, chunk_size=chunk_size, upload_url=upload_url)
    else:
        _upload_single_part(operation=operation, upload_url=upload_url)

    # 3. Verify the upload went well
    if verify_action is not None:
        verify_resp = get_session().post(
            verify_action["href"],
            headers={**LFS_HEADERS, **build_hf_headers(token=token), **(headers or {})},
            json={"oid": operation.upload_info.sha256.hex(), "size": operation.upload_info.size},
        )
        hf_raise_for_status(verify_resp)
    logger.debug(f"{operation.path_in_repo}: Upload successful")


def _validate_lfs_action(lfs_action: dict):
    """validates response from the LFS batch endpoint"""
    if not (
        isinstance(lfs_action.get("href"), str)
        and (lfs_action.get("header") is None or isinstance(lfs_action.get("header"), dict))
    ):
        raise ValueError("lfs_action is improperly formatted")
    return lfs_action


def _validate_batch_actions(lfs_batch_actions: dict):
    """validates response from the LFS batch endpoint"""
    if not (isinstance(lfs_batch_actions.get("oid"), str) and isinstance(lfs_batch_actions.get("size"), int)):
        raise ValueError("lfs_batch_actions is improperly formatted")
    upload_action = lfs_batch_actions.get("actions", {}).get("upload")
    verify_action = lfs_batch_actions.get("actions", {}).get("verify")
    if upload_action is not None:
        _validate_lfs_action(upload_action)
    if verify_action is not None:
        _validate_lfs_action(verify_action)
    return lfs_batch_actions


def _validate_batch_error(lfs_batch_error: dict):
    """validates response from the LFS batch endpoint"""
    if not (isinstance(lfs_batch_error.get("oid"), str) and isinstance(lfs_batch_error.get("size"), int)):
        raise ValueError("lfs_batch_error is improperly formatted")
    error_info = lfs_batch_error.get("error")
    if not (
        isinstance(error_info, dict)
        and isinstance(error_info.get("message"), str)
        and isinstance(error_info.get("code"), int)
    ):
        raise ValueError("lfs_batch_error is improperly formatted")
    return lfs_batch_error


def _upload_single_part(operation: "CommitOperationAdd", upload_url: str) -> None:
    """
    Uploads `fileobj` as a single PUT HTTP request (basic LFS transfer protocol)

    Args:
        upload_url (`str`):
            The URL to PUT the file to.
        fileobj:
            The file-like object holding the data to upload.

    Returns: `requests.Response`

    Raises: `requests.HTTPError` if the upload resulted in an error
    """
    with operation.as_file(with_tqdm=True) as fileobj:
        # S3 might raise a transient 500 error -> let's retry if that happens
        response = http_backoff("PUT", upload_url, data=fileobj, retry_on_status_codes=(500, 502, 503, 504))
        hf_raise_for_status(response)
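# Illustrative sketch (assumed values, not from the original module): for a multi-part upload the
# LFS batch endpoint returns an upload `header` dict that maps part numbers (as string keys) to
# presigned part URLs, alongside the chunk size, e.g.:
#
#     header = {
#         "chunk_size": "16000000",
#         "1": "https://bucket.example/part-1?signature=...",
#         "2": "https://bucket.example/part-2?signature=...",
#     }
#
# `_get_sorted_parts_urls` below keeps only the digit keys, sorts them numerically and checks that
# the number of parts equals `ceil(upload_info.size / chunk_size)`.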
def _upload_multi_part(operation: "CommitOperationAdd", header: Dict, chunk_size: int, upload_url: str) -> None:
    """
    Uploads file using HF multipart LFS transfer protocol.
    """
    # 1. Get upload URLs for each part of the file
    sorted_parts_urls = _get_sorted_parts_urls(header=header, upload_info=operation.upload_info, chunk_size=chunk_size)

    # 2. Upload parts (either with hf_transfer or in pure Python)
    use_hf_transfer = HF_HUB_ENABLE_HF_TRANSFER
    if (
        HF_HUB_ENABLE_HF_TRANSFER
        and not isinstance(operation.path_or_fileobj, str)
        and not isinstance(operation.path_or_fileobj, Path)
    ):
        warnings.warn(
            "hf_transfer is enabled but does not support uploading from bytes or BinaryIO, falling back to regular"
            " upload"
        )
        use_hf_transfer = False

    response_headers = (
        _upload_parts_hf_transfer(operation=operation, sorted_parts_urls=sorted_parts_urls, chunk_size=chunk_size)
        if use_hf_transfer
        else _upload_parts_iteratively(operation=operation, sorted_parts_urls=sorted_parts_urls, chunk_size=chunk_size)
    )

    # 3. Send completion request
    completion_res = get_session().post(
        upload_url,
        json=_get_completion_payload(response_headers, operation.upload_info.sha256.hex()),
        headers=LFS_HEADERS,
    )
    hf_raise_for_status(completion_res)


def _get_sorted_parts_urls(header: Dict, upload_info: UploadInfo, chunk_size: int) -> List[str]:
    sorted_part_upload_urls = [
        upload_url
        for _, upload_url in sorted(
            [
                (int(part_num, 10), upload_url)
                for part_num, upload_url in header.items()
                if part_num.isdigit() and len(part_num) > 0
            ],
            key=lambda t: t[0],
        )
    ]
    num_parts = len(sorted_part_upload_urls)
    if num_parts != ceil(upload_info.size / chunk_size):
        raise ValueError("Invalid server response to upload large LFS file")
    return sorted_part_upload_urls


def _get_completion_payload(response_headers: List[Dict], oid: str) -> CompletionPayloadT:
    parts: List[PayloadPartT] = []
    for part_number, header in enumerate(response_headers):
        etag = header.get("etag")
        if etag is None or etag == "":
            raise ValueError(f"Invalid etag (`{etag}`) returned for part {part_number + 1}")
        parts.append(
            {
                "partNumber": part_number + 1,
                "etag": etag,
            }
        )
    return {"oid": oid, "parts": parts}


def _upload_parts_iteratively(
    operation: "CommitOperationAdd", sorted_parts_urls: List[str], chunk_size: int
) -> List[Dict]:
    headers = []
    with operation.as_file(with_tqdm=True) as fileobj:
        for part_idx, part_upload_url in enumerate(sorted_parts_urls):
            with SliceFileObj(fileobj, seek_from=chunk_size * part_idx, read_limit=chunk_size) as fileobj_slice:
                # S3 might raise a transient 500 error -> let's retry if that happens
                part_upload_res = http_backoff(
                    "PUT", part_upload_url, data=fileobj_slice, retry_on_status_codes=(500, 502, 503, 504)
                )
                hf_raise_for_status(part_upload_res)
                headers.append(part_upload_res.headers)
    return headers  # type: ignore


def _upload_parts_hf_transfer(
    operation: "CommitOperationAdd", sorted_parts_urls: List[str], chunk_size: int
) -> List[Dict]:
    # Upload file using an external Rust-based package. Upload is faster but supports fewer features.
    try:
        from hf_transfer import multipart_upload
    except ImportError:
        raise ValueError(
            "Fast uploading using 'hf_transfer' is enabled (HF_HUB_ENABLE_HF_TRANSFER=1) but 'hf_transfer' package is"
            " not available in your environment. Try `pip install hf_transfer`."
        )

    supports_callback = "callback" in inspect.signature(multipart_upload).parameters
    if not supports_callback:
        warnings.warn(
            "You are using an outdated version of `hf_transfer`. Consider upgrading to latest version to enable"
            " progress bars using `pip install -U hf_transfer`."
        )

    total = operation.upload_info.size
    desc = operation.path_in_repo
    if len(desc) > 40:
        desc = f"(…){desc[-40:]}"

    # Disable the progress bar entirely when the logger's effective level is NOTSET, keep tqdm's default otherwise
    disable = True if (logger.getEffectiveLevel() == logging.NOTSET) else None

    with tqdm(unit="B", unit_scale=True, total=total, initial=0, desc=desc, disable=disable) as progress:
        try:
            output = multipart_upload(
                file_path=operation.path_or_fileobj,
                parts_urls=sorted_parts_urls,
                chunk_size=chunk_size,
                max_files=128,
                parallel_failures=127,
                max_retries=5,
                **({"callback": progress.update} if supports_callback else {}),
            )
        except Exception as e:
            raise RuntimeError(
                "An error occurred while uploading using `hf_transfer`. Consider disabling HF_HUB_ENABLE_HF_TRANSFER"
                " for better error handling."
            ) from e
        if not supports_callback:
            progress.update(total)
        return output


class SliceFileObj(AbstractContextManager):
    """
    Utility context manager to read a *slice* of a seekable file-like object as a seekable, file-like object.

    This is NOT thread safe

    Inspired by stackoverflow.com/a/29838711/593036

    Credits to @julien-c

    Args:
        fileobj (`BinaryIO`):
            A file-like object to slice. MUST implement `tell()` and `seek()` (and `read()` of course).
            `fileobj` will be reset to its original position when exiting the context manager.
        seek_from (`int`):
            The start of the slice (offset from position 0 in bytes).
        read_limit (`int`):
            The maximum number of bytes to read from the slice.

    Attributes:
        previous_position (`int`):
            The previous position

    Examples:

    Reading 200 bytes with an offset of 128 bytes from a file (ie bytes 128 to 327):
    ```python
    >>> with open("path/to/file", "rb") as file:
    ...     with SliceFileObj(file, seek_from=128, read_limit=200) as fslice:
    ...         fslice.read(...)
    ```

    Reading a file in chunks of 512 bytes
    ```python
    >>> import os
    >>> chunk_size = 512
    >>> file_size = os.getsize("path/to/file")
    >>> with open("path/to/file", "rb") as file:
    ...     for chunk_idx in range(ceil(file_size / chunk_size)):
    ...         with SliceFileObj(file, seek_from=chunk_idx * chunk_size, read_limit=chunk_size) as fslice:
    ...             chunk = fslice.read(...)

    ```
    """

    def __init__(self, fileobj: BinaryIO, seek_from: int, read_limit: int):
        self.fileobj = fileobj
        self.seek_from = seek_from
        self.read_limit = read_limit

    def __enter__(self):
        self._previous_position = self.fileobj.tell()
        end_of_stream = self.fileobj.seek(0, os.SEEK_END)
        # The actual number of bytes that can be read from the slice
        self._len = min(self.read_limit, end_of_stream - self.seek_from)
        self.fileobj.seek(self.seek_from, io.SEEK_SET)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.fileobj.seek(self._previous_position, io.SEEK_SET)

    def read(self, n: int = -1):
        pos = self.tell()
        if pos >= self._len:
            return b""
        remaining_amount = self._len - pos
        data = self.fileobj.read(remaining_amount if n < 0 else min(n, remaining_amount))
        return data

    def tell(self) -> int:
        return self.fileobj.tell() - self.seek_from

    def seek(self, offset: int, whence: int = os.SEEK_SET) -> int:
        start = self.seek_from
        end = start + self._len

        if whence in (os.SEEK_SET, os.SEEK_END):
            offset = start + offset if whence == os.SEEK_SET else end + offset
            offset = max(start, min(offset, end))
            whence = os.SEEK_SET
        elif whence == os.SEEK_CUR:
            cur_pos = self.fileobj.tell()
            offset = max(start - cur_pos, min(offset, end - cur_pos))
        else:
            raise ValueError(f"whence value {whence} is not supported")
        return self.fileobj.seek(offset, whence) - self.seek_from

    def __iter__(self):
        yield self.read(n=4 * 1024 * 1024)  # 4MB chunks