
import logging
import pdb
import sys
import traceback
import typing

import torch

log = logging.getLogger(__name__)


def is_available() -> bool:
    """
    Return ``True`` if the distributed package is available.

    Otherwise, ``torch.distributed`` does not expose any other APIs.
    Currently, ``torch.distributed`` is available on Linux, MacOS and
    Windows. Set ``USE_DISTRIBUTED=1`` to enable it when building PyTorch
    from source. Currently, the default value is ``USE_DISTRIBUTED=1`` for
    Linux and Windows, ``USE_DISTRIBUTED=0`` for MacOS.
    """
    return hasattr(torch._C, "_c10d_init")
if is_available() and not torch._C._c10d_init():
    raise RuntimeError("Failed to initialize torch.distributed")

# Custom runtime errors thrown from the c10d layer, re-exported under
# public names.
DistError = torch._C._DistError
DistBackendError = torch._C._DistBackendError
DistNetworkError = torch._C._DistNetworkError
DistStoreError = torch._C._DistStoreError
QueueEmptyError = torch._C._DistQueueEmptyError

if is_available():
    from torch._C._distributed_c10d import (
        _broadcast_coalesced,
        _compute_bucket_assignment_by_size,
        _ControlCollectives,
        _DEFAULT_FIRST_BUCKET_BYTES,
        _make_nccl_premul_sum,
        _register_builtin_comm_hook,
        _register_comm_hook,
        _StoreCollectives,
        _test_python_store,
        _verify_params_across_processes,
        Backend as _Backend,
        BuiltinCommHookType,
        DebugLevel,
        FileStore,
        get_debug_level,
        GradBucket,
        Logger,
        PrefixStore,
        ProcessGroup,
        Reducer,
        set_debug_level,
        set_debug_level_from_env,
        Store,
        TCPStore,
        Work as _Work,
    )
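
    # Usage sketch (illustrative): the Dist*Error aliases defined above let
    # callers catch c10d failures as ordinary Python exceptions; per the
    # torch.distributed docs these types derive from DistError, itself a
    # RuntimeError.
    #
    #     import torch.distributed as dist
    #
    #     try:
    #         dist.init_process_group(backend="nccl")
    #     except dist.DistBackendError:
    #         ...  # e.g. NCCL could not be initialized on this host
    #     except dist.DistNetworkError:
    #         ...  # e.g. the TCPStore could not reach the master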
    class _DistributedPdb(pdb.Pdb):
        """
        Supports using PDB from inside a multiprocessing child process.

        Usage:
        _DistributedPdb().set_trace()
        """

        def interaction(self, *args, **kwargs):
            # pdb reads from sys.stdin, which multiprocessing replaces;
            # temporarily point it back at the controlling terminal.
            _stdin = sys.stdin
            try:
                sys.stdin = open("/dev/stdin")
                pdb.Pdb.interaction(self, *args, **kwargs)
            finally:
                sys.stdin = _stdin

    _breakpoint_cache: dict[int, typing.Any] = {}

    def breakpoint(rank: int = 0, skip: int = 0):
        """
        Set a breakpoint, but only on a single rank.  All other ranks will
        wait for you to be done with the breakpoint before continuing.

        Args:
            rank (int): Which rank to break on.  Default: ``0``
            skip (int): Skip the first ``skip`` calls to this breakpoint. Default: ``0``.
        """
        if skip > 0:
            key = hash(str(traceback.format_exc()))
            counter = _breakpoint_cache.get(key, 0) + 1
            _breakpoint_cache[key] = counter
            if counter <= skip:
                log.warning("Skip the breakpoint, counter=%d", counter)
                return

        if get_rank() == rank:
            pdb = _DistributedPdb()
            pdb.message(
                "\n!!! ATTENTION !!!\n\n"
                f"Type 'up' to get to the frame that called dist.breakpoint(rank={rank})\n"
            )
            pdb.set_trace()
        # If Meta/Python keys are in the TLS, make sure we ignore them and hit
        # the (default) CPU/CUDA implementation of barrier.
        meta_in_tls = torch._C._meta_in_tls_dispatch_include()
        guard = torch._C._DisableTorchDispatch()  # type: ignore[attr-defined]
        torch._C._set_meta_in_tls_dispatch_include(False)
        try:
            barrier()
        finally:
            torch._C._set_meta_in_tls_dispatch_include(meta_in_tls)
            del guard
D2 2"Ewin32)	HashStorer?   )
DeviceMeshinit_device_mesh)*)	_all_gather_base_coalescing_manager_CoalescingManager_create_process_group_wrapper_get_process_group_name_rank_not_in_group_reduce_scatter_base_time_estimatorget_node_local_rank)_remote_device)_create_store_from_optionsregister_rendezvous_handler
rendezvousc                       \ rS rSrSrg)_ProcessGroupStub   r	   N)r5   r6   r7   r8   r:   r	   r
   r   rf   rf      s    r
   rf   ztorch.distributed)r   r   )Rloggingr-   r*   rB   typingr   	getLoggerr5   rE   boolr   r   r   RuntimeError