from collections.abc import Generator
from contextlib import AbstractContextManager, contextmanager, nullcontext
from typing import Any, Optional

import torch
import torch.nn as nn
from torch.utils.checkpoint import (
    _checkpoint_without_reentrant_generator,
    _DEFAULT_DETERMINISM_MODE,
)

from .contract import _State, contract


@contextmanager
def _no_hook(
    module: nn.Module, user_ctx: Optional[AbstractContextManager] = None
) -> Generator[None, None, None]:
    r"""
    Disable hooks installed by checkpoint to avoid unintentional recursion
    during backward recomputation.
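
    If ``user_ctx`` is provided, it is entered around the region in which
    the hooks are disabled.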
    """
    with user_ctx if user_ctx else nullcontext():
        orig_enable_hook = checkpoint.state(module).enable_hook
        checkpoint.state(module).enable_hook = False
        try:
            yield
        finally:
            checkpoint.state(module).enable_hook = orig_enable_hook


class _CheckpointState(_State):
    enable_hook: bool = False
    _ac_generator: Optional[Generator[None, None, None]] = None


@contract(state_cls=_CheckpointState)
def checkpoint(module: nn.Module, **kwargs) -> nn.Module:
    r"""
    This is a composable activation checkpointing API. Unlike functional
    activation checkpointing APIs, this one does not require changing model
    source code. Unlike ``nn.Module`` wrapper activation checkpointing APIs,
    this one does not modify model structure or fully-qualified names either.
    Under the hood, it registers activation checkpointing logic as pre- and
    post-forward hooks. Hence, this API can be easily applied to any model or
    sub-modules in the model.

    Args:
        module (nn.Module): the target model or sub-module to which
            activation checkpointing is applied.

    Example::
        >>> # xdoctest: +SKIP
        >>> import torch.nn as nn
        >>>
        >>> class MyModel(nn.Module):
        >>>     def __init__(self) -> None:
        >>>         super().__init__()
        >>>         self.l1 = nn.Linear(10, 10)
        >>>         self.l2 = nn.Linear(10, 10)
        >>>
        >>>     def forward(self, x):
        >>>         return self.l2(self.l1(x))
        >>>
        >>> model = MyModel()
        >>> checkpoint(model.l1)  # apply activation checkpointing only to l1
        >>> model(torch.zeros(2, 10)).sum().backward()

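    The remaining keyword arguments mirror ``torch.utils.checkpoint.checkpoint``:
    ``preserve_rng_state`` (default ``True``), ``context_fn``,
    ``determinism_check``, and ``debug``. Passing ``use_reentrant=True`` is not
    supported and raises ``NotImplementedError``; any other unexpected keyword
    raises ``ValueError``.
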
    ztorch.distributed.checkpointuse_reentrantFzsuse_reentrant=True is not supported in composable checkpoint. Please use torch.utils.checkpoint.checkpoint instead.preserve_rng_stateT
context_fnNdeterminism_checkdebugzUnexpected keyword arguments: ,c              3       K   | ]  }|  y wNr   ).0args     r   	<genexpr>zcheckpoint.<locals>.<genexpr>U   s     7N7Ns   r   args.kwargsr!   c                      t         j                         j                  r? fd}t         |g|i |}|t         j                         _        t        |       y y )Nc                  b            \  } }| t        |      fS t               t              fS r*   )r   r   )ctx1ctx2r   user_context_fnss     r   context_fnsz9checkpoint.<locals>.forward_pre_hook.<locals>.context_fns]   s9    #/!1!3JD$&$!777&=(6*:::r    )r   r   r   r	   r   next)	r   r.   r/   r5   genr'   r&   r$   r4   s	   `    r   forward_pre_hookz$checkpoint.<locals>.forward_pre_hookX   sn     F#//; :"!  C 69JV$2I' 0r    inputsoutputc                    t         j                  |       j                  r:	 t         j                  |       j                  }|J t	        |       t        d      d t         j                  |       _        y # t        $ r Y &w xY w)NzWExpected non-reentrant activation checkpoint generator to be exhausted, but it was not!)r   r   r   r   r6   RuntimeErrorStopIteration)r   r9   r:   r7   s       r   forward_hookz checkpoint.<locals>.forward_hookp   s}    F#//	 &&v.<<&S	 #m  26
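
    # `_checkpoint_without_reentrant_generator` yields exactly once:
    # `forward_pre_hook` above advances it to perform the pre-forward
    # checkpoint setup, and `forward_hook` below resumes it after forward,
    # treating StopIteration as the expected signal that post-forward
    # cleanup ran and the generator is exhausted.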
 . ! s   .A5 5	B B)with_kwargs)prependalways_call)torch_C_log_api_usage_oncepopNotImplementedErrorr
   
ValueErrorjoinnnModuletupler   dictstrr   r   r   register_forward_pre_hookregister_forward_hook)	r   r/   r#   r8   r>   r'   r&   r$   r4   s	        @@@@r   r   r   %   s]   B 
HH  !?@JJ6M!D
 	
  $8$?zz,5

#68QRJJw&E,sxx7Nv7N/NN
 	
		!&sCx:>sCx.	 06RYY 6c3h 6 6QT 6" ,0JV(
$$%54$H
  t NMr    r*   )collections.abcr   
contextlibr   r   r   typingr   r   rB   torch.nnrI   torch.utils.checkpointr	   r
   r   r   rJ   r   r   r   r   r    r   <module>rU      s    % J J    
 ' DRYY D(3I*J D D9v 9
 

^ryy ^ryy ^ ^r    
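

# A minimal usage sketch (illustrative, not part of the public API): the model,
# layer sizes, and input shape below are assumptions mirroring the docstring
# example. Since this module uses a relative import (`.contract`), the demo
# only runs in-package, e.g. via
# `python -m torch.distributed._composable.checkpoint_activation`.
if __name__ == "__main__":

    class _TwoLinear(nn.Module):
        def __init__(self) -> None:
            super().__init__()
            self.l1 = nn.Linear(10, 10)
            self.l2 = nn.Linear(10, 10)

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            return self.l2(self.l1(x))

    _model = _TwoLinear()
    # Only l1's activations are freed and recomputed during backward; l2 keeps
    # its activations as usual.
    checkpoint(_model.l1)
    _model(torch.zeros(2, 10)).sum().backward()
    print("backward with activation checkpointing on l1 succeeded")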