
import abc
import copy
from collections import defaultdict
from typing import Any, Optional

import torch
from torch import nn
from torch.nn.utils import parametrize
from torch.nn.utils.parametrize import type_before_parametrizations

from .utils import (
    FakeSparsity,
    get_arg_info_from_tensor_fqn,
    module_contains_param,
    module_to_fqn,
    swap_module,
)


__all__ = ["BaseSparsifier"]

SUPPORTED_MODULES = {nn.Linear}

KEYS_NOT_IN_STATE_DICT = ["module", "module_fqn", "tensor_name"]


class BaseSparsifier(abc.ABC):
    r"""Base class for all sparsifiers.

    Abstract methods that need to be implemented:

    - update_mask: Function to compute a new mask for all keys in the
        `groups`.

    Args:
        - model [nn.Module]: model to configure. The model itself is not saved
            but used for the state_dict saving / loading.
        - config [list]: configuration elements should be a dict map that includes
            `tensor_fqn` of tensors to sparsify
        - defaults [dict]: default configurations will be attached to the
            configuration. Only the keys that don't exist in the `config` will
            be updated.

    Example::

        >>> # xdoctest: +SKIP("Can't instantiate abstract class BaseSparsifier with abstract method update_mask")
        >>> config = [{'tensor_fqn': 'layer1.weight'}, {'tensor_fqn': 'linear2.weight2', 'sparsity_level': 0.5}]
        >>> defaults = {'sparsity_level': 0.7}
        >>> # model.layer1.weight will have `sparsity_level` = 0.7 (getting default)
        >>> sparsifier = BaseSparsifier(config, defaults)
    """

    def __init__(self, defaults: Optional[dict[str, Any]] = None):
        super().__init__()
        self.defaults: dict[str, Any] = defaults or {}

        self.state: dict[str, dict] = defaultdict(dict)
        self.groups: list[dict[str, Any]] = []
        self.enable_mask_update = True

    def __getstate__(self) -> dict[str, Any]:
        return {
            "defaults": self.defaults,
            "state": self.state,
            "groups": self.groups,
        }

    def __setstate__(self, state: dict[str, dict[str, Any]]) -> None:
        self.__dict__.update(state)

    def __repr__(self):
        format_string = self.__class__.__name__ + " ("
        for i, sparse_args in enumerate(self.groups):
            module = sparse_args["module"]
            format_string += "\n"
            format_string += f"\tGroup {i}\n"
            format_string += f"\t    module: {module}\n"
            for key in sorted(sparse_args.keys()):
                if key == "module":
                    continue
                format_string += f"\t    {key}: {sparse_args[key]}\n"
        format_string += ")"
        return format_string

    def state_dict(self) -> dict[str, Any]:
        r"""Returns the state of the optimizer as a :class:`dict`.

        It contains:
        * state - current state of the sparsification.
        * groups - a list containing all sparsity configuration groups
            with the key 'tensor_fqn' specifying the path to the sparsified tensor within a model
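
        Example (an illustrative sketch; the exact keys inside each group
        depend on the sparsifier's configuration)::

            >>> # xdoctest: +SKIP("requires a prepared sparsifier")
            >>> sd = sparsifier.state_dict()
            >>> sorted(sd.keys())
            ['groups', 'state']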

        TODO: Need a clean way of loading the state of the "prepared" module
        """

        groups: list[dict[str, Any]] = [
            dict(
                filter(
                    lambda key_value: key_value[0] not in KEYS_NOT_IN_STATE_DICT,
                    mg.items(),
                )
            )
            for mg in self.groups
        ]

        return {
            "state": self.state,
            "groups": groups,
        }

    def load_state_dict(self, state_dict: dict[str, Any], strict: bool = True):
        groups = copy.deepcopy(state_dict["groups"])
        states = state_dict["state"]
        for tensor_fqn, s in states.items():
            arg_info = get_arg_info_from_tensor_fqn(self.model, tensor_fqn)
            module = arg_info["module"]
            tensor_name = arg_info["tensor_name"]
            if strict and module is None:
                raise RuntimeError(f"Error loading {tensor_fqn} into the model")

            found = False
            for p in module.parametrizations[tensor_name]:
                if isinstance(p, FakeSparsity):
                    found = True
                    break
            if not found:
                p = FakeSparsity(torch.ones(getattr(module, tensor_name).shape))
                parametrize.register_parametrization(module, tensor_name, p)
            if s.get("mask", None) is not None:
                mask = s.pop("mask")
                p.mask = mask

            for mg in groups:
                if mg["tensor_fqn"] == tensor_fqn:
                    mg.update(arg_info)
        self.__setstate__({"state": states, "groups": groups})

    def make_config_from_model(
        self,
        model: nn.Module,
        SUPPORTED_MODULES: set[type] = SUPPORTED_MODULES,
    ) -> None:
        self.config = []
        stack = [model]
        while stack:
            module = stack.pop()
            for _name, child in module.named_children():
                if type(child) in SUPPORTED_MODULES:
                    module_fqn = module_to_fqn(model, child)
                    assert isinstance(module_fqn, str)  # for mypy
                    self.config.append({"tensor_fqn": module_fqn + ".weight"})
                else:
                    stack.append(child)

    def prepare(self, model, config):
        r"""Prepares a model, by adding the parametrizations.

        Note::

            The model is modified inplace. If you need to preserve the original
            model, use copy.deepcopy.
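
            For example (an illustrative sketch; ``model``, ``config`` and
            ``sparsifier`` are assumed to be defined by the caller)::

                >>> # xdoctest: +SKIP("requires a model and a config")
                >>> dense_model = copy.deepcopy(model)  # keep an unmodified copy
                >>> sparsifier.prepare(model, config)   # `model` is mutated in place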
        """
        self.model = model  # TODO: Need to figure out how to load without this.
        self.config = config

        # If no config -- try getting all the supported layers
        if self.config is None:
            self.make_config_from_model(model)

        for module_config in self.config:
            assert isinstance(module_config, dict), (
                "config elements should be dicts not modules i.e.:"
                "[{`tensor_fqn`: `foo.bar.weight`}, {`tensor_fqn`: ... }, ...]"
            )

            assert isinstance(self.defaults, dict)  # for mypy
            local_args = copy.deepcopy(self.defaults)
            local_args.update(module_config)

            tensor_fqn = local_args.get("tensor_fqn", None)
            assert tensor_fqn is not None, (
                "tensor_fqn is a required argument in the sparsity config which"
                "replaces previous `module` and [module]`fqn` arguments"
            )

            # populate all information from tensor_fqn
            info_from_tensor_fqn = get_arg_info_from_tensor_fqn(model, tensor_fqn)

            # check that whatever was put into local_args agrees with what was
            # obtained from tensor_fqn
            for key in info_from_tensor_fqn.keys():
                if key in local_args:
                    # info_from_tensor_fqn chops a leading '.' from tensor_fqn,
                    # so ignore that difference
                    assert (
                        info_from_tensor_fqn[key] == local_args[key]
                        or (
                            key == "tensor_fqn"
                            and "." + info_from_tensor_fqn[key] == local_args[key]
                        )
                    ), (
                        f"Given both `{key}` and `tensor_fqn` in the config, "
                        "it is expected them to agree!"
                    )
            local_args.update(info_from_tensor_fqn)
            self.groups.append(local_args)
        self._prepare()

    def _prepare(self, *args, **kwargs):
        r"""Adds mask parametrization to the layer weight"""
        for config in self.groups:
            module = config["module"]
            tensor_name = config["tensor_name"]
            parametrization = config.get("parametrization", FakeSparsity)
            mask = config.get("mask", torch.ones_like(getattr(module, tensor_name)))
            self.state[config["tensor_fqn"]]["mask"] = mask
            parametrize.register_parametrization(
                module, tensor_name, parametrization(mask)
            )

    def squash_mask(
        self,
        params_to_keep: Optional[tuple[str, ...]] = None,
        params_to_keep_per_layer: Optional[dict[str, tuple[str, ...]]] = None,
        *args,
        **kwargs,
    ):
        r"""Squashes the sparse masks into the appropriate tensors.

        If either the `params_to_keep` or `params_to_keep_per_layer` is set,
        the module will have a `sparse_params` dict attached to it.

        Args:
            params_to_keep: List of keys to save in the module or a dict
                            representing the modules and keys that will have
                            sparsity parameters saved
            params_to_keep_per_layer: Dict to specify the params that should be
                            saved for specific layers. The keys in the dict
                            should be the module fqn, while the values should
                            be a list of strings with the names of the variables
                            to save in the `sparse_params`

        Examples:
            >>> # xdoctest: +SKIP("locals are undefined")
            >>> # Don't save any sparse params
            >>> sparsifier.squash_mask()
            >>> hasattr(model.submodule1, 'sparse_params')
            False

            >>> # Keep sparse params per layer
            >>> sparsifier.squash_mask(
            ...     params_to_keep_per_layer={
            ...         'submodule1.linear1': ('foo', 'bar'),
            ...         'submodule2.linear42': ('baz',)
            ...     })
            >>> print(model.submodule1.linear1.sparse_params)
            {'foo': 42, 'bar': 24}
            >>> print(model.submodule2.linear42.sparse_params)
            {'baz': 0.1}

            >>> # Keep sparse params for all layers
            >>> sparsifier.squash_mask(params_to_keep=('foo', 'bar'))
            >>> print(model.submodule1.linear1.sparse_params)
            {'foo': 42, 'bar': 24}
            >>> print(model.submodule2.linear42.sparse_params)
            {'foo': 42, 'bar': 24}

            >>> # Keep some sparse params for all layers, and specific ones for
            >>> # some other layers
            >>> sparsifier.squash_mask(
            ...     params_to_keep=('foo', 'bar'),
            ...     params_to_keep_per_layer={
            ...         'submodule2.linear42': ('baz',)
            ...     })
            >>> print(model.submodule1.linear1.sparse_params)
            {'foo': 42, 'bar': 24}
            >>> print(model.submodule2.linear42.sparse_params)
            {'foo': 42, 'bar': 24, 'baz': 0.1}
        """
        for config in self.groups:
            module = config["module"]
            tensor_name = config["tensor_name"]
            parametrize.remove_parametrizations(
                module, tensor_name, leave_parametrized=True
            )
            sparse_params = {}
            if params_to_keep is not None:
                global_params = {k: config[k] for k in params_to_keep}
                sparse_params.update(global_params)
            if params_to_keep_per_layer is not None:
                params = params_to_keep_per_layer.get(config["module_fqn"], None)
                if params is not None:
                    per_layer_params = {k: config[k] for k in params}
                    sparse_params.update(per_layer_params)
            if sparse_params:
                module.sparse_params = sparse_params

    def convert(
        self,
        module: nn.Module,
        mapping: Optional[dict[type[nn.Module], type[nn.Module]]] = None,
        inplace: bool = False,
        parameterization: type[nn.Module] = FakeSparsity,
    ):
        r"""Converts submodules in input module to a different module according to `mapping`
        by calling `from_dense` method on the target module class
        Args:
            module: input module
            mapping: a dictionary that maps from source module type to target
                module type, can be overwritten to allow swapping user defined
                Modules
            inplace: carry out model transformations in-place, the original module
                is mutated
        """
        if mapping is None:
            raise NotImplementedError("Need to auto generate mapping ")
        if not inplace:
            module = copy.deepcopy(module)

        reassign = {}
        for name, mod in module.named_children():
            # leaf node: swap it if it is parametrized and has a target in `mapping`
            if (
                module_contains_param(mod, parameterization)
                and type_before_parametrizations(mod) in mapping
            ):
                reassign[name] = swap_module(mod, mapping)
            else:
                # recurse into the child module
                reassign[name] = self.convert(
                    mod,
                    mapping=mapping,
                    inplace=True,
                    parameterization=parameterization,
                )

        for key, value in reassign.items():
            module._modules[key] = value

        return module

    def step(self, use_path: bool = True) -> None:
        if not self.enable_mask_update:
            return
        with torch.no_grad():
            for config in self.groups:
                self.update_mask(**config)

    @abc.abstractmethod
    def update_mask(self, module: nn.Module, tensor_name: str, **kwargs):
        pass
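

# A minimal subclass sketch (illustrative only, not part of this module; the
# class and argument names below are hypothetical). A concrete sparsifier just
# implements `update_mask`, which receives the merged group config as kwargs:
#
#     class ThresholdSparsifier(BaseSparsifier):
#         def __init__(self, threshold: float = 0.1):
#             super().__init__(defaults={"threshold": threshold})
#
#         def update_mask(self, module, tensor_name, threshold, **kwargs):
#             # the mask registered by `_prepare` lives in `self.state`
#             mask = self.state[kwargs["tensor_fqn"]]["mask"]
#             # the dense (unmasked) weight is kept by the parametrization
#             original = getattr(module.parametrizations, tensor_name).original
#             mask.data = (original.abs() > threshold).to(mask.dtype)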