
    Vh$                         d dl Z d dl mZ d dlmZ g dZ G d de j
                  j                        Z G d de j
                  j                        Z G d	 d
e j
                  j                        Z	y)    N)Tensor)ops)FloatFunctionalFXFloatFunctionalQFunctionalc                        e Zd ZdZd fdZd Z	 dededefdZ	 dededefdZ		 dededefd	Z
	 dededefd
Z	 ddee   dedefdZ	 dededefdZ	 dededefdZ xZS )r   a  State collector class for float operations.

    The instance of this class can be used instead of the ``torch.`` prefix for
    some operations. See example usage below.

    .. note::

        This class does not provide a ``forward`` hook. Instead, you must use
        one of the underlying functions (e.g. ``add``).

    Examples::

        >>> f_add = FloatFunctional()
        >>> a = torch.tensor(3.0)
        >>> b = torch.tensor(4.0)
        >>> f_add.add(a, b)  # Equivalent to ``torch.add(a, b)``

    Valid operation names:
        - add
        - cat
        - mul
        - add_relu
        - add_scalar
        - mul_scalar
    returnc                 h    t         |           t        j                  j	                         | _        y N)super__init__torchnnIdentityactivation_post_processself	__class__s    `/home/dcms/DCMS/lib/python3.12/site-packages/torch/ao/nn/quantized/modules/functional_modules.pyr   zFloatFunctional.__init__&   s#    ',xx'8'8':$    c                     t        d      NzYFloatFunctional is not intended to use the 'forward'. Please use the underlying operationRuntimeErrorr   xs     r   forwardzFloatFunctional.forward*       ?
 	
r   r   yc                 T    t        j                  ||      }| j                  |      }|S r   )r   addr   r   r   r   rs       r   r!   zFloatFunctional.add2   &    IIaO((+r   c                 2    t        j                  ||      }|S r   r   r!   r"   s       r   
add_scalarzFloatFunctional.add_scalar9       IIaO r   c                 T    t        j                  ||      }| j                  |      }|S r   )r   mulr   r"   s       r   r*   zFloatFunctional.mulA   r$   r   c                 2    t        j                  ||      }|S r   r   r*   r"   s       r   
mul_scalarzFloatFunctional.mul_scalarH   r(   r   dimc                 V    t        j                  ||      }| j                  |      }|S N)r.   )r   catr   r   r   r.   r#   s       r   r1   zFloatFunctional.catP   s'    IIaS!((+r   c                     t        j                  ||      }t         j                  j                  j	                  |      }| j                  |      }|S r   )r   r!   r   
functionalrelur   r"   s       r   add_reluzFloatFunctional.add_reluW   s@    IIaOHH$$Q'((+r   c                 T    t        j                  ||      }| j                  |      }|S r   )r   matmulr   r"   s       r   r8   zFloatFunctional.matmul_   s'    LLA((+r   r	   Nr   )__name__
__module____qualname____doc__r   r   r   r!   floatr'   r*   r-   listintr1   r6   r8   __classcell__r   s   @r   r   r      s    4;
 AV  6 
 @F u   AV  6 
 @F u   1T&\  F 
 <& V   D 6 f r   r   c                       e Zd ZdZd Z	 dededefdZ	 dededefdZ	 dededefdZ		 dededefd	Z
	 ddee   d
edefdZ	 dededefdZ	 dededefdZy)r   a#  module to replace FloatFunctional module before FX graph mode quantization,
    since ``activation_post_process`` will be inserted into the top level module directly.

    Valid operation names:
        - add
        - cat
        - mul
        - add_relu
        - add_scalar
        - mul_scalar
    """

    def forward(self, x):
        raise RuntimeError(
            "FloatFunctional is not intended to use the 'forward'. "
            "Please use the underlying operation"
        )

    def add(self, x: Tensor, y: Tensor) -> Tensor:
        """Operation equivalent to ``torch.add(Tensor, Tensor)``."""
        r = torch.add(x, y)
        return r

    def add_scalar(self, x: Tensor, y: float) -> Tensor:
        """Operation equivalent to ``torch.add(Tensor, float)``."""
        r = torch.add(x, y)
        return r

    def mul(self, x: Tensor, y: Tensor) -> Tensor:
        """Operation equivalent to ``torch.mul(Tensor, Tensor)``."""
        r = torch.mul(x, y)
        return r

    def mul_scalar(self, x: Tensor, y: float) -> Tensor:
        """Operation equivalent to ``torch.mul(Tensor, float)``."""
        r = torch.mul(x, y)
        return r

    def cat(self, x: list[Tensor], dim: int = 0) -> Tensor:
        """Operation equivalent to ``torch.cat``."""
        r = torch.cat(x, dim=dim)
        return r

    def add_relu(self, x: Tensor, y: Tensor) -> Tensor:
        """Operation equivalent to ``relu(torch.add(x, y))``."""
        r = torch.add(x, y)
        r = torch.nn.functional.relu(r)
        return r

    def matmul(self, x: Tensor, y: Tensor) -> Tensor:
        """Operation equivalent to ``torch.matmul(Tensor, Tensor)``."""
        r = torch.matmul(x, y)
        return r


class QFunctional(torch.nn.Module):
    r"""Wrapper class for quantized operations.

    The instance of this class can be used instead of the
    ``torch.ops.quantized`` prefix. See example usage below.

    .. note::

        This class does not provide a ``forward`` hook. Instead, you must use
        one of the underlying functions (e.g. ``add``).

    Examples::

        >>> q_add = QFunctional()
        >>> # xdoctest: +SKIP
        >>> a = torch.quantize_per_tensor(torch.tensor(3.0), 1.0, 0, torch.qint32)
        >>> b = torch.quantize_per_tensor(torch.tensor(4.0), 1.0, 0, torch.qint32)
        >>> q_add.add(a, b)  # Equivalent to ``torch.ops.quantized.add(a, b, 1.0, 0)``

    Valid operation names:
        - add
        - cat
        - mul
        - add_relu
        - add_scalar
        - mul_scalar
    """

    def __init__(self) -> None:
        super().__init__()
        self.scale = 1.0
        self.zero_point = 0
        self.activation_post_process = torch.nn.Identity()

    def _save_to_state_dict(self, destination, prefix, keep_vars):
        super()._save_to_state_dict(destination, prefix, keep_vars)
        destination[prefix + "scale"] = torch.tensor(self.scale)
        destination[prefix + "zero_point"] = torch.tensor(self.zero_point)

    def _load_from_state_dict(
        self,
        state_dict,
        prefix,
        local_metadata,
        strict,
        missing_keys,
        unexpected_keys,
        error_msgs,
    ):
        self.scale = float(state_dict.pop(prefix + "scale"))
        self.zero_point = int(state_dict.pop(prefix + "zero_point"))
        super()._load_from_state_dict(
            state_dict,
            prefix,
            local_metadata,
            False,
            missing_keys,
            unexpected_keys,
            error_msgs,
        )

    def _get_name(self):
        return "QFunctional"

    def extra_repr(self):
        return f"scale={self.scale}, zero_point={self.zero_point}"

    def forward(self, x):
        raise RuntimeError(
            "Functional is not intended to use the 'forward'. "
            "Please use the underlying operation"
        )

    def add(self, x: Tensor, y: Tensor) -> Tensor:
        """Operation equivalent to ``torch.ops.quantized.add``."""
        r = ops.quantized.add(x, y, scale=self.scale, zero_point=self.zero_point)
        r = self.activation_post_process(r)
        return r

    def add_scalar(self, x: Tensor, y: float) -> Tensor:
        """Operation equivalent to ``torch.ops.quantized.add_scalar``."""
        r = ops.quantized.add_scalar(x, y)
        # Note: this operation is not observed because observation is not
        # required for the quantized op.
        return r

    def mul(self, x: Tensor, y: Tensor) -> Tensor:
        """Operation equivalent to ``torch.ops.quantized.mul``."""
        r = ops.quantized.mul(x, y, scale=self.scale, zero_point=self.zero_point)
        r = self.activation_post_process(r)
        return r

    def mul_scalar(self, x: Tensor, y: float) -> Tensor:
        """Operation equivalent to ``torch.ops.quantized.mul_scalar``."""
        r = ops.quantized.mul_scalar(x, y)
        # Note: this operation is not observed because observation is not
        # required for the quantized op.
        return r

    def cat(self, x: list[Tensor], dim: int = 0) -> Tensor:
        """Operation equivalent to ``torch.ops.quantized.cat``."""
        r = ops.quantized.cat(x, scale=self.scale, zero_point=self.zero_point, dim=dim)
        r = self.activation_post_process(r)
        return r

    def add_relu(self, x: Tensor, y: Tensor) -> Tensor:
        """Operation equivalent to ``torch.ops.quantized.add_relu``."""
        r = ops.quantized.add_relu(x, y, scale=self.scale, zero_point=self.zero_point)
        r = self.activation_post_process(r)
        return r

    def matmul(self, x: Tensor, y: Tensor) -> Tensor:
        """Operation equivalent to ``torch.ops.quantized.matmul``."""
        r = ops.quantized.matmul(x, y, scale=self.scale, zero_point=self.zero_point)
        # Note: this operation is not observed because observation is not
        # required for the quantized op.
        return r

    @classmethod
    def from_float(cls, mod, use_precomputed_fake_quant=False):
        assert (
            type(mod) == FloatFunctional
        ), "QFunctional.from_float expects an instance of FloatFunctional"
        scale, zero_point = mod.activation_post_process.calculate_qparams()
        new_mod = QFunctional()
        new_mod.scale = float(scale)
        new_mod.zero_point = int(zero_point)
        return new_mod
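

# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): how ``from_float``
# turns a calibrated ``FloatFunctional`` into a ``QFunctional``.  In a real
# workflow ``torch.ao.quantization.prepare``/``convert`` perform these steps;
# the manual ``MinMaxObserver`` wiring below only spells out what they do to
# this particular module, and the tensors are made-up calibration data.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from torch.ao.quantization import MinMaxObserver

    ff = FloatFunctional()
    # ``prepare`` would normally swap the Identity hook for an observer.
    ff.activation_post_process = MinMaxObserver()

    # Calibration: run representative float data through the op.
    for _ in range(4):
        ff.add(torch.randn(8), torch.randn(8))

    # ``convert`` calls ``from_float`` to bake the observed qparams in.
    qf = QFunctional.from_float(ff)
    print(qf)  # QFunctional(scale=..., zero_point=...)

    a = torch.quantize_per_tensor(torch.randn(8), qf.scale, qf.zero_point, torch.quint8)
    b = torch.quantize_per_tensor(torch.randn(8), qf.scale, qf.zero_point, torch.quint8)
    print(qf.add(a, b))  # quantized result carrying the converted scale/zero_point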