
import traceback
from typing import Any, NamedTuple, Optional

import torch
import torch.fx
from torch._dispatch.python import enable_python_dispatcher
from torch._guards import detect_fake_mode
from torch._subclasses.meta_utils import is_sparse_any
from torch.fx._compatibility import compatibility
from torch.fx.node import map_aggregate, Node

__all__ = ["TensorMetadata", "ShapeProp"]


@compatibility(is_backward_compatible=True)
class TensorMetadata(NamedTuple):
    # TensorMetadata is a structure containing pertinent information
    # about a tensor within a PyTorch program.

    # General tensor metadata
    shape: torch.Size
    dtype: torch.dtype
    requires_grad: bool
    stride: tuple[int, ...]
    memory_format: Optional[torch.memory_format]

    # Quantization metadata
    is_quantized: bool
    qparams: dict[str, Any]


def _extract_tensor_metadata(
    result: torch.Tensor, include_contiguity=True
) -> TensorMetadata:
    """
    Extract a TensorMetadata NamedTuple describing `result`.
    """
    shape = result.shape
    dtype = result.dtype
    requires_grad = result.requires_grad
    stride = result.stride() if not is_sparse_any(result) else ()

    memory_format = None

    if include_contiguity and not is_sparse_any(result):
        memory_formats = {
            torch.contiguous_format,
            torch.channels_last,
            torch.channels_last_3d,
        }
        for query_format in memory_formats:
            if result.is_contiguous(memory_format=query_format):
                memory_format = query_format
                break

    is_quantized = result.is_quantized
    qparams: dict[str, Any] = {}
    if is_quantized:
        qscheme = result.qscheme()
        qparams["qscheme"] = qscheme
        if qscheme in {torch.per_tensor_affine, torch.per_tensor_symmetric}:
            qparams["scale"] = result.q_scale()
            qparams["zero_point"] = result.q_zero_point()
        elif qscheme in {
            torch.per_channel_affine,
            torch.per_channel_affine_float_qparams,
            torch.per_channel_symmetric,
        }:
            # Per-channel scale/zero_point are stored as plain lists so the
            # metadata is easy to serialize downstream.
            qparams["scale"] = result.q_per_channel_scales().tolist()
            qparams["zero_point"] = result.q_per_channel_zero_points().tolist()
            qparams["axis"] = result.q_per_channel_axis()

    return TensorMetadata(
        shape, dtype, requires_grad, stride, memory_format, is_quantized, qparams
    )


@compatibility(is_backward_compatible=True)
class ShapeProp(torch.fx.Interpreter):
    """
    Execute an FX graph node by node and
    record the shape and type of each node's
    result in that node's ``meta``.

    Example:
         In this example, we record the shape
         and data type of a module given
         an example input ``torch.randn(50, D_in)``.
         We print the name, shape and dtype of each node.

        class TwoLayerNet(torch.nn.Module):
            def __init__(self, D_in, H, D_out):
                super().__init__()
                self.linear1 = torch.nn.Linear(D_in, H)
                self.linear2 = torch.nn.Linear(H, D_out)
            def forward(self, x):
                h_relu = self.linear1(x).clamp(min=0)
                y_pred = self.linear2(h_relu)
                return y_pred
        N, D_in, H, D_out = 64, 1000, 100, 10
        x = torch.randn(N, D_in)
        y = torch.randn(N, D_out)
        model = TwoLayerNet(D_in, H, D_out)
        gm = torch.fx.symbolic_trace(model)
        sample_input = torch.randn(50, D_in)
        ShapeProp(gm).propagate(sample_input)

        for node in gm.graph.nodes:
            print(node.name, node.meta['tensor_meta'].dtype,
                node.meta['tensor_meta'].shape)

        The output of this code is:

        x torch.float32 torch.Size([50, 1000])
        linear1 torch.float32 torch.Size([50, 100])
        clamp_1 torch.float32 torch.Size([50, 100])
        linear2 torch.float32 torch.Size([50, 10])
        output torch.float32 torch.Size([50, 10])
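
        ShapeProp can also run without real computation: if a ``FakeTensorMode``
        is passed (or one is detected via ``detect_fake_mode``), the module is
        copied onto fake tensors and shapes are recorded from fake execution.
        An illustrative variant of the example above:

            from torch._subclasses.fake_tensor import FakeTensorMode
            ShapeProp(gm, FakeTensorMode()).propagate(sample_input)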

    Args:
         module (GraphModule): The module to be executed
         fake_mode (FakeTensorMode): A fake mode for copying the gm

    """

    def __init__(self, gm, fake_mode=None):
        super().__init__(gm)
        if fake_mode is None:
            fake_mode = detect_fake_mode()
        if fake_mode is not None:
            from torch._dynamo.utils import deepcopy_to_fake_tensor

            # Execution happens on a faked copy of the module so that fake
            # inputs work, while tensor_meta is still recorded on the nodes of
            # the real module.
            self.fake_module = deepcopy_to_fake_tensor(self.module, fake_mode)
            self.fake_mode = fake_mode
        else:
            self.fake_module = None
            self.fake_mode = None

        self.real_module = self.module

    def run_node(self, n: Node) -> Any:
        from torch.fx.experimental.symbolic_shapes import (
            compute_unbacked_bindings,
            rebind_unbacked,
        )

        try:
            if self.fake_module is not None:
                # Temporarily swap in the faked module so execution sees fake
                # parameters and buffers.
                self.module = self.fake_module
            try:
                if self.fake_mode is not None:
                    with self.fake_mode, enable_python_dispatcher():
                        result = super().run_node(n)
                        rebind_unbacked(self.fake_mode.shape_env, n, result)
                else:
                    result = super().run_node(n)
            finally:
                self.module = self.real_module
        except Exception as e:
            traceback.print_exc()
            raise RuntimeError(
                f"ShapeProp error for: node={n.format_node()} with meta={n.meta}"
            ) from e

        found_tensor = False

        def extract_tensor_meta(obj):
            if isinstance(obj, torch.Tensor):
                nonlocal found_tensor
                found_tensor = True
                return _extract_tensor_metadata(obj)
            else:
                return obj

        meta = map_aggregate(result, extract_tensor_meta)
        if found_tensor:
            n.meta["tensor_meta"] = meta

        if self.fake_mode and (shape_env := self.fake_mode.shape_env):
            if symbol_to_path := compute_unbacked_bindings(shape_env, result):
                n.meta["unbacked_bindings"] = symbol_to_path

        n.meta["type"] = type(result)
        return result

    def propagate(self, *args):
        """
        Run `module` via interpretation, recording the shape and type of
        each node's result, and return the overall execution result.

        Args:
            *args (Tensor): the sample input.

        Returns:
            Any: The value returned from executing the Module
        """
        if self.fake_mode is not None:
            fake_args = [
                self.fake_mode.from_tensor(t) if isinstance(t, torch.Tensor) else t
                for t in args
            ]
        else:
            fake_args = args
        return super().run(*fake_args)
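

# The block below is an illustrative usage sketch, not part of the original
# module: it builds a small GraphModule (the layer sizes and input shape are
# arbitrary), runs ShapeProp over it, and prints the TensorMetadata recorded
# on each node.
if __name__ == "__main__":

    class _TwoLayer(torch.nn.Module):
        def __init__(self) -> None:
            super().__init__()
            self.linear1 = torch.nn.Linear(8, 4)
            self.linear2 = torch.nn.Linear(4, 2)

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            return self.linear2(self.linear1(x).relu())

    _gm = torch.fx.symbolic_trace(_TwoLayer())
    ShapeProp(_gm).propagate(torch.randn(3, 8))

    for _node in _gm.graph.nodes:
        # "tensor_meta" is only present on nodes whose result contained a tensor.
        _tm = _node.meta.get("tensor_meta")
        if _tm is not None:
            print(_node.name, _tm.dtype, _tm.shape, _tm.stride)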