import traceback
from typing import Any, NamedTuple, Optional

import torch
import torch.fx
from torch._dispatch.python import enable_python_dispatcher
from torch._guards import detect_fake_mode
from torch._prims_common import definitely_contiguous_for_memory_format
from torch._subclasses.meta_utils import is_sparse_any
from torch.fx._compatibility import compatibility
from torch.fx.node import map_aggregate, Node

__all__ = ["TensorMetadata", "ShapeProp"]


@compatibility(is_backward_compatible=True)
class TensorMetadata(NamedTuple):
    # TensorMetadata is a structure containing pertinent information
    # about a tensor within a PyTorch program.

    # General tensor metadata
    shape: torch.Size
    dtype: torch.dtype
    requires_grad: bool
    stride: tuple[int, ...]
    memory_format: Optional[torch.memory_format]

    # Quantization metadata
    is_quantized: bool
    qparams: dict[str, Any]
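

# A minimal illustrative sketch (assumed values, not normative): for a plain
# contiguous tensor such as ``torch.randn(2, 3)``, ``_extract_tensor_metadata``
# below records roughly:
#
#   TensorMetadata(shape=torch.Size([2, 3]), dtype=torch.float32,
#                  requires_grad=False, stride=(3, 1),
#                  memory_format=torch.contiguous_format,
#                  is_quantized=False, qparams={})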


def _extract_tensor_metadata(
    result: torch.Tensor, include_contiguity=True
) -> TensorMetadata:
    """
    Extract a TensorMetadata NamedTuple describing `result`.
    """
    shape = result.shape
    dtype = result.dtype
    requires_grad = result.requires_grad
    stride = result.stride() if not is_sparse_any(result) else ()

    memory_format = None
    if include_contiguity and not is_sparse_any(result):
        memory_formats = {
            torch.contiguous_format,
            torch.channels_last,
            torch.channels_last_3d,
        }
        for query_format in memory_formats:
            if definitely_contiguous_for_memory_format(
                result, memory_format=query_format
            ):
                memory_format = query_format
                break

    is_quantized = result.is_quantized
    qparams: dict[str, Any] = {}
    if is_quantized:
        qscheme = result.qscheme()
        qparams["qscheme"] = qscheme
        if qscheme in {torch.per_tensor_affine, torch.per_tensor_symmetric}:
            qparams["scale"] = result.q_scale()
            qparams["zero_point"] = result.q_zero_point()
        elif qscheme in {
            torch.per_channel_affine,
            torch.per_channel_affine_float_qparams,
            torch.per_channel_symmetric,
        }:
            # In the per-channel case, scale and zero_point are tensors; store
            # their values as plain lists for easier serialization downstream.
            qparams["scale"] = result.q_per_channel_scales().tolist()
            qparams["zero_point"] = result.q_per_channel_zero_points().tolist()
            qparams["axis"] = result.q_per_channel_axis()

    return TensorMetadata(
        shape, dtype, requires_grad, stride, memory_format, is_quantized, qparams
    )


@compatibility(is_backward_compatible=True)
class ShapeProp(torch.fx.Interpreter):
    """
    Execute an FX graph Node-by-Node and
    record the shape and type of the result
    into the corresponding node.

    Example:
         In this example, we record the shape
         and data type of a module given
         an example input ``torch.randn(50, D_in)``.
         We print the name, shape and dtype of each node.

        class TwoLayerNet(torch.nn.Module):
            def __init__(self, D_in, H, D_out):
                super().__init__()
                self.linear1 = torch.nn.Linear(D_in, H)
                self.linear2 = torch.nn.Linear(H, D_out)
            def forward(self, x):
                h_relu = self.linear1(x).clamp(min=0)
                y_pred = self.linear2(h_relu)
                return y_pred
        N, D_in, H, D_out = 64, 1000, 100, 10
        x = torch.randn(N, D_in)
        y = torch.randn(N, D_out)
        model = TwoLayerNet(D_in, H, D_out)
        gm = torch.fx.symbolic_trace(model)
        sample_input = torch.randn(50, D_in)
        ShapeProp(gm).propagate(sample_input)

        for node in gm.graph.nodes:
            print(node.name, node.meta['tensor_meta'].dtype,
                node.meta['tensor_meta'].shape)

        The output of this code is:

        x torch.float32 torch.Size([50, 1000])
        linear1 torch.float32 torch.Size([50, 100])
        clamp_1 torch.float32 torch.Size([50, 100])
        linear2 torch.float32 torch.Size([50, 10])
        output torch.float32 torch.Size([50, 10])

    Args:
         module (GraphModule): The module to be executed
         fake_mode (FakeTensorMode): A fake mode for copying the gm

    """

    def __init__(self, gm, fake_mode=None):
        super().__init__(gm)
        if fake_mode is None:
            fake_mode = detect_fake_mode()
        if fake_mode is not None:
            from torch._dynamo.utils import deepcopy_to_fake_tensor

            # Note: we need fake execution because the inputs may be fake, but
            # we cannot fakify the module itself, since tensor_meta must be
            # written onto the nodes of the real module. So we keep a fake copy
            # of the module purely for producing results to extract metadata from.
            self.fake_module = deepcopy_to_fake_tensor(self.module, fake_mode)
            self.fake_mode = fake_mode
        else:
            self.fake_module = None
            self.fake_mode = None

        self.real_module = self.module

    def run_node(self, n: Node) -> Any:
        from torch.fx.experimental.symbolic_shapes import (
            compute_unbacked_bindings,
            rebind_unbacked,
        )

        try:
            if self.fake_module is not None:
                # Hacky swap. Alternatively, we could do this with overriding
                # call_module and get_attr.
                self.module = self.fake_module
            try:
                if self.fake_mode is not None:
                    with self.fake_mode, enable_python_dispatcher():
                        result = super().run_node(n)
                        rebind_unbacked(self.fake_mode.shape_env, n, result)
                else:
                    result = super().run_node(n)
            finally:
                self.module = self.real_module
        except Exception as e:
            traceback.print_exc()
            raise RuntimeError(
                f"ShapeProp error for: node={n.format_node()} with meta={n.meta}"
            ) from e

        found_tensor = False

        def extract_tensor_meta(obj):
            if isinstance(obj, torch.Tensor):
                nonlocal found_tensor
                found_tensor = True
                return _extract_tensor_metadata(obj)
            else:
                return obj

        meta = map_aggregate(result, extract_tensor_meta)
        if found_tensor:
            n.meta["tensor_meta"] = meta

        if self.fake_mode and (shape_env := self.fake_mode.shape_env):
            if symbol_to_path := compute_unbacked_bindings(shape_env, result):
                n.meta["unbacked_bindings"] = symbol_to_path

        n.meta["type"] = type(result)
        return result

    def propagate(self, *args):
        """
        Run `module` via interpretation and return the result and
        record the shape and type of each node.

        Args:
            *args (Tensor): the sample input.

        Returns:
            Any: The value returned from executing the Module
        """
        if self.fake_mode is not None:
            fake_args = [
                self.fake_mode.from_tensor(t) if isinstance(t, torch.Tensor) else t
                for t in args
            ]
        else:
            fake_args = args
        return super().run(*fake_args)