# torch/testing/_internal/composite_compliance.py
#
# NOTE: this section was recovered from a compiled (.pyc, Python 3.10) dump of
# torch.testing._internal.composite_compliance. The imports, function
# signatures, and error-message strings below come directly from the dump;
# the remaining implementation details (most notably the body of
# CompositeCompliantTensor.__new__) are best-effort reconstructions and may
# differ in small ways from the original module.

import torch
from torch import Tensor
import itertools

from torch.utils._python_dispatch import TorchDispatchMode
from torch.utils._pytree import tree_map, tree_flatten, tree_unflatten
from torch.utils import _pytree as pytree
from functools import partial
from torch.utils._mode_utils import no_dispatch, all_same_mode
import torch.autograd.forward_ad as fwAD
from typing import Callable
import re


def check_attr_consistency(wrapper_tensor, metadata_name, metadata_accessor):
    elem = wrapper_tensor.elem
    metadata_wrapper_tensor = metadata_accessor(wrapper_tensor)
    metadata_elem = metadata_accessor(elem)
    if metadata_wrapper_tensor == metadata_elem:
        return
    raise RuntimeError(
        "This operator is not Composite Compliant: the "
        f"{metadata_name} of the tensor was modified directly without "
        "going through the PyTorch dispatcher.")


def check_metadata_consistency(wrapper_tensor, CCT):
    # CCT: the CompositeCompliantTensor class produced by generate_cct_and_mode
    if not isinstance(wrapper_tensor, CCT):
        return
    things_to_check = {
        'shape': Tensor.size,
        'dtype': lambda x: x.dtype,
        'device': lambda x: x.device,
        'numel': Tensor.numel,
        'stride': Tensor.stride,
        'storage_offset': Tensor.storage_offset,
    }
    for metadata_name, metadata_accessor in things_to_check.items():
        check_attr_consistency(wrapper_tensor, metadata_name, metadata_accessor)


def is_view_fn(func):
    return func.overloadpacket.__name__ in {
        'as_strided', 'detach', 'diagonal', 'expand', 'expand_as',
        'movedim', 'narrow', 'permute', 'select', 'squeeze',
        'transpose', 't', 'real', 'imag', 'view_as_real', 'view_as_complex',
        'unflatten', 'unfold', 'unsqueeze', 'view', 'view_as',
        'unbind', 'split', 'split_with_sizes', 'vsplit', 'hsplit',
        'tensor_split', 'chunk', 'swapaxes', 'slice',
        '_reshape_alias', '_unsafe_view', '_conj', 'alias',
    }


# Manually populated from native_functions that have inplace_view: True.
def is_inplace_view_fn(func):
    return func.overloadpacket.__name__ in {
        'as_strided_', 'detach_', 'squeeze_', 'swapaxes_', 'swapdims_',
        't_', 'transpose_', 'unsqueeze_',
    }


def is_inplace(func):
    name = func.overloadpacket.__name__
    if re.match('__i.+__', name):
        return True
    if re.match('__.+__', name):
        return False
    return name[-1] == '_'


def generate_cct_and_mode(autograd_view_consistency=True):
    # Returns a fresh CompositeCompliantTensor subclass and an instance of
    # CompositeCompliantTensorMode.
    #
    # autograd_view_consistency: if True, results of view ops are aliased onto
    # the input's storage via set_() so that autograd's view/storage
    # invariants hold. Forward AD does not work with set_(), so
    # check_forward_ad_formula disables this.

    class CompositeCompliantTensor(torch.Tensor):
        elem: torch.Tensor

        __slots__ = ['elem']

        @staticmethod
        def __new__(cls, elem, mode, *args, **kwargs):
            assert type(elem) is not cls, \
                "Wrapping a CompositeCompliantTensor in a CompositeCompliantTensor is not supported"

            # The wrapper advertises the same metadata as elem but has no
            # storage of its own.
            r = torch.Tensor._make_wrapper_subclass(  # type: ignore[attr-defined]
                cls, elem.size(),
                dtype=elem.dtype, layout=elem.layout,
                device=elem.device, requires_grad=elem.requires_grad,
                strides=elem.stride(), storage_offset=elem.storage_offset())

            if elem.requires_grad:
                # The wrapper "steals" the requires_grad-ness: keep a copy of
                # elem's data with no autograd history so that OpInfo inputs
                # can be reused across tests. (Reconstructed approximately.)
                tmp = torch.empty((), dtype=elem.dtype, device=elem.device,
                                  layout=elem.layout, requires_grad=False)
                tmp.set_(source=elem.untyped_storage().clone(),
                         storage_offset=elem.storage_offset(),
                         size=elem.size(),
                         stride=elem.stride())
                r.elem = tmp
            else:
                r.elem = elem

            assert r.stride() == r.elem.stride()

            # Propagate conjugate/negative bits to the wrapper tensor.
            torch._C._set_conj(r, r.elem.is_conj())
            torch._C._set_neg(r, r.elem.is_neg())

            r.mode = mode
            return r

        def __repr__(self):
            return f"CompositeCompliantTensor({self.elem})"

        @classmethod
        def __torch_dispatch__(cls, func, types, args=(), kwargs=None):
            all_args = pytree.arg_tree_leaves(*args, **(kwargs or {}))
            modes = tuple(e.mode for e in all_args if isinstance(e, CompositeCompliantTensor))
            if not all_same_mode(modes):
                raise RuntimeError("Multiple CompositeCompliantTensorModes NYI")
            with modes[0]:
                return func(*args, **kwargs)

    class CompositeCompliantTensorMode(TorchDispatchMode):
        def __torch_dispatch__(self, func, types, args=(), kwargs=None):
            def unwrap(e):
                return e.elem if isinstance(e, CompositeCompliantTensor) else e

            def wrap(e):
                return CompositeCompliantTensor(e, self) if isinstance(e, torch.Tensor) else e

            if func == torch.ops.aten._local_scalar_dense.default:
                raise RuntimeError(
                    ".item() is not allowed to be called inside of composite "
                    "functions in the PyTorch library because not all backends "
                    "and/or Tensor subclasses (e.g. vmap, ProxyTensor) support them.")

            if func.overloadpacket.__name__ in ('set_', 'resize_'):
                raise RuntimeError(
                    f"{func.__name__} is not allowed to be called inside of "
                    "Composite operators.")

            if is_inplace(func):
                # We assume the first argument of an in-place op is the one
                # being written to.
                mutated_argument = args[0]
                if not isinstance(mutated_argument, CompositeCompliantTensor) and \
                        any(isinstance(a, CompositeCompliantTensor) for a in args[1:]):
                    raise RuntimeError(
                        "Not composite compliant: performing in-place operation "
                        f"{func.__name__} where the Tensor being written to is "
                        "regular Tensor but the other tensors are Tensor Subclasses. "
                        "Please try to avoid this in-place operation.")

            unwrapped_args = tree_map(unwrap, args)
            unwrapped_kwargs = tree_map(unwrap, kwargs)
            unwrapped_rs = func(*unwrapped_args, **unwrapped_kwargs)
            rs = tree_map(wrap, unwrapped_rs)

            if is_view_fn(func) and autograd_view_consistency:
                # Autograd asserts that the output of a view op aliases its
                # input's storage. Under no_dispatch() the wrappers behave like
                # regular tensors, so re-running the view op and set_()-ing the
                # results makes the wrapped outputs alias the wrapped inputs.
                with no_dispatch():
                    result = func(*args, **kwargs)
                    if isinstance(result, (tuple, list)):
                        for a, b in zip(rs, result):
                            a.set_(b)
                    else:
                        rs.set_(result)

            # In-place view ops are allowed to modify the metadata of their
            # inputs; replay them on the wrappers so metadata stays in sync.
            with no_dispatch():
                if is_inplace_view_fn(func):
                    func(*args, **kwargs)

            # Every wrapper must agree with its elem on metadata; if it does
            # not, the operator bypassed the dispatcher somewhere.
            check = partial(check_metadata_consistency, CCT=CompositeCompliantTensor)
            pytree.tree_map_(check, args)
            pytree.tree_map_(check, kwargs)
            pytree.tree_map_(check, rs)
            return rs

    return CompositeCompliantTensor, CompositeCompliantTensorMode()


def is_tensorlist(lst):
    if not isinstance(lst, list) and not isinstance(lst, tuple):
        return False
    if len(lst) == 0:
        return False
    all_tensors = all(isinstance(elt, torch.Tensor) for elt in lst)
    if all_tensors:
        return True
    exists_one_tensor = any(isinstance(elt, torch.Tensor) for elt in lst)
    if exists_one_tensor:
        raise RuntimeError('This test assumes that PyTorch APIs cannot take '
                           'mixed lists of Tensor and other things')
    return False


def maybe_map(fn, should_map, arg):
    return fn(arg) if should_map else arg


def wrap(arg, CCT, cct_mode):
    if isinstance(arg, torch.Tensor):
        return CCT(arg, cct_mode)
    if is_tensorlist(arg):
        return [CCT(a, cct_mode) for a in arg]
    raise RuntimeError("wrap assumes that the input can be wrapped")


def generate_subclass_choices(flat_args, CCT, cct_mode):
    # Yields (new_flat_args, which_args_are_wrapped) for every combination of
    # wrapping / not wrapping each Tensor (or TensorList) argument in CCT.
    is_tensor_likes = [isinstance(arg, torch.Tensor) or is_tensorlist(arg) for arg in flat_args]
    subclass_options = [[False, True] if is_tensor_like else [False]
                        for is_tensor_like in is_tensor_likes]

    for which_args_are_wrapped in itertools.product(*subclass_options):
        result = [maybe_map(partial(wrap, CCT=CCT, cct_mode=cct_mode), should_wrap_arg, arg)
                  for should_wrap_arg, arg in zip(which_args_are_wrapped, flat_args)]
        yield result, which_args_are_wrapped


def generate_subclass_choices_args_kwargs(args, kwargs, CCT, cct_mode):
    flat_kwargs, spec = tree_flatten(kwargs)
    flat_args_kwargs = list(args) + list(flat_kwargs)
    for choice, debug_metadata in generate_subclass_choices(flat_args_kwargs, CCT, cct_mode):
        new_args = choice[:len(args)]
        new_kwargs = tree_unflatten(choice[len(args):], spec)
        which_args_are_wrapped = debug_metadata[:len(args)]
        which_kwargs_are_wrapped = tree_unflatten(debug_metadata[len(args):], spec)
        yield new_args, new_kwargs, which_args_are_wrapped, which_kwargs_are_wrapped


def raise_composite_compliance_error(err, additional_info=''):
    raise RuntimeError(
        "Composite compliance check failed with "
        "the above error.\n"
        f"{additional_info}"
        "If you are adding an OpInfo of an "
        "existing operator, please feel free to skip this test "
        "because the problem was pre-existing and file an issue. "
        "Otherwise, if you added a new operator, please read "
        "through the Composite Compliance section in "
        "aten/src/ATen/native/README.md for how to resolve this. "
    ) from err


def check_all_permutations(op, args, kwargs, assert_equal_fn):
    CCT, cct_mode = generate_cct_and_mode()
    expected = op(*args, **kwargs)

    for choice in generate_subclass_choices_args_kwargs(args, kwargs, CCT, cct_mode):
        new_args, new_kwargs, which_args_are_wrapped, which_kwargs_are_wrapped = choice

        try:
            actual = op(*new_args, **new_kwargs)
        except RuntimeError as err:
            raise_composite_compliance_error(
                err,
                f"- wrapped_args: {which_args_are_wrapped}\n"
                f"- wrapped_kwargs: {which_kwargs_are_wrapped}\n"
            )

        def unwrap(e):
            return e.elem if isinstance(e, CCT) else e

        assert_equal_fn(tree_map(unwrap, actual), expected)


def check_with_mode(op, args, kwargs, assert_equal_fn):
    CCT, cct_mode = generate_cct_and_mode()

    def wrap(e):
        return CCT(e, cct_mode) if isinstance(e, torch.Tensor) else e

    expected = op(*args, **kwargs)

    args = tree_map(wrap, args)
    kwargs = tree_map(wrap, kwargs)
    try:
        with cct_mode:
            actual = op(*args, **kwargs)
    except RuntimeError as err:
        raise_composite_compliance_error(err)

    def unwrap(e):
        return e.elem if isinstance(e, CCT) else e

    assert_equal_fn(tree_map(unwrap, actual), expected)


def gather_leaf_tensors(args, kwargs):
    leaf_tensors = []
    args, _args_spec = tree_flatten(args)
    kwargs, _kwargs_spec = tree_flatten(kwargs)
    args = args + kwargs
    for arg in args:
        if not isinstance(arg, torch.Tensor):
            continue
        if arg.requires_grad:
            leaf_tensors.append(arg)
    return leaf_tensors


def compute_expected_grads(op, args, kwargs, output_process_fn_grad=None, gradcheck_wrapper=None):
    if gradcheck_wrapper is None:
        results = op(*args, **kwargs)
    else:
        results = gradcheck_wrapper(op, *args, **kwargs)

    if output_process_fn_grad is not None:
        results = output_process_fn_grad(results)

    flat_results = pytree.tree_leaves(results)
    flat_results = [r for r in flat_results if isinstance(r, torch.Tensor)]
    flat_diff_results = [r for r in flat_results if r.requires_grad]
    assert len(flat_diff_results) > 0

    grads = [torch.ones(r.shape, device=r.device, dtype=r.dtype) for r in flat_diff_results]
    leaf_tensors = gather_leaf_tensors(args, kwargs)
    assert len(leaf_tensors) > 0
    return torch.autograd.grad(flat_diff_results, leaf_tensors, grads,
                               allow_unused=True, retain_graph=True)


def check_backward_formula(op: Callable, args, kwargs,
                           output_process_fn_grad=None,
                           gradcheck_wrapper=None, assert_equal_fn=None):
    CCT, cct_mode = generate_cct_and_mode()

    expected = compute_expected_grads(op, args, kwargs, output_process_fn_grad, gradcheck_wrapper)

    for choice in generate_subclass_choices_args_kwargs(args, kwargs, CCT, cct_mode):
        new_args, new_kwargs, which_args_are_wrapped, which_kwargs_are_wrapped = choice
        leaf_tensors = gather_leaf_tensors(new_args, new_kwargs)
        assert len(leaf_tensors) > 0

        try:
            if gradcheck_wrapper is None:
                results = op(*new_args, **new_kwargs)
            else:
                results = gradcheck_wrapper(op, *new_args, **new_kwargs)
            if output_process_fn_grad is not None:
                results = output_process_fn_grad(results)
        except RuntimeError as err:
            raise_composite_compliance_error(
                err,
                f"- wrapped_args: {which_args_are_wrapped}\n"
                f"- wrapped_kwargs: {which_kwargs_are_wrapped}\n"
            )

        flat_results = pytree.tree_leaves(results)
        flat_results = [r for r in flat_results if isinstance(r, torch.Tensor)]
        flat_diff_results = [r for r in flat_results if r.requires_grad]
        assert len(flat_diff_results) > 0

        # NB: torch.ones, not torch.ones_like, so the grad_outputs start out as
        # regular Tensors; they get wrapped (or not) per permutation below.
        grads = [torch.ones(r.shape, device=r.device, dtype=r.dtype)
                 for r in flat_diff_results]
        for flat_new_grads, which_grad_is_batched in generate_subclass_choices(grads, CCT, cct_mode):
            try:
                actual = torch.autograd.grad(flat_diff_results, leaf_tensors, flat_new_grads,
                                             allow_unused=True, retain_graph=True)
            except RuntimeError as err:
                raise_composite_compliance_error(
                    err,
                    f"- wrapped_args: {which_args_are_wrapped}\n"
                    f"- wrapped_kwargs: {which_kwargs_are_wrapped}\n"
                    f"- wrapped_grads: {which_grad_is_batched}\n"
                )

            def unwrap(e):
                return e.elem if isinstance(e, CCT) else e

            assert_equal_fn(tuple(map(unwrap, actual)), expected, equal_nan=True)


def check_forward_ad_formula(op: Callable, args, kwargs, gradcheck_wrapper=None, assert_equal_fn=None):
    CCT, cct_mode = generate_cct_and_mode(autograd_view_consistency=False)

    def maybe_tangent(t):
        assert type(t) is not CCT
        # Generate a tangent for Tensor (or TensorList) inputs that require grad.
        if isinstance(t, torch.Tensor) and t.requires_grad:
            return torch.randn_like(t)
        elif is_tensorlist(t):
            return [torch.randn_like(e) if e.requires_grad else None for e in t]
        return None

    tangent_args = tuple(maybe_tangent(arg) for arg in args)
    flat_kwargs, spec = tree_flatten(kwargs)
    flat_tangent_kwargs = tuple(maybe_tangent(arg) for arg in flat_kwargs)
    tangent_kwargs = tree_unflatten(flat_tangent_kwargs, spec)

    with fwAD.dual_level():
        def maybe_make_dual(dual):
            # Pack (primal, tangent) into a dual tensor when the primal is a
            # differentiable Tensor (or TensorList).
            primal, tangent = dual
            if isinstance(primal, torch.Tensor) and primal.requires_grad:
                return fwAD.make_dual(primal.detach(), tangent)
            elif is_tensorlist(primal):
                return tuple(fwAD.make_dual(pri.detach(), tang) if tang is not None else pri
                             for pri, tang in zip(primal, tangent))
            return primal

        def compute_expected_grad(args, tangent_args, kwargs, tangent_kwargs):
            op_args = tuple(map(maybe_make_dual, zip(args, tangent_args)))
            op_kwargs = {k: maybe_make_dual((v, tangent_kwargs[k])) for k, v in kwargs.items()}

            if gradcheck_wrapper is None:
                return op(*op_args, **op_kwargs)
            return gradcheck_wrapper(op, *op_args, **op_kwargs)

        expected = compute_expected_grad(args, tangent_args, kwargs, tangent_kwargs)
        expected = tree_map(fwAD.unpack_dual, expected)
        expected_primals = tree_map(lambda x: x.primal, expected)
        expected_tangents = tree_map(lambda x: x.tangent, expected)

        # Permute which args/kwargs are wrapped in CCT...
        for choice in generate_subclass_choices_args_kwargs(args, kwargs, CCT, cct_mode):
            new_args, new_kwargs, which_args_are_wrapped, which_kwargs_are_wrapped = choice

            # ...and, independently, which tangents are wrapped in CCT.
            for tang_choice in generate_subclass_choices_args_kwargs(tangent_args, tangent_kwargs, CCT, cct_mode):
                new_tang_args, new_tang_kwargs, \
                    which_tang_args_are_wrapped, which_tang_kwargs_are_wrapped = tang_choice

                op_args = tuple(map(maybe_make_dual, zip(new_args, new_tang_args)))
                op_kwargs = {k: maybe_make_dual((v, new_tang_kwargs[k])) for k, v in new_kwargs.items()}

                try:
                    if gradcheck_wrapper is None:
                        actual = op(*op_args, **op_kwargs)
                    else:
                        actual = gradcheck_wrapper(op, *op_args, **op_kwargs)
                except RuntimeError as err:
                    raise_composite_compliance_error(
                        err,
                        f"- wrapped_args: {which_args_are_wrapped}\n"
                        f"- wrapped_kwargs: {which_kwargs_are_wrapped}\n"
                        f"- wrapped_tangent_args: {which_tang_args_are_wrapped}\n"
                        f"- wrapped_tangent_kwargs: {which_tang_kwargs_are_wrapped}\n"
                    )

                def unwrap(e):
                    return e.elem if isinstance(e, CCT) else e

                actual = tree_map(fwAD.unpack_dual, actual)
                actual_primals = tree_map(lambda x: unwrap(x.primal), actual)
                actual_tangents = tree_map(lambda x: unwrap(x.tangent), actual)
                assert_equal_fn(actual_primals, expected_primals, equal_nan=True)
                assert_equal_fn(actual_tangents, expected_tangents, equal_nan=True)