o
    Ih*.                     @   s   d dl Z d dlm  mZ d dlmZ d dlZdd Zdd Z	dd Z
d	d
 Zdd Zejdd Zdd Zdd Zdd Zdd Zdd ZdS )    N)
namedtuplec                    s   t   fdd}|S )Nc               
      s    drdj}|| i |S  ds drD dr#dnd}|dkr+dnd}|j}td| d d| d| d	 | i |S )	Nautogradsave_for_backwardbackwardzWe found a 'z' registration for  at z but were unable to find a 'z' registration. To use the CustomOp API to register a backward formula, please provide us both a backward function and a 'save for backward' function via `impl_backward` and `impl_save_for_backward` respectively.)	_has_impl	_get_implfunclocationRuntimeError)argskwargskernelmissingfoundlocautograd_fallback	custom_op M/var/www/vscode/kcb/lib/python3.10/site-packages/torch/_custom_op/autograd.pyinner   s"   
z*autograd_kernel_indirection.<locals>.inner)autograd_not_implemented)r   r   r   r   r   autograd_kernel_indirection   s   r   c                    s    fdd}|S )Nc                     s`   t  rtdd | |frtdt j   | i |W  d    S 1 s)w   Y  d S )Nc                 S   s   t | tjo| jS N)
isinstancetorchTensorrequires_gradxr   r   r   <lambda>4       z:autograd_not_implemented.<locals>.kernel.<locals>.<lambda>z.Autograd has not been implemented for operator)r   is_grad_enabledpytreetree_anyr   _C_AutoDispatchBelowAutograd)r   r   r   r   r   r   2   s   $z(autograd_not_implemented.<locals>.kernelr   )r   r   r   r(   r   r   1   s   r   c              	   C   s   |d ur_t |ts|f}n|}t|t|ksJ g }tt||D ]3\}\}}t |tjr6|s5|| q"t |trC|sB|	| q"|rUt
d| d| dt| dq"|ra| j|  d S d S d S )NzWith output_differentiability=z	. At idx z , we received an object of type za that is not a Tensor, so it cannot have be marked as differentiable in output_differentiability.)r   tuplelen	enumeratezipr   r   appendlistextendr   typemark_non_differentiable)ctxoutputoutput_differentiabilitytuple_outputnon_differentiable_tensorsidxdifferentiableoutr   r   r   r1   <   s8   



r1   c                    s    fdd}|S )Nc                     sp   t | \}d   fdd} fdd}tjd ||}|j| } d us0J t t| S )Nc                    s   |  d tt|}tj   | }W d    n1 s!w   Y  ttt	|}t|}||}t
| ||f t| | t|\}t|S )NT)set_materialize_gradsr$   tree_unflattenr.   r   r&   r'   namedtuple_argstree_mapr0   save_pytree_for_backwardr1   tree_flattenr)   )r2   	flat_argsr   r3   	args_infosave_for_backward_fn_inputsto_saveflat_output)op_overloadout_specr4   save_for_backward_fnschemaspecr   r   forwardh   s   



z9construct_autograd_kernel.<locals>.apply.<locals>.forwardc                    sf   d usJ t t|}t| \}}t }t|ts|f} ||g|R  }t|| t||S r   )	r$   r;   r.   unpack_savedobjectr   r)   validate_grad_inputs_dictgrad_inputs_dict_to_flat_tuple)r2   flat_grad_outputgradssavedrA   	inner_ctxgrad_inputs_dict)backward_fnr   rF   r   r   r   |   s   

z:construct_autograd_kernel.<locals>.apply.<locals>.backward	_customop)r$   r?   gen_autograd_function_opnameapplyr;   r.   )r   r@   rJ   r   generated_clsrD   rT   r   rE   r4   rG   rH   )rF   rI   r   rX   d   s   
z(construct_autograd_kernel.<locals>.applyr   )rH   r4   r   rE   rG   rT   rX   r   rZ   r   construct_autograd_kernel\   s   .r[   c                 C   s$   t | tjjft|t|d}|S )N)rJ   r   )r0   r   r   Functionstaticmethod)namerJ   r   rY   r   r   r   rV      s   rV   c                 C   s.   dd | j jD }t| jd }t||}|S )Nc                 S   s   g | ]}|j qS r   )r^   .0argr   r   r   
<listcomp>   r"   z'namedtuple_args_cls.<locals>.<listcomp>_args)	argumentsflat_allstrr^   r   )rH   attribsr^   	tuple_clsr   r   r   namedtuple_args_cls   s   
ri   c                 C   s   t |tsJ t| }|| S r   )r   r)   ri   )rH   r   rh   r   r   r   r<      s   r<   c                    s   fdd}t | ts|dt|   dd  jjjD }|  }||kr1|d| d| d |  D ]\}}t||}t |t	rt |t
t	fsW|d	| d
t| d t|t|ksp|d	| dt| dt|  tt||D ]6\}	\}
}|
d u rqwt |
tjs|d	| dt|
 d|	  t|tjs|d	| d|	 d|	 d|  qwq5|d u rq5t |tjs|dt| d| d t|tjs|d| d| d| d q5d S )Nc                    s&     d}td  d|j d|  )Nr   z%In the backward function defined for r   z using the CustomOp API, )r   r   r
   )whatr   
forward_opr   r   error   s   
z(validate_grad_inputs_dict.<locals>.errorzBexpected the output of the backward function to be a dict but got c                 S   s   h | ]
}|j  r|jqS r   )r0   is_tensor_liker^   r_   r   r   r   	<setcomp>   s    z,validate_grad_inputs_dict.<locals>.<setcomp>z3expected the returned grad_input dict to have keys z	 but got z. The backward function must return a gradient (can be None) for each arg to the CustomOp that may be a Tensor or Sequence[Tensor]. Args declared to be non-Tensor-like types should not appear in the grad_input dictzfor input 'zR' expected the grad_input dict to hold a list of gradients but got object of type .z1' expected the grad_input dict to hold a list of z gradients but got z\' expected the grad_input dict to hold a list of None or Tensor gradients but got object of z
 at index z(', got a Tensor as the gradient for the z(-th value but expected None because the z(-th value was not a Tensor (it was type zgot object of type z as the gradient for input 'z:', but expected the gradient to be either None or a Tensorz(got a Tensor as the gradient for input 'z3' but expected None as the gradient because input 'z ' was not a Tensor (it was type z).)r   dictr0   _schemard   re   keysitemsgetattrr.   r)   r*   r+   r,   r   r   
issubclass)rS   rl   rA   rm   expected_keysactual_keysr^   gradarg_infor7   ginfor   rk   r   rM      sv   











rM   c                 C   sV   g }|   D ]\}}|| vr|tdd | q|| |  qtt|S )Nc                 S   s   d S r   r   r   r   r   r   r!      s    z0grad_inputs_dict_to_flat_tuple.<locals>.<lambda>)_asdictrt   r-   r$   r=   r)   tree_leaves)rS   rA   resultr^   rz   r   r   r   rN      s   rN   c           	      C   s   t |\}}t|}dd t|D }dd t|D }dd |D }dd |D }|| _|| _| j|  || _|| _|| _	d S )Nc                 S   s    g | ]\}}t |tjr|qS r   r   r   r   r`   r7   thingr   r   r   rb          

z,save_pytree_for_backward.<locals>.<listcomp>c                 S   s    g | ]\}}t |tjs|qS r   r   r   r   r   r   rb      r   c                 S   s   g | ]
}t |tjr|qS r   r   r`   r   r   r   r   rb          c                 S   s   g | ]
}t |tjs|qS r   r   r   r   r   r   rb     r   )
r$   r?   r*   r+   rI   num_eltsr   tensor_idxssaved_non_tensorsnon_tensor_idxs)	r2   stuff
flat_stuffrI   r   r   r   tensorsnon_tensorsr   r   r   r>      s   

r>   c                 C   s^   d g| j  }t| j| jD ]\}}|||< qt| j| jD ]\}}|||< qt|| j}|S r   )	r   r,   saved_tensorsr   r   r   r$   r;   rI   )r2   r   tensorr7   
non_tensorr   r   r   r   rK     s   

rK   )r   torch.utils._pytreeutils_pytreer$   collectionsr   	functoolsr   r   r1   r[   rV   	lru_cacheri   r<   rM   rN   r>   rK   r   r   r   r   <module>   s     9
<