import contextlib
import functools
import logging
from unittest.mock import patch

import torch
from torch._dynamo import disable
from torch._dynamo.utils import counters, defake
from torch._functorch.aot_autograd import aot_module_simplified
from torch.utils._python_dispatch import _disable_current_modes

log = logging.getLogger(__name__)


def aot_autograd(**kwargs):
    def compiler_fn(gm: torch.fx.GraphModule, example_inputs):
        # Decompositions may be supplied lazily as a callable; resolve them here.
        if callable(kwargs.get("decompositions")):
            kwargs["decompositions"] = kwargs["decompositions"]()

        counters["aot_autograd"]["total"] += 1
        use_fallback = False

        if use_fallback:
            log.debug("Unable to use AOT Autograd because graph has mutation")
            counters["aot_autograd"]["not_ok"] += 1
            return gm

        def _wrapped_bw_compiler(*args, **kwargs):
            # Stop TorchDynamo from trying to compile the generated backward pass.
            return disable(disable(bw_compiler)(*args, **kwargs))

        bw_compiler = kwargs.get("bw_compiler") or kwargs["fw_compiler"]
        kwargs["bw_compiler"] = _wrapped_bw_compiler
        kwargs["inference_compiler"] = (
            kwargs.get("inference_compiler") or kwargs["fw_compiler"]
        )

        from functorch.compile import nop
        from torch._inductor.debug import enable_aot_logging

        # Only turn on functorch debug asserts when the no-op forward compiler
        # is in use; otherwise they noticeably slow down compilation.
        if kwargs.get("fw_compiler", None) == nop:
            patch_config = patch("functorch.compile.config.debug_assert", True)
        else:
            patch_config = contextlib.nullcontext()

        try:
            with enable_aot_logging(), patch_config:
                cg = aot_module_simplified(gm, example_inputs, **kwargs)
                counters["aot_autograd"]["ok"] += 1
                return disable(cg)
        except Exception:
            counters["aot_autograd"]["not_ok"] += 1
            raise

    return compiler_fn


def mem_efficient_fusion_kwargs(use_decomps):
    from functorch.compile import (
        default_decompositions,
        min_cut_rematerialization_partition,
        ts_compile,
    )

    kwargs = {
        "fw_compiler": ts_compile,
        "bw_compiler": ts_compile,
        "partition_fn": min_cut_rematerialization_partition,
    }

    if use_decomps:
        kwargs["decompositions"] = default_decompositions

    return kwargs
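# Illustrative usage sketch (not part of the original module): backends built on
# AOT Autograd are typically created by passing forward/backward compilers to
# aot_autograd() and handing the resulting compiler_fn to torch.compile. The
# names `my_fw_compiler` and `my_aot_backend` below are hypothetical placeholders.
#
#     def my_fw_compiler(gm: torch.fx.GraphModule, example_inputs):
#         # Inspect or transform the traced FX graph here; returning gm.forward
#         # simply runs the graph eagerly.
#         return gm.forward
#
#     my_aot_backend = aot_autograd(fw_compiler=my_fw_compiler)
#     compiled_model = torch.compile(model, backend=my_aot_backend)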
def fake_tensor_unsupported(fn):
    """
    Decorator for backends that need real inputs.  We swap out fake
    tensors for zero tensors.
    """

    @functools.wraps(fn)
    def wrapper(model, inputs, **kwargs):
        with _disable_current_modes():
            inputs = list(map(defake, inputs))
            return fn(model, inputs, **kwargs)

    return wrapper


def device_from_inputs(example_inputs) -> torch.device:
    for x in example_inputs:
        if hasattr(x, "device"):
            return x.device


def dtype_from_inputs(example_inputs) -> torch.dtype:
    for x in example_inputs:
        if hasattr(x, "dtype"):
            return x.dtype