# TorchDynamo backend that lowers a captured FX graph to Apache TVM:
# the graph is traced with torch.jit, converted to Relay, optionally tuned
# (auto_scheduler or meta_schedule), and wrapped in a callable that moves
# tensors across the torch/TVM boundary via DLPack.

import functools
import importlib
import logging
import os
import tempfile

import torch

from .common import device_from_inputs, fake_tensor_unsupported
from .registry import register_backend

log = logging.getLogger(__name__)


@register_backend
@fake_tensor_unsupported
def tvm(gm, example_inputs, *, scheduler=None, trials=20000):
    import tvm  # type: ignore[import]
    from tvm import relay  # type: ignore[import]
    from tvm.contrib import graph_executor  # type: ignore[import]

    jit_mod = torch.jit.trace(gm, example_inputs)
    device = device_from_inputs(example_inputs)
    shape_list = [(f"inp_{idx}", i.shape) for idx, i in enumerate(example_inputs)]
    example_outputs = gm(*example_inputs)
    if len(example_outputs) == 0:
        log.warning("Explicitly fall back to eager due to zero output")
        return gm.forward
    mod, params = relay.frontend.from_pytorch(jit_mod, shape_list)
    if device.type == "cuda":
        dev = tvm.cuda(device.index)
        target = tvm.target.cuda()
    else:
        dev = tvm.cpu(0)
        target = tvm.target.Target(llvm_target())

    if scheduler is None:
        scheduler = os.environ.get("TVM_SCHEDULER", None)

    if scheduler == "auto_scheduler":
        from tvm import auto_scheduler

        log_file = tempfile.NamedTemporaryFile()

        if not os.path.exists(log_file):
            tasks, task_weights = auto_scheduler.extract_tasks(
                mod["main"], params, target
            )
            if len(tasks) != 0:
                for task in tasks:
                    print(task.compute_dag)
                tuner = auto_scheduler.TaskScheduler(tasks, task_weights)
                if not os.path.exists(log_file):
                    assert trials > 0
                    tune_option = auto_scheduler.TuningOptions(
                        num_measure_trials=trials,
                        measure_callbacks=[auto_scheduler.RecordToFile(log_file)],
                        early_stopping=2000,
                    )
                    try:
                        tuner.tune(tune_option)
                    except Exception:
                        if os.path.exists(log_file):
                            os.unlink(log_file)
                        raise
            else:
                print("No tasks")

        with auto_scheduler.ApplyHistoryBest(log_file):
            with tvm.transform.PassContext(
                opt_level=3, config={"relay.backend.use_auto_scheduler": True}
            ):
                lib = relay.build(mod, target=target, params=params)
    elif scheduler == "meta_schedule":
        from tvm import meta_schedule as ms

        with tempfile.TemporaryDirectory() as work_dir:
            if device.type != "cuda":
                # meta_schedule needs an explicit core count on LLVM targets;
                # use the number of physical cores.
                target = tvm.target.Target(
                    f"{llvm_target()} --num-cores {ms.utils.cpu_count(logical=False)}"
                )
            database = ms.relay_integration.tune_relay(
                mod=mod,
                target=target,
                work_dir=work_dir,
                max_trials_global=20000,
                num_trials_per_iter=64,
                params=params,
                strategy="evolutionary",
            )
            lib = ms.relay_integration.compile_relay(
                database=database,
                mod=mod,
                target=target,
                params=params,
            )
    elif scheduler == "default" or not scheduler:
        # Compile without autotuning.
        with tvm.transform.PassContext(opt_level=10):
            lib = relay.build(mod, target=target, params=params)
    else:
        raise NotImplementedError(
            "This tuning option is invalid/not implemented for torchdynamo's "
            "TVM-related backend. There are three available options: "
            "default, auto_scheduler and meta_schedule."
        )
    m = graph_executor.GraphModule(lib["default"](dev))

    def to_torch_tensor(nd_tensor):
        """A helper function to transfer a NDArray to torch.tensor."""
        if nd_tensor.dtype == "bool":
            # DLPack does not support bool, so take the numpy round trip
            # (at the cost of an extra copy).
            return torch.from_numpy(nd_tensor.numpy())
        return torch.utils.dlpack.from_dlpack(nd_tensor.to_dlpack())

    def to_tvm_tensor(torch_tensor):
        """A helper function to transfer a torch.tensor to NDArray."""
        if torch_tensor.dtype == torch.bool:
            # Same bool limitation as above: fall back to a numpy copy.
            return tvm.nd.array(torch_tensor.cpu().numpy())
        return tvm.nd.from_dlpack(torch_tensor)

    def exec_tvm(*i_args):
        args = [a.contiguous() for a in i_args]
        shape_info, _ = m.get_input_info()
        active_inputs = {name for name, _ in shape_info.items()}
        for idx, arg in enumerate(args, 0):
            if arg.dim() != 0:
                if arg.requires_grad:
                    arg = arg.detach()
                inp_name = f"inp_{idx}"
                if inp_name not in active_inputs:
                    log.warning(
                        "input %s skipped as not found in tvm's runtime library",
                        inp_name,
                    )
                    continue
                m.set_input(inp_name, to_tvm_tensor(arg))
        m.run()
        return [to_torch_tensor(m.get_output(i)) for i in range(m.get_num_outputs())]

    return exec_tvm
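# Scheduler selection note: when the `scheduler` keyword above is left as
# None, tvm() falls back to the TVM_SCHEDULER environment variable, so an
# invocation such as (hypothetical script name)
#
#     TVM_SCHEDULER=meta_schedule python my_script.py
#
# behaves like choosing the tvm_meta_schedule partial defined below.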
tvm_meta_schedule = functools.partial(tvm, scheduler="meta_schedule")
tvm_auto_scheduler = functools.partial(tvm, scheduler="auto_scheduler")


def has_tvm():
    try:
        importlib.import_module("tvm")
        return True
    except ImportError:
        return False


@functools.lru_cache(None)
def llvm_target():
    # Pick an LLVM target matching the host CPU's vector extensions.
    if "avx512" in open("/proc/cpuinfo").read():
        return "llvm -mcpu=skylake-avx512"
    return "llvm -mcpu=core-avx2"
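# Usage sketch (illustrative only; `f` is a hypothetical function, not part of
# this module): with TVM installed, the backend registered above can be
# selected by name through torch.compile, e.g.
#
#     import torch
#
#     def f(x):
#         return torch.relu(x) + 1.0
#
#     compiled = torch.compile(f, backend="tvm")
#     compiled(torch.randn(8))
#
# The tuning variants can be requested by passing tvm_meta_schedule or
# tvm_auto_scheduler as a callable backend in the same way.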