# torchgen/dest/lazy_ir.py: interface recovered from a compiled (.pyc) dump.
# The import list, public signatures, and docstrings are preserved below; the
# function bodies, which emit C++ source via f-string templates, are elided.
import itertools
from abc import ABC
from dataclasses import dataclass
from typing import Any, Dict, List, Optional, Tuple, Union

import torchgen.api.dispatcher as dispatcher
from torchgen.api.lazy import (
    getValueT,
    isValueType,
    LazyArgument,
    LazyIrProperties,
    LazyIrSchema,
    tensorListValueT,
)
from torchgen.api.translate import translate
from torchgen.api.types import (
    BaseCType,
    Binding,
    deviceT,
    DispatcherSignature,
    kernel_signature,
    NativeSignature,
    OptionalCType,
    VectorCType,
)
from torchgen.context import method_with_native_function
from torchgen.dest.lazy_ts_lowering import ts_lowering_body
from torchgen.model import (
    Argument,
    BackendIndex,
    BackendMetadata,
    BaseTy,
    BaseType,
    FunctionSchema,
    ListType,
    NativeFunction,
    NativeFunctionsGroup,
)


def node_ctor_arg_rvalue_string(arg: LazyArgument) -> str:
    """
    Given a LazyArgument,
    generate a c++ string for materializing an rvalue of that arg for passing into
    a lazy Node constructor.
    """
    ...


def node_ctor_inputs(schema: LazyIrSchema) -> str:
    """
    Produce a formatted string with the arguments as passed into the constructor of a node class.
    """
    ...


def gen_fallback_code(
    schema: LazyIrSchema,
    sig: Union[DispatcherSignature, NativeSignature],
    overload_name: str,
) -> str:
    """
    Generate code that falls back to eager conditioned on a predicate
    """
    ...


def aten_symbol(schema: LazyIrSchema) -> str:
    ...


def convert_to_meta_tensors(sig: DispatcherSignature) -> Tuple[str, List[Binding]]:
    ...


@dataclass(frozen=True)
class GenLazyIR(ABC):
    backend_index: BackendIndex
    backend_name: str
    node_base: str
    use_lazy_shape: bool

    @method_with_native_function
    def __call__(self, f: Union[NativeFunctionsGroup, NativeFunction]) -> List[str]:
        ...

    def lowering_function(self, schema: LazyIrSchema) -> str:
        return ""

    def create_function(self, schema: LazyIrSchema, node_ctor_args: str) -> str:
        return ""

    def can_be_reused_function(self, schema: LazyIrSchema, node_ctor_args: str) -> str:
        return f"""bool CanBeReused({node_ctor_args}) const {{
    return false;
    }}"""

    def node_base_ctor_call(self, schema: LazyIrSchema) -> str:
        ...

    def gen(self, schema: LazyIrSchema) -> List[str]:
        ...


@dataclass(frozen=True)
class GenTSLazyIR(GenLazyIR):
    def lowering_function(self, schema: LazyIrSchema) -> str:
        ...

    def create_function(self, schema: LazyIrSchema, node_ctor_args: str) -> str:
        ...

    def can_be_reused_function(self, schema: LazyIrSchema, node_ctor_args: str) -> str:
        ...


@dataclass(frozen=True)
class GenLazyNativeFuncDefinition:
    class_method_name: str
    backend_index: BackendIndex
    tensor_class: str
    gen_forced_fallback_code: bool
    backend_namespace: str
    get_tensorlist: str
    get_tensor_or_wrap_number: str
    try_get_tensor: str
    metrics_counter: str
    create_tensor: str
    create_from_first_tensor: bool
    create_aten_from_ltc_tensor: str
    tuple_aten_from_ltc_tensors: str
    lazy_tensor_ptr: str
    get_device_fn: str

    def lazy_tensor_decls(self, func: NativeFunction, schema: LazyIrSchema) -> str:
        ...

    def force_eager_fallback(
        self,
        func: NativeFunction,
        schema: LazyIrSchema,
        metadata: BackendMetadata,
        sig: Union[DispatcherSignature, NativeSignature],
    ) -> str:
        ...

    def metrics(self, func: NativeFunction, schema: LazyIrSchema) -> str:
        ...

    def get_device(self, func: NativeFunction, schema: LazyIrSchema) -> str:
        ...

    def shape_inference(self, func: NativeFunction, schema: LazyIrSchema) -> str:
        ...

    def build_ir_node(self, func: NativeFunction, schema: LazyIrSchema) -> str:
        ...

    def create_lazy_tensor(self, first_tensor_name: Optional[str] = None) -> str:
        ...

    def return_aten_tensor(self, func: NativeFunction, schema: LazyIrSchema) -> str:
        ...

    @method_with_native_function
    def __call__(self, func: NativeFunction) -> List[str]:
        ...


class ComputeShapeSignature:
    """
    Here we use the base name as the suffix of the signature to avoid generating for in-place variants.
    """

    def __init__(self, kernel_name: str, f: NativeFunction, *, symint: bool) -> None:
        ...

    def __decl_suffix(self) -> str:
        ...

    def __call_suffix(self) -> str:
        ...

    @property
    def shape_decl(self) -> str:
        ...

    @property
    def shape_call(self) -> str:
        ...


@dataclass(frozen=True)
class GenLazyShapeInferenceDefinition:
    backend_index: BackendIndex
    tensor_class: str

    @method_with_native_function
    def __call__(self, f: NativeFunction) -> List[str]:
        ...


def generate_non_native_lazy_ir_nodes(
    non_native: List[Dict[str, Any]], gen_lazy_ir: GenLazyIR
) -> List[str]:
    """Generate the non-native lazy IR node classes"""
    ...
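

# Illustrative sketch only (not part of the original module): how a code-generation
# driver such as torchgen.gen_lazy_tensor typically wires these generators together.
# The parameters `backend_index` and `native_functions` are hypothetical placeholders
# for objects the driver builds elsewhere, and the backend label, node base class,
# opkind, and property strings below are assumed example values.
def _example_codegen_driver(
    backend_index: BackendIndex, native_functions: List[NativeFunction]
) -> List[str]:
    lazy_ir_generator = GenTSLazyIR(
        backend_index=backend_index,
        backend_name="TorchScript",        # assumed backend label
        node_base="torch::lazy::TsNode",   # assumed C++ base class of the generated nodes
        use_lazy_shape=True,
    )

    generated: List[str] = []
    # Each call returns the C++ class definition(s) for one operator's lazy IR node.
    for f in native_functions:
        generated.extend(lazy_ir_generator(f))

    # Ops with no ATen counterpart are described by YAML-style dicts with
    # "func", "opkind", and "properties" keys and rendered by the same generator.
    non_native = [
        {
            "func": "scalar(Scalar value, ScalarType type) -> Tensor",
            "opkind": "at::prim::Constant",
            "properties": ["ShapeCompute", "TreatScalarsAsConstants"],
        }
    ]
    generated.extend(generate_non_native_lazy_ir_nodes(non_native, lazy_ir_generator))
    return generated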