L i. ~UdZddlZddlZddlZddlZddlZddlZddlZddlm Z ddl m Z m Z m Z mZddlmZddlZddlmZmZmZmZddlmZddlmZmZmZdd lmZmZdd l m!Z!dd l"m#Z#ejHjJZ&ejHjNZ(ed d Z)edZ*d8dZ+d9dZ,GddejZjBZ.dZ/ej`jcddZ2ej`jcddZ3ej`jcddZ4ejjd:dZ6ejndfdZ8dZ9dZ:Gdde;Z<ejz d9dZ>Gd d!e?Z@e@jejHjd"ZCd#ZDd$ZEd%ZFddd dd&d ddedd f d'ZGGd(d)eHe ZIGd*d+eHe ZJddd dd&d ddedd f d,ZKdZLe eMe e feNd-<dd dd&d ddedd f d.ZOd/ZPGd0d1eZQGd2d3eQZRd4e e*e)fd5e e*e)ffd6ZS d;d7ZTy)._get_interpreter_name_for_var-s$$&89 9 )a-LLE"#899 FA )a- >>NN$ 0DAq!U\\*saxKq/R/ 0)r+r-s` r,"_create_interpreter_name_lookup_fnr0,s& )(r.Fc|jd}t|}t}|jD]H\}}t ||vr|j t ||r|||<6|j ||<J|S)NT keep_vars) state_dicttypesetr"idadddetach)moduler3r4 filtered_dictseen_idsr)r*s r,_unique_state_dictr=Cs""T"2J$D$&MH  "*1 a5H   RU  M!  xxzM! * r.cHeZdZ dfd Zdej fdZxZS)ONNXTracedModulecht|||_||_||_||_||_yN)super__init__innerstrict_force_outplace_return_inputs_return_inputs_states)rrDrEforce_outplace return_inputsreturn_inputs_states __class__s r,rCzONNXTracedModule.__init__Vs8   -+%9"r.argsc t|\ttdj}g gg  fd}tj j ||ztjj\}}jr | d dfSjr | ddfS| dfS)NTr2c2g}ttD]>}t||tjs t d|j ||@t|} jr! j td|D jrj t| j  j| jr d|fd<t \}}t|dk(r|dSt|S)NzExpected Tensor argumentc3\K|]$}|jtj&yw) memory_formatN)cloner$preserve_format).0xs r, z.wrapper..}s!U1!''0E0E'FUs*,rr) rangelenr#r$r%rappend _unflattenrGtuplerHrD_flatten) rMin_argsr( trace_inputsout_vars_in_descin_vars inputs_statesouts ret_inputsrs r,wrapperz)ONNXTracedModule.forward..wrapperrs*,G3w<( (!$q'5<<8&'ABBtAw' ( &gw7L""!!UPTUU))$$Z%AB KK  L1 2))$1!$4l#C a "4.KHa8}!{"X&r.r) r]listr=valuesr$_C_create_graph_by_tracingr0rErFrGrH) rrM module_staterggraph_outrbrcrdrerfs ` @@@@@r,forwardzONNXTracedModule.forwardhs#D>.ttDKKMN    ' '0hh77  l " . 0 KK   t   $q':a=0 0  % %$q'=#33 3$q'> !r.)TFFF)__name__ __module__ __qualname__rCr$r%ro __classcell__rLs@r,r?r?Us&" :$/"U\\/"r.r?cLfdtjdd|S)Ncr|yt|tjrz|jj |j rdntj j|j}|j|j|_ |S|j tj S)NrQ) r#r$r%r9rS is_mkldnnrTrequires_grad_ requires_gradgrad)ar* clone_inputs r,r|z"_clone_inputs..clone_inputs 9 5<< ( Q[[Te>S>ST0  vv!$QVV,H77)>)>7? ?r.c6t|tjSrA)r#r$r%rVs r,z_clone_inputs..s*Q -r.tensors) condition_msg)r _nested_map)rMr|s @r, _clone_inputsrs/@  8  -{)    r.PYTORCH_JIT_TIMEPYTORCH_JIT_DISABLEPYTORCH_JIT_STATSc #dKts|rtjjsdytjj }tjj d}tjj d}|j | d|j ||jt|d|d|j|dy#|j ||jt|d|d|j|dwxYww)NT) enable_timing z time: z ms) _JIT_TIMEr$cuda is_availablecurrent_streamEvent record_event synchronizeprint elapsed_time) trace_namenametimestreamstartends r,_timers d5::+B+B+D  ZZ & & (F JJ  4  0E **    .C I C    AdV75+=+=c+B*C3GH C    AdV75+=+=c+B*C3GHs BD0C'"AD0'AD--D0c" ttjjs t dtt  t|t s|f} r#tjj}d fd }tjj|d5||d\}}j|sJ ddd rj||d\}} t|t| y#1swYBxYw) aB Verify that a JIT compiled model has the same behavior as its uncompiled version along with its backwards pass. If your model returns multiple outputs, you must also specify a `loss_fn` to produce a loss for which the backwards will be computed. This function has side-effects (e.g., it executes your model / saves and loads parameters), so don't expect the model to come out exactly the same as what you passed in. Args: model (compiled torch.nn.Module or function): the module/function to be verified. The module/function definition MUST have been decorated with `@torch.jit.compile`. args (tuple or Tensor): the positional arguments to pass to the compiled function/module to be verified. A non-tuple is assumed to be a single positional argument to be passed to the model. loss_fn (function, optional): the loss function to be applied to the output of the model, before backwards is invoked. By default, we assume that a model returns a single result, and we :func:`torch.sum` before calling backwards; if this is inappropriate, you can pass your own loss function. Note that if a model returns a tuple of results, these are passed as separate positional arguments to `loss_fn`. devices (iterable of device IDs, optional): the GPU devices which the compiled module will be run on. This determines the RNG state we must save when running both compiled and uncompiled versions of the model. zICannot verify an uncompiled module. Add @torch.jit.compile to compile itcrtjng}t||f\}}}|r|j|r |j}|}|r|jk(r t dt |ts|f}tjk(r&t|dk7rtdt|dt|\} }| D cgc]0} | jjtj2} } |} tjj!| g|} | D cgc]0} | jjtj2}} | |fScc} wcc} w)Nz#failed to use the compiled functionrzModel returns zO outputs, but default loss function (torch.sum) can only handle a single outputrQ)rh parametersr] clear_cachehitsrr#r\r$sumrY ValueErrorr9rSrTautogradrz)rM force_traceassert_compiledparamsrcra compiled_fnroutr`r* saved_outslossgrads saved_grads is_moduleloss_fnmodels r, run_fwd_bwdzverify..run_fwd_bwdsq-6e&&()BtVn-    # # % ##DTl {//47DE E#u%&C eii CHM S +>> sm !KS FGAHHJ  5+@+@  A  }##TFG4LQ FGAHHJ  5+@+@  A  K((  s 5E,15E1ztorch.jit.verify)_callerT)rN)r)FF)r#r$rjCompiledFunction TypeErrorrr\copydeepcopyr4randomfork_rng has_trace_forload_state_dict _verify_equal) rrMrdevices saved_stateruncompiled_outsuncompiled_grads compiled_outscompiled_gradsrs ` ` @r,verifyrsF eUXX66 7 W  5&)I dE "wmmE$4$4$67 )>   w0B  C*,7$,O))"u""D)))* k*$/d$K!M>/=1"N3**s $DDct||D]@\}}|j|jjdkDs7t dy)Ngư>z!JIT and real computation mismatch)zipsubabsmaxr)xsysrVys r,rr!sHB D1 558<<>   $ &BC CDr.cldj|jDcgc]}d|z c}Scc}w)N  )join splitlines)slines r,indentr's) 99alln=ddTk= >>=s 1c eZdZdfd ZxZS)TracingCheckErrorcZd|_||xj|dzz c_|6|xjdz c_|xjt|dzz c_|6|xjdz c_|xjt|dzz c_t| |jy)NzTracing failed sanity checks! rz+ERROR: Graphs differed across invocations! zERROR: Tensor-valued Constant nodes differed in value across invocations. This often indicates that the tracer has encountered untraceable code. )messagerrBrC)rgraph_diff_errortensor_compare_error extra_msgrLs r,rCzTracingCheckError.__init__,s8  LLI, ,L  ' LLJ JL LLF#34t; ;L  + LL3 L LLF#784? ?L &r.rA)rprqrrrCrsrts@r,rr+s ''r.rc |D]} t| tjr| f} |ri} | jD]\} } t | | | <tj j t|d|| d|||tjjd } | jjj| j} t| tjst| trisg| f} ncr0tj j|d|||t | d} n/tj j|t | d|||d} | fddfd}dgfd fd }|| d }||| d }|||d r|| d }|||d }td|Dst!|y)N__self__F) check_tracerErF _module_class_compilation_unitexample_inputs_is_kwarg _store_inputs)rrErFrexample_kwarg_inputsr)rrErFrrc tjjj}tjj |tjj |t |}tjdd|}tjjj}tjj |tjj |t |}tjdd|}d}||k7rUddl }|j|jd|jd}dtdj|zdz}t|j|jD]\}}t |t |k7s|dz }|jt |jdt |jd} dtdj| zdz} |j!} | r| d t| zdzz } |j!} | r| d t| zdzz } || z }nd} t|j|jD]\}}|j#|j#k7r|| fS|j#d k(s@|j%rQ|j%rb|j'd st|j)d d k7s|j)d d k7r|j+d }|j+d } tj,j/||d|| fS#t0t2f$ro}| d} | dtt |zdzz } |j!}|r| dt|zdzz } | dtt |zz } Yd}~|| fSd}~wwxYw)Nz___torch_mangle_[0-9]+\.rrTz Graph diff: rzFirst diverging operator: z Node diff: zTrace source location: zCheck source location: zprim::Constantvaluet) equal_nanzNode: zSource Location: zComparison exception: )r$rj_jit_pass_canonicalizerm_jit_pass_inline!_jit_pass_erase_shape_informationstrrerdifflibndiffrrrrnodes sourceRangekind mustBeNone hasAttributekindOfrtesting assert_closerAssertionError)mod_canonicalizedmod_strcheck_canonicalized check_strgraph_diff_errorsr graph_diffn_modn_check node_diffsource_printout mod_stack check_stacktensor_compare_errorsmod_tensor_valcheck_tensor_vale compare_stackcheck_mod_func traced_funcs r,graph_diagnostic_infoz+_check_trace..graph_diagnostic_info}s % ? ? @Q@Q R  HH % %&7 8 HH 6 67H I+,Gff8"gFG"'(("A"A.BVBV"W  HH % %&9 : HH 6 67J K/0I:B JI $ )#$]]&&t,i.B.B4.H %4fRWWZ=P6Q$QTX$X!&)%++-/B/H/H/J'NE75zS\1)-JJ)$+MMJ11$7W9P9PQU9V% +VBGGI4F-GG$N(%*$5$5$7 $+ :VI=N NQU UO'.&9&9&; &+ :VK=P PSW WO*_<)/2%) !"%!'')+>+D+D+F#" w::<7<<>1@%&;; ;=::<#33$$&'*<*<*> --g6 ||G,3w~~g7NRU7U %*WWW%5N'.yy'9$ 22*,<3%" H%&;; ;).9 08461-VCJ=O1ORV1VV-(-(9(9(; (1 4vm7L Lt S1.1IFFM2-$&;; ; s"NPA!O<<Pc,t|tr|S|fSrA)r#r\r~s r, wrap_retvalz!_check_trace..wrap_retvals"1e,1 61$ 6r.c Z t|tr r |di|}n |t|}|Dcgc]}t|tjs|!}}|Scc}w#t $r8} \}}d|dt t|}t||||d}~wwxYw)Nz+encountered an exception while running the z with test inputs. Exception: )rr/) r#dictrr$r% Exceptionrrr) modinputs running_whatrerrrrmsgrrrs r,!run_mod_and_filter_tensor_outputsz7_check_trace..run_mod_and_filter_tensor_outputss fd+0G&s}V}5D&sM&,A'BCD'+Mz#u||/LMM N ;P;R8!#8CL>Qqrxy|}~ysArBC'%)!  s.9A)A$A$ A)$A)) B*23B%%B*c |drydd<jjDcgc]}|js|}}t|dkDr`d}|dz }|dj |Dcgc]}t t |c}ddz }|dz }tj|td yycc}wcc}w) NrTz"Trace had nondeterministic nodes. z2Did you forget call .eval() on your model? Nodes: rzp This may cause errors in trace checking. To disable trace checking, pass check_trace=False to torch.jit.trace()category stacklevel) rmrisNondeterministicrYrrrwarningswarn TracerWarning)op nondeterm_opsnondeterministic_ops_warning has_warnedrs r,maybe_warn_nondeterministicz1_check_trace..maybe_warn_nondeterministics!} JqM(..446":O:O:QM=!A%/S,,I,- /<=VCG_=crB1,-C, 0=UV&>sB4B4*B9 c d}tt||D]p\}\}} |jr|j}|jr|j}|jr|j }|jr|j }|j s|j rmtjj|jtj|jtj t||ddni|js |jrOtjj|j|j t||ddnt|dds t|ddrt|ddt|ddk(sJt|j!|j!D]R\}}tjj|j#|j# t||ddTnMtjj|j#|j# t||dds|S#t$$rT}  t'j(dt+|dzzdz|zdzt+| zt,d d }Yd} ~ d} ~ wwxYw) NTr)rtolatolr is_nestedz Output nr zH. of the traced function does not match the corresponding output of the z. Detailed error: rF) enumerater is_quantized dequantizerwto_dense is_complexr$rrtocdoubleris_mpsfloatgetattrunbinddoublerrrrr) original reference match_whatall_okr(origreft_origt_refrcheck_tolerancers r,compare_outputsz%_check_trace..compare_outputssF"+C),D"E> #;D#=#((#0''!nn.~~#}}}}!lln~~'4??+< 22 GGEMM2FF5==1!0!3D#!>q!A&* 3 ;;#**!MM66 $ # %4%7c%B1%E*. 7%T;=dB$+4d#Cw #[$H$$25T[[]CJJL1Q"  % : :$*MMO$)LLN)8);FE)J1)M.2 !;!"""MM66 $ # %4%7c%B1%E*. 7U> #@M& #/1MM$a!e*%;;% % 0 0 a& !"/#$ #F #sI&J K,A K''K,tracezPython functionzrepeated tracec3$K|]}|du ywrAr/)rUinfos r,rWz_check_trace..Ms6Dt46s)r#r$r%r"rjit trace_moduler.rjCompilationUnit_c _get_methodrr r;anyr) check_inputsfuncrr9rErIis_trace_modulerrr  copied_dictrdata check_modrr: traced_outsfn_outs check_outs diag_inforrrrrs `` ` @@@@@r, _check_tracerN>sC0 fell +YF K$lln 8 d$1$$7 D! 8 ..j$/! .+"'((":":"<(?#/ I'\\55k6F6FGNK,,-F6ELL2fd+/ &!IIOO %!$2"/)6v)>"', "IIOO!&) %!$2"/"', 'NT  _recursive make_stubs_from_exported_methodscreate_script_moduleTopLevelTracedModule)r rrinfer_methods_stubs_fns r, make_modulerdjs#|$    / / 4!&!5!5!V!Vyy##88 'Ut9    0MS4EFFr.c6|y|Dcgc]}d|ic}Scc}w)Nror/)rDcs r,wrap_check_inputsrgxs#$0 1qYN 11 1s cddlmcm}|j|}|j|}t ||D]\}}|j |j k7ry|j t jk(ryt|t|k7ryt|t jjryt|t jr4|j|jk7ryt j||ry||k7syy)NrFT)torch.utils._pytreeutils_pytree tree_leavesrlayoutr$_mkldnnr5r# _subclasses FakeTensorr%dtypeallclose)exportr;pytree flat_export flat_tracer5loadeds r,$analyze_ts_result_with_export_resultrxs(($$V,K##E*JK4 f ;;&-- ' ;;%-- ' :f % dE--88 9 ell +zzV\\)>>$/v~'( r.gh㈵>c bt|tjjrt j d|St|tj jrO|t| tr| }n tdt|d|id|t|||||t| t|  St|drt|jtj jrh|jdk(rY|t| tr| }n tdt|jd|id|t|||||t| t|  St|tjtfr| |f}n| t|t s t!|}t#d} t|dr9t|jtj jr t%dt'|} t| tr1| }tj(j+| || | ||t-|}n.tj(j/| ||| ||t-|}|rH|#t1||||||d|t| t n#t1|g|||||d|t| t ||_|S) Nz`The input to trace is already a ScriptModule, tracing it is a no-op. Returning the object as is.z%example_kwarg_inputs should be a dictro)rrrrzVtrace doesn't support compiling individual module's functions. Please use trace_moduleFr)r#r$r>rrrnnrr rr?rghasattrrrpr%r\r0AttributeErrorr rj%_create_function_from_trace_with_dictr _create_function_from_tracerN_torchdynamo_inline)rErVoptimizerrDr9rErFrrrr var_lookup_fnrtraceds r, _trace_implrs$ ../  n  $(  !.5!5"#JKK   '   l +    $./CT$J'   j! t}}ehhoo 6 MMY &  !.5!5"#JKK MM  '   l +    $./CT$J'  >ELL$#78 ((*  %j.O~.6q9MtZ Z uxx%O &  4 D&--??      ' - 55       ' -   # (23G(N   (23G(N "&F Mr.c$eZdZdZdZdZdefdZy) _ExportType DIRECT_EXPORTTRACE_AND_EXPORTSOURCE_TO_SOURCEreturnc|jSrArrs r,__str__z_ExportType.__str__+ zzr.N)rprqrrrrrrrr/r.r,rr&s#M))r.rc(eZdZdZdZdZdZdefdZy)_ExportOutcomeSUCCESSFAILED_TO_EXPORT FAILED_TO_RUNACCURACY_ERRORrc|jSrArrs r,rz_ExportOutcome.__str__5rr.N) rprqrrrrrrrrr/r.r,rr/s#G)#M%Nr.rc ts|S|tjdtdddlm} t |||||||||| | | } | dt| | S)a Trace a function and return an executable or :class:`ScriptFunction` that will be optimized using just-in-time compilation. Tracing is ideal for code that operates only on ``Tensor``\\s and lists, dictionaries, and tuples of ``Tensor``\\s. Using `torch.jit.trace` and `torch.jit.trace_module`, you can turn an existing module or Python function into a TorchScript :class:`ScriptFunction` or :class:`ScriptModule`. You must provide example inputs, and we run the function, recording the operations performed on all the tensors. * The resulting recording of a standalone function produces `ScriptFunction`. * The resulting recording of `nn.Module.forward` or `nn.Module` produces `ScriptModule`. This module also contains any parameters that the original module had as well. Warning: Tracing only correctly records functions and modules which are not data dependent (e.g., do not have conditionals on data in tensors) and do not have any untracked external dependencies (e.g., perform input/output or access global variables). Tracing only records operations done when the given function is run on the given tensors. Therefore, the returned `ScriptModule` will always run the same traced graph on any input. This has some important implications when your module is expected to run different sets of operations, depending on the input and/or the module state. For example, * Tracing will not record any control-flow like if-statements or loops. When this control-flow is constant across your module, this is fine and it often inlines the control-flow decisions. But sometimes the control-flow is actually part of the model itself. For instance, a recurrent network is a loop over the (possibly dynamic) length of an input sequence. * In the returned :class:`ScriptModule`, operations that have different behaviors in ``training`` and ``eval`` modes will always behave as if it is in the mode it was in during tracing, no matter which mode the `ScriptModule` is in. In cases like these, tracing would not be appropriate and :func:`scripting ` is a better choice. If you trace such models, you may silently get incorrect results on subsequent invocations of the model. The tracer will try to emit warnings when doing something that may cause an incorrect trace to be produced. Args: func (callable or torch.nn.Module): A Python function or `torch.nn.Module` that will be run with `example_inputs`. `func` arguments and return values must be tensors or (possibly nested) tuples that contain tensors. When a module is passed `torch.jit.trace`, only the ``forward`` method is run and traced (see :func:`torch.jit.trace ` for details). Keyword arguments: example_inputs (tuple or torch.Tensor or None, optional): A tuple of example inputs that will be passed to the function while tracing. Default: ``None``. Either this argument or ``example_kwarg_inputs`` should be specified. The resulting trace can be run with inputs of different types and shapes assuming the traced operations support those types and shapes. `example_inputs` may also be a single Tensor in which case it is automatically wrapped in a tuple. When the value is None, ``example_kwarg_inputs`` should be specified. check_trace (``bool``, optional): Check if the same inputs run through traced code produce the same outputs. Default: ``True``. You might want to disable this if, for example, your network contains non- deterministic ops or if you are sure that the network is correct despite a checker failure. check_inputs (list of tuples, optional): A list of tuples of input arguments that should be used to check the trace against what is expected. Each tuple is equivalent to a set of input arguments that would be specified in ``example_inputs``. For best results, pass in a set of checking inputs representative of the space of shapes and types of inputs you expect the network to see. If not specified, the original ``example_inputs`` are used for checking check_tolerance (float, optional): Floating-point comparison tolerance to use in the checker procedure. This can be used to relax the checker strictness in the event that results diverge numerically for a known reason, such as operator fusion. strict (``bool``, optional): run the tracer in a strict mode or not (default: ``True``). Only turn this off when you want the tracer to record your mutable container types (currently ``list``/``dict``) and you are sure that the container you are using in your problem is a ``constant`` structure and does not get used as control flow (if, for) conditions. example_kwarg_inputs (dict, optional): This parameter is a pack of keyword arguments of example inputs that will be passed to the function while tracing. Default: ``None``. Either this argument or ``example_inputs`` should be specified. The dict will be unpacking by the arguments name of the traced function. If the keys of the dict don't not match with the traced function's arguments name, a runtime exception will be raised. Returns: If `func` is `nn.Module` or ``forward`` of `nn.Module`, `trace` returns a :class:`ScriptModule` object with a single ``forward`` method containing the traced code. The returned `ScriptModule` will have the same set of sub-modules and parameters as the original ``nn.Module``. If ``func`` is a standalone function, ``trace`` returns `ScriptFunction`. Example (tracing a function): .. testcode:: import torch def foo(x, y): return 2 * x + y # Run `foo` with the provided inputs and record the tensor operations traced_foo = torch.jit.trace(foo, (torch.rand(3), torch.rand(3))) # `traced_foo` can now be run with the TorchScript interpreter or saved # and loaded in a Python-free environment Example (tracing an existing module):: import torch import torch.nn as nn class Net(nn.Module): def __init__(self) -> None: super().__init__() self.conv = nn.Conv2d(1, 1, 3) def forward(self, x): return self.conv(x) n = Net() example_weight = torch.rand(1, 1, 3, 3) example_forward_input = torch.rand(1, 1, 3, 3) # Trace a specific method and construct `ScriptModule` with # a single `forward` method module = torch.jit.trace(n.forward, example_forward_input) # Trace a module (implicitly traces `forward`) and construct a # `ScriptModule` with a single `forward` method module = torch.jit.trace(n, example_forward_input) ^`optimize` is deprecated and has no effect. Use `with torch.jit.optimized_execution()` insteadrr)log_torchscript_usager;)model_id)rrr FutureWarningtorch._utils_internalrrr )rErVrrrDr9rErFrrrrrrs r,r;r;9suB    A   <  K'M+,FG r._trace_module_mapc ts|S|tjdtdt d} t |t jjs tdt |ts tdt jjj} ifd|d<t jj_ |dt||| }|jD]\}}|d k(r|}t!||}t#|}nt!||}t#|}t |trX| rV|D].}||vsd d j%|zd z}t'd |d||j(j+|||| |||| n-t-|}|j(j/|||| |||| |j(j1|}|s|t3||||||d||  t3|g|||||d||   | t jj_ |S#| t jj_ wxYw)a Trace a module and return an executable :class:`ScriptModule` that will be optimized using just-in-time compilation. When a module is passed to :func:`torch.jit.trace `, only the ``forward`` method is run and traced. With ``trace_module``, you can specify a dictionary of method names to example inputs to trace (see the ``inputs``) argument below. See :func:`torch.jit.trace ` for more information on tracing. Args: mod (torch.nn.Module): A ``torch.nn.Module`` containing methods whose names are specified in ``inputs``. The given methods will be compiled as a part of a single `ScriptModule`. inputs (dict): A dict containing sample inputs indexed by method names in ``mod``. The inputs will be passed to methods whose names correspond to inputs' keys while tracing. ``{ 'forward' : example_forward_input, 'method2': example_method2_input}`` Keyword arguments: check_trace (``bool``, optional): Check if the same inputs run through traced code produce the same outputs. Default: ``True``. You might want to disable this if, for example, your network contains non- deterministic ops or if you are sure that the network is correct despite a checker failure. check_inputs (list of dicts, optional): A list of dicts of input arguments that should be used to check the trace against what is expected. Each tuple is equivalent to a set of input arguments that would be specified in ``inputs``. For best results, pass in a set of checking inputs representative of the space of shapes and types of inputs you expect the network to see. If not specified, the original ``inputs`` are used for checking check_tolerance (float, optional): Floating-point comparison tolerance to use in the checker procedure. This can be used to relax the checker strictness in the event that results diverge numerically for a known reason, such as operator fusion. example_inputs_is_kwarg (``bool``, optional): This parameter indicate whether the example inputs is a pack pack of keyword arguments. Default: ``False``. Returns: A :class:`ScriptModule` object with a single ``forward`` method containing the traced code. When ``func`` is a ``torch.nn.Module``, the returned :class:`ScriptModule` will have the same set of sub-modules and parameters as ``func``. Example (tracing a module with multiple methods):: import torch import torch.nn as nn class Net(nn.Module): def __init__(self) -> None: super().__init__() self.conv = nn.Conv2d(1, 1, 3) def forward(self, x): return self.conv(x) def weighted_kernel_sum(self, weight): return weight * self.conv.weight n = Net() example_weight = torch.rand(1, 1, 3, 3) example_forward_input = torch.rand(1, 1, 3, 3) # Trace a specific method and construct `ScriptModule` with # a single `forward` method module = torch.jit.trace(n.forward, example_forward_input) # Trace a module (implicitly traces `forward`) and construct a # `ScriptModule` with a single `forward` method module = torch.jit.trace(n, example_forward_input) # Trace specific methods on a module (specified in `inputs`), constructs # a `ScriptModule` with `forward` and `weighted_kernel_sum` methods inputs = { "forward": example_forward_input, "weighted_kernel_sum": example_weight, } module = torch.jit.trace_module(n, inputs) rrrrz.expected torch.nn.Module as the first argumentz3expected a dictionary of (method_name, input) pairscb|jD]\}}|dz|z}||<||y)N.)named_children)r prefixrchildsubmod_qualnameregister_submodstrace_module_maps r,rz&trace_module..register_submodspsB"113 9 e"(3,"5*9 ' 8 9r.__modulero[,]'z\' is not in forward() method's arguments, valid arguments name are Trz)rrrrr0r#r$r{rr}r r>_tracerrdr"r.r r NameErrorrA#_create_method_from_trace_with_dictrW_create_method_from_tracerBrN)r r rrrDr9rErFrrrrrold_module_mapr: method_namerVrEforward_methodargument_nameskeyvalid_argumentscheck_trace_methodrrs @@r,r?r?s~    A   7q9M c588?? +MNN fd #RSSYY%%77NX<+- 9 (+$-= *j)S-1BC+1<<>G  'Ki'!(k!:!<^!LsK0!.QualnameWrapperN)rprqrrr/r.r,QualnameWrapperrs r.rcF|vr tdj|y)Nz=TracedModules don't support parameter sharing between modules)rr8)paramid_sets r, check_uniquez+TracedModule.__init__..check_uniques( S JJu r.z=Modules that have backward hooks assigned can't be compiled: r\cy)Nr/r/)r:s r,rz'TracedModule.__init__..sr.FTrY_name_actual_script_module) _parameters_buffers_modulestraining)rBrCr#r$r{rr6r]r r5_jit_override_qualnamerrr"r__dict__rj_jit_is_script_objectsetattr_backward_hooksrrrrdrr>r_rarpdelattr)rr5rrr tmp_modulerrrbufval submodule script_modulerLs ` r,rCzTracedModule.__init__s7 $000  ehhoo 271D1D1T1T J2 .%&  #mm ++113 $KD% /4 &&t,U# $,,. "ID#,/ ##D)S! ",,. /ID#..s3 0 00 - D#.  /   Od)  $}}224 OD) (3<4)J   %  ,,AA )uB "&d!4!4 g1> -.G D D$  r.ctd)Nz"Trace submodules cannot be called.)r)rrMkwargss r,rozTracedModule.forwards?@@r.cjd|jvrt| |St|j|SNr)rrB __getattr__r.r)rattrrLs r,rzTracedModule.__getattr__ s2 "$-- 77&t, ,t11488r.cpd|jvrt| ||St|j||yr)rrB __setattr__rr)rrrrLs r,rzTracedModule.__setattr__%s3 "$-- 77&tU3 3**D%8r.c|jSrArrs r, _get_namezTracedModule._get_name*rr.c d|jS)Nzoriginal_name=rrs r, extra_reprzTracedModule.extra_repr-s |,,r.)NN) rprqrr_disable_script_metarCrorrrrrsrts@r,rrs*C JA9 9 -r.rc6eZdZUeZedefed<dZy)rb.roc@|jdj|y)z Re-construct an instance of TopLevelTracedModule using an instance of a C++ module. Args: cpp_module: The C++ module that this TopLevelTracedModule will be rebuilt around. rN)r _reconstruct)r cpp_modules r,rz!TopLevelTracedModule._reconstruct4s -.;;JGr.N) rprqrrrrorr__annotations__rr/r.r,rbrb1s"0"2GXc3h 2Hr.rbfnrctjdtjdtjdt ffd _d_S)NrMrrcbts|i|Stj}||i|SrA)r[r __original_fn)rMrrrrgs r,rgz#_script_if_tracing..wrapper?s8|t&v& &&,W-B-B&C D+F++r.T) functoolswrapsrrMrrr__script_if_tracing_wrapper)rrgs`@r,_script_if_tracingr>sR__R,qvv,,a,,G*.G' Nr.c^|i}t|ts|f}t||||||i|}|S)aReturn a tuple on tracing a function or model. .. warning:: This function is internal-only and should only be used by the ONNX exporter. If you are trying to get a graph through tracing, please go through the public API instead:: trace = torch.jit.trace(nn.LSTMCell(), (input, hidden)) trace_graph = trace.graph Trace a function or model, returning a tuple consisting of the both the *trace* of an execution, as well as the original return value. If return_inputs, also returns the trace inputs as part of the tuple Tracing is guaranteed not to change the semantics of the function/module that is traced. Args: f (torch.nn.Module or function): the function or module to be traced. args (tuple or Tensor): the positional arguments to pass to the function/module to be traced. A non-tuple is assumed to be a single positional argument to be passed to the model. kwargs (dict): the keyword arguments to pass to the function/module to be traced. Example (trace a cell): .. testcode:: trace = torch.jit.trace(nn.LSTMCell(), (input, hidden)) )r#r\r?)frMrrErFrJrHres r,_get_trace_graphrNsRR~ dE "w   6?M3H    D Kr.)r)F)T)r/NTFFF)U__doc__ contextlibrrrosrrenumrtypingrrrrtyping_extensionsrr$torch._jit_internalr r r r torch.autogradr torch.jit._scriptrrrtorch.jit._staterrtorch.nnrtorch.testing._comparisonrrj _jit_flattenr]_jit_unflattenr[rrr0r=r{r?renvirongetr _JIT_DISABLE _JIT_STATScontextmanagerrrrrrr rno_gradrNWarningrrS_tracer_warn_use_pythonrWrdrgrxrrrrr;rr rr?r[rrbrrr/r.r,r s  33'  $BB18 88  XX $ $  C4  cN).$B"uxxB"J . JJNN-u 5 zz~~3U; ZZ^^/ 7   I I !& 4W4tD ?' '&"O0O0dEGE!!# " G2>   GT#tS$   |~/38DcN+2   !L^"Y-<Y-x H< H 8AqD> hq!tn $  0r.