import torch
from collections import OrderedDict
import weakref
import warnings
from typing import Any

__all__ = ["RemovableHandle", "unserializable_hook", "warn_if_has_hooks", "BackwardHook"]


class RemovableHandle:
    r"""
    A handle which provides the capability to remove a hook.

    Args:
        hooks_dict (dict): A dictionary of hooks, indexed by hook ``id``.
        extra_dict (Union[dict, List[dict]]): An additional dictionary or list of
            dictionaries whose keys will be deleted when the same keys are
            removed from ``hooks_dict``.
    """

    id: int
    next_id: int = 0

    def __init__(self, hooks_dict: Any, *, extra_dict: Any = None) -> None:
        self.hooks_dict_ref = weakref.ref(hooks_dict)
        self.id = RemovableHandle.next_id
        RemovableHandle.next_id += 1

        self.extra_dict_ref: tuple = ()
        if isinstance(extra_dict, dict):
            self.extra_dict_ref = (weakref.ref(extra_dict),)
        elif isinstance(extra_dict, list):
            self.extra_dict_ref = tuple(weakref.ref(d) for d in extra_dict)

    def remove(self) -> None:
        hooks_dict = self.hooks_dict_ref()
        if hooks_dict is not None and self.id in hooks_dict:
            del hooks_dict[self.id]

        for ref in self.extra_dict_ref:
            extra_dict = ref()
            if extra_dict is not None and self.id in extra_dict:
                del extra_dict[self.id]

    def __getstate__(self):
        if self.extra_dict_ref is None:
            return (self.hooks_dict_ref(), self.id)
        else:
            return (self.hooks_dict_ref(), self.id, tuple(ref() for ref in self.extra_dict_ref))

    def __setstate__(self, state) -> None:
        if state[0] is None:
            # create a dead reference
            self.hooks_dict_ref = weakref.ref(OrderedDict())
        else:
            self.hooks_dict_ref = weakref.ref(state[0])
        self.id = state[1]
        RemovableHandle.next_id = max(RemovableHandle.next_id, self.id + 1)

        if len(state) < 3 or state[2] is None:
            self.extra_dict_ref = ()
        else:
            self.extra_dict_ref = tuple(weakref.ref(d) for d in state[2])

    def __enter__(self) -> "RemovableHandle":
        return self

    def __exit__(self, type: Any, value: Any, tb: Any) -> None:
        self.remove()


def unserializable_hook(f):
    """
    Mark a function as an unserializable hook with this decorator.

    This suppresses warnings that would otherwise arise if you attempt
    to serialize a tensor that has a hook.
    """
    f.__torch_unserializable__ = True
    return f


def warn_if_has_hooks(tensor):
    if tensor._backward_hooks:
        for k in tensor._backward_hooks:
            hook = tensor._backward_hooks[k]
            if not hasattr(hook, "__torch_unserializable__"):
                warnings.warn(
                    f"backward hook {repr(hook)} on tensor will not be "
                    "serialized. If this is expected, you can "
                    "decorate the function with @torch.utils.hooks.unserializable_hook "
                    "to suppress this warning"
                )
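
# A minimal usage sketch of RemovableHandle (illustrative only; ``net`` and
# ``log_forward`` are hypothetical names). ``register_forward_hook`` is the
# public nn.Module API that returns a RemovableHandle; because the handle is
# also a context manager, a hook can be scoped to a block:
#
#     >>> import torch
#     >>> net = torch.nn.Linear(2, 2)
#     >>> def log_forward(module, inputs, output):
#     ...     print("forward ran")
#     >>> with net.register_forward_hook(log_forward):
#     ...     _ = net(torch.randn(1, 2))  # hook fires inside the block
#     forward ran
#     >>> _ = net(torch.randn(1, 2))  # __exit__ called remove(); no output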

class BackwardHook:
    """
    A wrapper class to implement nn.Module backward hooks.

    It handles:
      - Ignoring non-Tensor inputs and replacing them by None before calling the user hook
      - Generating the proper Node to capture a set of Tensor's gradients
      - Linking the gradients captured for the outputs with the gradients captured for the inputs
      - Calling the user hook once both output and input gradients are available
    """

    def __init__(self, module, user_hooks, user_pre_hooks):
        self.user_hooks = user_hooks
        self.user_pre_hooks = user_pre_hooks
        self.module = module

        self.grad_outputs = None
        self.n_outputs = -1
        self.output_tensors_index = None
        self.n_inputs = -1
        self.input_tensors_index = None

    def _pack_with_none(self, indices, values, size):
        # Scatter `values` into a tuple of length `size`, padding every
        # position not listed in `indices` with None.
        res = [None] * size
        for idx, val in zip(indices, values):
            res[idx] = val

        return tuple(res)

    def _unpack_none(self, indices, values):
        # Inverse of _pack_with_none: keep only the positions in `indices`.
        res = []
        for idx in indices:
            res.append(values[idx])

        return tuple(res)

    def _set_user_hook(self, grad_fn):
        def hook(grad_input, _):
            if self.grad_outputs is None:
                # This happens because the gradient in your nn.Module flows to
                # the Module's input without passing through the Module's
                # output, e.g. when you're doing double backward.
                return
            res = self._pack_with_none(self.input_tensors_index, grad_input, self.n_inputs)

            for user_hook in self.user_hooks:
                out = user_hook(self.module, res, self.grad_outputs)

                if out is None:
                    continue

                if len(out) != len(res):
                    raise RuntimeError(
                        "Backward hook returned an invalid number of grad_input, "
                        f"got {len(out)}, but expected {len(res)}"
                    )

                res = out

            self.grad_outputs = None

            return self._unpack_none(self.input_tensors_index, res)

        grad_fn.register_hook(hook)

    def _apply_on_tensors(self, fn, args):
        # Apply the given function to the Tensors contained in args.
        # Returns the updated args and the indices of the Tensors inside them.
        tensors_idx = []
        tensors = []

        requires_grad = False
        for i, arg in enumerate(args):
            if isinstance(arg, torch.Tensor):
                tensors_idx.append(i)
                tensors.append(arg)
                requires_grad |= arg.requires_grad

        if not (requires_grad and torch.is_grad_enabled()):
            return args, None

        new_tensors = torch.nn.modules._functions.BackwardHookFunction.apply(*tensors)
        if len(new_tensors) == 0:
            raise RuntimeError("Cannot set Module backward hook for a Module with no input Tensors.")

        grad_fns = [
            t.grad_fn
            for t in new_tensors
            if t.grad_fn is not None and t.grad_fn.name() == "BackwardHookFunctionBackward"
        ]
        if len(grad_fns) == 0:
            raise RuntimeError(
                "Error while setting up backward hooks. Please open "
                "an issue with a code sample to reproduce this."
            )

        fn(grad_fns[0])

        arg_list = list(args)
        for idx, val in zip(tensors_idx, new_tensors):
            arg_list[idx] = val

        if type(args) is tuple:
            out = tuple(arg_list)
        else:
            out = type(args)(*arg_list)
        return out, tensors_idx

    def setup_input_hook(self, args):
        def fn(grad_fn):
            self._set_user_hook(grad_fn)

        res, input_idx = self._apply_on_tensors(fn, args)
        self.n_inputs = len(args)
        self.input_tensors_index = input_idx
        return res

    def setup_output_hook(self, args):
        def fn(grad_fn):
            def hook(_, grad_output):
                self.grad_outputs = self._pack_with_none(
                    self.output_tensors_index, grad_output, self.n_outputs
                )

                if self.user_pre_hooks:
                    expected_len = len(self.grad_outputs)
                    for user_pre_hook in self.user_pre_hooks:
                        hook_grad_outputs = user_pre_hook(self.module, self.grad_outputs)
                        if hook_grad_outputs is None:
                            continue

                        actual_len = len(hook_grad_outputs)
                        if actual_len != expected_len:
                            raise RuntimeError(
                                "Backward pre hook returned an invalid number of grad_output, "
                                f"got {actual_len}, but expected {expected_len}"
                            )
                        self.grad_outputs = hook_grad_outputs

                # We need to be able to clear self.grad_outputs but also return it.
                local_grad_outputs = self.grad_outputs

                # Special case if no input required gradients: this hook should
                # call the user hooks directly.
                if self.input_tensors_index is None:
                    warnings.warn(
                        "Full backward hook is firing when gradients are computed "
                        "with respect to module outputs since no inputs require gradients. See "
                        "https://docs.pytorch.org/docs/main/generated/torch.nn.Module.html#torch.nn.Module.register_full_backward_hook "
                        "for more details.",
                        stacklevel=5,
                    )
                    grad_inputs = self._pack_with_none([], [], self.n_inputs)
                    for user_hook in self.user_hooks:
                        res = user_hook(self.module, grad_inputs, self.grad_outputs)
                        if res is not None and not (isinstance(res, tuple) and all(el is None for el in res)):
                            raise RuntimeError(
                                "Backward hook for Modules where no input requires "
                                "gradient should always return None or None for all gradients."
                            )
                    self.grad_outputs = None

                if local_grad_outputs is not None:
                    assert self.output_tensors_index is not None  # mypy
                    return tuple(local_grad_outputs[i] for i in self.output_tensors_index)

            grad_fn.register_hook(hook)

        is_tuple = True
        if not isinstance(args, tuple):
            args = (args,)
            is_tuple = False

        res, output_idx = self._apply_on_tensors(fn, args)
        self.n_outputs = len(args)
        self.output_tensors_index = output_idx

        if not is_tuple:
            res = res[0]
        return res
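
# A minimal, hedged sketch of the machinery above in action (illustrative
# only, not part of this module's API). It goes through the public
# ``nn.Module.register_full_backward_hook`` entry point, which is built on
# BackwardHook; ``report_grads`` is a hypothetical hook name.
if __name__ == "__main__":
    net = torch.nn.Linear(3, 1)

    def report_grads(module, grad_input, grad_output):
        # grad_input / grad_output arrive as tuples padded with None for
        # positions that carry no gradient (see _pack_with_none above).
        print("grad_input shapes:", [None if g is None else tuple(g.shape) for g in grad_input])
        print("grad_output shapes:", [None if g is None else tuple(g.shape) for g in grad_output])

    net.register_full_backward_hook(report_grads)

    # requires_grad=True on the input avoids the "no inputs require
    # gradients" warning emitted in setup_output_hook above.
    inp = torch.randn(4, 3, requires_grad=True)
    net(inp).sum().backward()  # report_grads fires once gradients exist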