from __future__ import absolute_import, division, print_function

import asyncio
import concurrent.futures
import contextlib
import time
import weakref
from uuid import uuid4

from ._utils import (
    _retrieve_traceback_capturing_wrapped_call,
    _TracebackCapturingWrapper,
)
from .parallel import AutoBatchingMixin, ParallelBackendBase, parallel_config

try:
    import dask
    import distributed
except ImportError:
    dask = None
    distributed = None

if dask is not None and distributed is not None:
    from dask.distributed import Client, as_completed, get_client, rejoin, secede
    from dask.sizeof import sizeof
    from dask.utils import funcname
    from distributed.utils import thread_state

    try:
        # TimeoutError thrown by recent versions of distributed.
        from distributed.utils import TimeoutError as _TimeoutError
    except ImportError:
        from tornado.gen import TimeoutError as _TimeoutError


def is_weakrefable(obj):
    try:
        weakref.ref(obj)
        return True
    except TypeError:
        return False


class _WeakKeyDictionary:
    """A variant of weakref.WeakKeyDictionary for unhashable objects.

    This datastructure is used to store futures for broadcasted data objects
    such as large numpy arrays or pandas dataframes that are not hashable and
    therefore cannot be used as keys of traditional python dicts.

    Furthermore using a dict with id(array) as key is not safe because the
    Python is likely to reuse id of recently collected arrays.
    """

    def __init__(self):
        self._data = {}

    def __getitem__(self, obj):
        ref, val = self._data[id(obj)]
        if ref() is not obj:
            # In case of a race condition with on_destroy.
            raise KeyError(obj)
        return val

    def __setitem__(self, obj, value):
        key = id(obj)
        try:
            ref, _ = self._data[key]
            if ref() is not obj:
                # In case of a race condition with on_destroy.
                raise KeyError(obj)
        except KeyError:
            # Insert the new entry in the mapping along with a weakref
            # callback to automatically delete the entry as soon as the
            # object used as key is garbage collected.
            def on_destroy(_):
                del self._data[key]

            ref = weakref.ref(obj, on_destroy)
        self._data[key] = ref, value

    def __len__(self):
        return len(self._data)

    def clear(self):
        self._data.clear()


def _funcname(x):
    try:
        if isinstance(x, list):
            x = x[0][0]
    except Exception:
        pass
    return funcname(x)


def _make_tasks_summary(tasks):
    """Summarize of list of (func, args, kwargs) function calls"""
    unique_funcs = {func for func, args, kwargs in tasks}

    if len(unique_funcs) == 1:
        mixed = False
    else:
        mixed = True
    return len(tasks), mixed, _funcname(tasks)


class Batch:
    """dask-compatible wrapper that executes a batch of tasks"""

    def __init__(self, tasks):
        # Collect some metadata from the tasks to ease Batch calls
        # introspection when debugging.
        self._num_tasks, self._mixed, self._funcname = _make_tasks_summary(tasks)

    def __call__(self, tasks=None):
        results = []
        with parallel_config(backend="dask"):
            for func, args, kwargs in tasks:
                results.append(func(*args, **kwargs))
            return results

    def __repr__(self):
        descr = f"batch_of_{self._funcname}_{self._num_tasks}_calls"
        if self._mixed:
            descr = "mixed_" + descr
        return descr


def _joblib_probe_task():
    # Noop used to probe when workers are ready.
    pass


class DaskDistributedBackend(AutoBatchingMixin, ParallelBackendBase):
    MIN_IDEAL_BATCH_DURATION = 0.2
    MAX_IDEAL_BATCH_DURATION = 1.0
    supports_retrieve_callback = True
    default_n_jobs = -1

    def __init__(
        self,
        scheduler_host=None,
        scatter=None,
        client=None,
        loop=None,
        wait_for_workers_timeout=10,
        **submit_kwargs
    ):
        super().__init__()

        if distributed is None:
            msg = (
                "You are trying to use 'dask' as a joblib parallel backend "
                "but dask is not installed. Please install dask "
                "to fix this error."
            )
            raise ValueError(msg)

        if client is None:
            if scheduler_host:
                client = Client(scheduler_host, loop=loop, set_as_default=False)
            else:
                try:
                    client = get_client()
                except ValueError as e:
                    msg = (
                        "To use Joblib with Dask first create a Dask Client"
                        "\n\n"
                        "    from dask.distributed import Client\n"
                        "    client = Client()\n"
                        "or\n"
                        "    client = Client('scheduler-address:8786')"
                    )
                    raise ValueError(msg) from e

        self.client = client

        if scatter is not None and not isinstance(scatter, (list, tuple)):
            raise TypeError(
                "scatter must be a list/tuple, got `%s`" % type(scatter).__name__
            )

        if scatter is not None and len(scatter) > 0:
            # Keep a reference to the scattered data to keep their ids stable.
            self._scatter = list(scatter)
            scattered = self.client.scatter(scatter, broadcast=True)
            self.data_futures = {id(x): f for x, f in zip(scatter, scattered)}
        else:
            self._scatter = []
            self.data_futures = {}
        self.wait_for_workers_timeout = wait_for_workers_timeout
        self.submit_kwargs = submit_kwargs
        self.waiting_futures = as_completed(
            [], loop=client.loop, with_results=True, raise_errors=False
        )
        self._results = {}
        self._callbacks = {}

    async def _collect(self):
        while self._continue:
            async for future, result in self.waiting_futures:
                cf_future = self._results.pop(future)
                callback = self._callbacks.pop(future)
                if future.status == "error":
                    typ, exc, tb = result
                    cf_future.set_exception(exc)
                else:
                    cf_future.set_result(result)
                    callback(result)
            await asyncio.sleep(0.01)

    def __reduce__(self):
        return (DaskDistributedBackend, ())

    def get_nested_backend(self):
        return DaskDistributedBackend(client=self.client), -1

    def configure(self, n_jobs=1, parallel=None, **backend_args):
        self.parallel = parallel
        return self.effective_n_jobs(n_jobs)

    def start_call(self):
        self._continue = True
        self.client.loop.add_callback(self._collect)
        self.call_data_futures = _WeakKeyDictionary()

    def stop_call(self):
        # The explicit call to clear is required to break a cycling reference
        # to the futures.
        self._continue = False
        # Wait a bit for the future collection routine (self._collect) to
        # finish before a following backend termination call.
        time.sleep(0.01)
        self.call_data_futures.clear()

    def effective_n_jobs(self, n_jobs):
        effective_n_jobs = sum(self.client.ncores().values())
        if effective_n_jobs != 0 or not self.wait_for_workers_timeout:
            return effective_n_jobs

        # If there is no worker, schedule a probe task to wait for the workers
        # to come up and be available. If the dask cluster is in adaptive
        # mode, this probe task will trigger the start of new workers.
        try:
            self.client.submit(_joblib_probe_task).result(
                timeout=self.wait_for_workers_timeout
            )
        except _TimeoutError as e:
            error_msg = (
                "DaskDistributedBackend has no worker after {} seconds. "
                "Make sure that workers are started and can properly connect "
                "to the scheduler and increase the joblib/dask connection "
                "timeout with:\n\n"
                "parallel_config(backend='dask', wait_for_workers_timeout={})"
            ).format(
                self.wait_for_workers_timeout,
                max(10, 2 * self.wait_for_workers_timeout),
            )
            raise TimeoutError(error_msg) from e
        return sum(self.client.ncores().values())

    async def _to_func_args(self, func):
        itemgetters = dict()

        # Futures that are dynamically generated during a single call to
        # Parallel.__call__.
        call_data_futures = getattr(self, "call_data_futures", None)

        async def maybe_to_futures(args):
            out = []
            for arg in args:
                arg_id = id(arg)
                if arg_id in itemgetters:
                    out.append(itemgetters[arg_id])
                    continue

                f = self.data_futures.get(arg_id, None)
                if f is None and call_data_futures is not None:
                    try:
                        f = await call_data_futures[arg]
                    except KeyError:
                        pass
                    if f is None:
                        if is_weakrefable(arg) and sizeof(arg) > 1e3:
                            # Automatically scatter large objects to some of
                            # the workers to avoid duplicated data transfers.
                            # Rely on automated inter-worker data stealing if
                            # more workers need to reuse this data
                            # concurrently.
                            # Use hash=False: nested scatter calls (i.e.
                            # calling client.scatter inside a dask worker)
                            # using hash=True often raise CancelledError,
                            # see dask/distributed#3703.
                            _coro = self.client.scatter(
                                arg, asynchronous=True, hash=False
                            )
                            # Centralize the scattering of identical arguments
                            # between concurrent apply_async callbacks by
                            # exposing the scatter coroutine to all of them in
                            # call_data_futures before it completes.
                            t = asyncio.Task(_coro)
                            call_data_futures[arg] = t

                            f = await t

                if f is not None:
                    out.append(f)
                else:
                    out.append(arg)
            return out

        tasks = []
        for f, args, kwargs in func.items:
            args = list(await maybe_to_futures(args))
            kwargs = dict(
                zip(kwargs.keys(), await maybe_to_futures(kwargs.values()))
            )
            tasks.append((f, args, kwargs))

        return (Batch(tasks), tasks)

    def apply_async(self, func, callback=None):
        cf_future = concurrent.futures.Future()
        cf_future.get = cf_future.result  # achieve AsyncResult API

        async def f(func, callback):
            batch, tasks = await self._to_func_args(func)
            key = f"{batch!r}-{uuid4().hex}"

            dask_future = self.client.submit(
                _TracebackCapturingWrapper(batch),
                tasks=tasks,
                key=key,
                **self.submit_kwargs
            )
            self.waiting_futures.add(dask_future)
            self._callbacks[dask_future] = callback
            self._results[dask_future] = cf_future

        self.client.loop.add_callback(f, func, callback)

        return cf_future

    def retrieve_result_callback(self, out):
        return _retrieve_traceback_capturing_wrapped_call(out)

    def abort_everything(self, ensure_ready=True):
        """Tell the client to cancel any task submitted via this instance

        joblib.Parallel will never access those results
        """
        with self.waiting_futures.lock:
            self.waiting_futures.futures.clear()
            while not self.waiting_futures.queue.empty():
                self.waiting_futures.queue.get()

    @contextlib.contextmanager
    def retrieval_context(self):
        """Override ParallelBackendBase.retrieval_context to avoid deadlocks.

        This removes thread from the worker's thread pool (using 'secede').
        Seceding avoids deadlock in nested parallelism settings.
        """
        # See 'joblib.Parallel.__call__' and 'joblib.Parallel.retrieve' for
        # how this is used.
        if hasattr(thread_state, "execution_state"):
            # We are in a dask worker. Secede to avoid deadlock.
            secede()

        yield

        if hasattr(thread_state, "execution_state"):
            rejoin()
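

# Minimal usage sketch of how the dask backend above is typically activated
# from user code. Illustrative only: it assumes dask.distributed is installed
# and that a local in-process cluster can be started; the scheduler address in
# the comment is a placeholder.
if __name__ == "__main__":
    from dask.distributed import Client

    from joblib import Parallel, delayed

    # Creating a Client makes it discoverable via get_client(), so
    # DaskDistributedBackend picks it up automatically when activated below.
    client = Client(processes=False)  # or Client("scheduler-address:8786")

    # parallel_config(backend="dask") routes joblib.Parallel batches through
    # DaskDistributedBackend.apply_async, which wraps each batch in a Batch
    # object and submits it to the dask scheduler.
    with parallel_config(backend="dask"):
        results = Parallel(n_jobs=-1)(delayed(pow)(i, 2) for i in range(10))

    print(results)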