"""
A classifier model that decides which label to assign to a token on
the basis of a tree structure, where branches correspond to conditions
on feature values, and leaves correspond to label assignments.
"""

from collections import defaultdict

from nltk.classify.api import ClassifierI
from nltk.probability import FreqDist, MLEProbDist, entropy


class DecisionTreeClassifier(ClassifierI):
    def __init__(self, label, feature_name=None, decisions=None, default=None):
        """
        :param label: The most likely label for tokens that reach
            this node in the decision tree.  If this decision tree
            has no children, then this label will be assigned to
            any token that reaches this decision tree.
        :param feature_name: The name of the feature that this
            decision tree selects for.
        :param decisions: A dictionary mapping from feature values
            for the feature identified by ``feature_name`` to
            child decision trees.
        :param default: The child that will be used if the value of
            feature ``feature_name`` does not match any of the keys in
            ``decisions``.  This is used when constructing binary
            decision trees.
        """
        self._label = label
        self._fname = feature_name
        self._decisions = decisions
        self._default = default

    def labels(self):
        labels = [self._label]
        if self._decisions is not None:
            for dt in self._decisions.values():
                labels.extend(dt.labels())
        if self._default is not None:
            labels.extend(self._default.labels())
        return list(set(labels))

    def classify(self, featureset):
        # Decision leaf:
        if self._fname is None:
            return self._label

        # Decision tree:
        fval = featureset.get(self._fname)
        if fval in self._decisions:
            return self._decisions[fval].classify(featureset)
        elif self._default is not None:
            return self._default.classify(featureset)
        else:
            return self._label

    def error(self, labeled_featuresets):
        errors = 0
        for featureset, label in labeled_featuresets:
            if self.classify(featureset) != label:
                errors += 1
        return errors / len(labeled_featuresets)

    def pretty_format(self, width=70, prefix="", depth=4):
        """
        Return a string containing a pretty-printed version of this
        decision tree.  Each line in this string corresponds to a
        single decision tree node or leaf, and indentation is used to
        display the structure of the decision tree.
        """
        if self._fname is None:
            n = width - len(prefix) - 15
            return "{}{} {}\n".format(prefix, "." * n, self._label)
        s = ""
        for i, (fval, result) in enumerate(
            sorted(
                self._decisions.items(),
                key=lambda item: (item[0] in [None, False, True], str(item[0]).lower()),
            )
        ):
            hdr = f"{prefix}{self._fname}={fval}? "
            n = width - 15 - len(hdr)
            s += "{}{} {}\n".format(hdr, "." * n, result._label)
            if result._fname is not None and depth > 1:
                s += result.pretty_format(width, prefix + "  ", depth - 1)
        if self._default is not None:
            n = width - len(prefix) - 21
            s += "{}else: {} {}\n".format(prefix, "." * n, self._default._label)
            if self._default._fname is not None and depth > 1:
                s += self._default.pretty_format(width, prefix + "  ", depth - 1)
        return s

    def pseudocode(self, prefix="", depth=4):
        """
        Return a string representation of this decision tree that
        expresses the decisions it makes as a nested set of pseudocode
        if statements.
        """
        if self._fname is None:
            return f"{prefix}return {self._label!r}\n"
        s = ""
        for (fval, result) in sorted(
            self._decisions.items(),
            key=lambda item: (item[0] in [None, False, True], str(item[0]).lower()),
        ):
            s += f"{prefix}if {self._fname} == {fval!r}: "
            if result._fname is not None and depth > 1:
                s += "\n" + result.pseudocode(prefix + "  ", depth - 1)
            else:
                s += f"return {result._label!r}\n"
        if self._default is not None:
            if len(self._decisions) == 1:
                s += "{}if {} != {!r}: ".format(
                    prefix, self._fname, list(self._decisions.keys())[0]
                )
            else:
                s += f"{prefix}else: "
            if self._default._fname is not None and depth > 1:
                s += "\n" + self._default.pseudocode(prefix + "  ", depth - 1)
            else:
                s += f"return {self._default._label!r}\n"
        return s

    def __str__(self):
        return self.pretty_format()

    @staticmethod
    def train(
        labeled_featuresets,
        entropy_cutoff=0.05,
        depth_cutoff=100,
        support_cutoff=10,
        binary=False,
        feature_values=None,
        verbose=False,
    ):
        """
        :param binary: If true, then treat all feature/value pairs as
            individual binary features, rather than using a single n-way
            branch for each feature.
        """
        # Collect a list of all feature names.
        feature_names = set()
        for featureset, label in labeled_featuresets:
            for fname in featureset:
                feature_names.add(fname)

        # Collect a list of the values each feature can take.
        if feature_values is None and binary:
            feature_values = defaultdict(set)
            for featureset, label in labeled_featuresets:
                for fname, fval in featureset.items():
                    feature_values[fname].add(fval)

        # Start with a stump.
        if not binary:
            tree = DecisionTreeClassifier.best_stump(
                feature_names, labeled_featuresets, verbose
            )
        else:
            tree = DecisionTreeClassifier.best_binary_stump(
                feature_names, labeled_featuresets, feature_values, verbose
            )

        # Refine the stump.
        tree.refine(
            labeled_featuresets,
            entropy_cutoff,
            depth_cutoff - 1,
            support_cutoff,
            binary,
            feature_values,
            verbose,
        )

        # Return it.
        return tree

    @staticmethod
    def leaf(labeled_featuresets):
        # A leaf node predicts the most frequent label in the training data.
        label = FreqDist(label for (featureset, label) in labeled_featuresets).max()
        return DecisionTreeClassifier(label)

    @staticmethod
    def stump(feature_name, labeled_featuresets):
        label = FreqDist(label for (featureset, label) in labeled_featuresets).max()

        # Find the best label for each value.
        freqs = defaultdict(FreqDist)  # freq(label|value)
        for featureset, label in labeled_featuresets:
            feature_value = featureset.get(feature_name)
            freqs[feature_value][label] += 1

        decisions = {val: DecisionTreeClassifier(freqs[val].max()) for val in freqs}
        return DecisionTreeClassifier(label, feature_name, decisions)

    def refine(
        self,
        labeled_featuresets,
        entropy_cutoff,
        depth_cutoff,
        support_cutoff,
        binary=False,
        feature_values=None,
        verbose=False,
    ):
        # Stop refining if we lack support, this node is a leaf, or the
        # depth limit has been reached.
        if len(labeled_featuresets) <= support_cutoff:
            return
        if self._fname is None:
            return
        if depth_cutoff <= 0:
            return
        for fval in self._decisions:
            fval_featuresets = [
                (featureset, label)
                for (featureset, label) in labeled_featuresets
                if featureset.get(self._fname) == fval
            ]

            label_freqs = FreqDist(label for (featureset, label) in fval_featuresets)
            if entropy(MLEProbDist(label_freqs)) > entropy_cutoff:
                self._decisions[fval] = DecisionTreeClassifier.train(
                    fval_featuresets,
                    entropy_cutoff,
                    depth_cutoff,
                    support_cutoff,
                    binary,
                    feature_values,
                    verbose,
                )
        if self._default is not None:
            default_featuresets = [
                (featureset, label)
                for (featureset, label) in labeled_featuresets
                if featureset.get(self._fname) not in self._decisions
            ]
            label_freqs = FreqDist(label for (featureset, label) in default_featuresets)
            if entropy(MLEProbDist(label_freqs)) > entropy_cutoff:
                self._default = DecisionTreeClassifier.train(
                    default_featuresets,
                    entropy_cutoff,
                    depth_cutoff,
                    support_cutoff,
                    binary,
                    feature_values,
                    verbose,
                )

    @staticmethod
    def best_stump(feature_names, labeled_featuresets, verbose=False):
        best_stump = DecisionTreeClassifier.leaf(labeled_featuresets)
        best_error = best_stump.error(labeled_featuresets)
        for fname in feature_names:
            stump = DecisionTreeClassifier.stump(fname, labeled_featuresets)
            stump_error = stump.error(labeled_featuresets)
            if stump_error < best_error:
                best_error = stump_error
                best_stump = stump
        if verbose:
            print(
                "best stump for {:6d} toks uses {:20} err={:6.4f}".format(
                    len(labeled_featuresets), best_stump._fname, best_error
                )
            )
        return best_stump

    @staticmethod
    def binary_stump(feature_name, feature_value, labeled_featuresets):
        label = FreqDist(label for (featureset, label) in labeled_featuresets).max()

        # Find the best label for each value.
        pos_fdist = FreqDist()
        neg_fdist = FreqDist()
        for featureset, label in labeled_featuresets:
            if featureset.get(feature_name) == feature_value:
                pos_fdist[label] += 1
            else:
                neg_fdist[label] += 1

        decisions = {}
        default = label
        # But hopefully we have observations!
        if pos_fdist.N() > 0:
            decisions = {feature_value: DecisionTreeClassifier(pos_fdist.max())}
        if neg_fdist.N() > 0:
            default = DecisionTreeClassifier(neg_fdist.max())

        return DecisionTreeClassifier(label, feature_name, decisions, default)

    @staticmethod
    def best_binary_stump(
        feature_names, labeled_featuresets, feature_values, verbose=False
    ):
        best_stump = DecisionTreeClassifier.leaf(labeled_featuresets)
        best_error = best_stump.error(labeled_featuresets)
        for fname in feature_names:
            for fval in feature_values[fname]:
                stump = DecisionTreeClassifier.binary_stump(
                    fname, fval, labeled_featuresets
                )
                stump_error = stump.error(labeled_featuresets)
                if stump_error < best_error:
                    best_error = stump_error
                    best_stump = stump
        if verbose:
            if best_stump._decisions:
                descr = "{}={}".format(
                    best_stump._fname, list(best_stump._decisions.keys())[0]
                )
            else:
                descr = "(default)"
            print(
                "best stump for {:6d} toks uses {:20} err={:6.4f}".format(
                    len(labeled_featuresets), descr, best_error
                )
            )
        return best_stump


def f(x):
    return DecisionTreeClassifier.train(x, binary=True, verbose=True)


def demo():
    from nltk.classify.util import binary_names_demo_features, names_demo

    classifier = names_demo(f, binary_names_demo_features)
    print(classifier.pretty_format(depth=7))
    print(classifier.pseudocode(depth=7))


if __name__ == "__main__":
    demo()
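# Minimal usage sketch, kept as a comment so importing this module is
# unaffected.  The feature name "outlook" and the "play"/"stay" labels
# are hypothetical toy data, invented purely to illustrate the API:
#
#     train_data = [
#         ({"outlook": "sunny"}, "play"),
#         ({"outlook": "sunny"}, "play"),
#         ({"outlook": "rainy"}, "stay"),
#         ({"outlook": "rainy"}, "stay"),
#     ]
#     classifier = DecisionTreeClassifier.train(train_data)
#     print(classifier.classify({"outlook": "sunny"}))  # -> "play"
#     print(classifier.pretty_format())
#
# With only four examples, refine() returns immediately (the default
# support_cutoff is 10), so this yields a single one-feature stump.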