"""Module containing our file processor that tokenizes a file for checks."""
from __future__ import annotations

import argparse
import ast
import functools
import io
import logging
import tokenize
from collections.abc import Generator
from typing import Any

from flake8 import defaults
from flake8 import utils
from flake8._compat import FSTRING_END
from flake8._compat import FSTRING_MIDDLE
from flake8._compat import TSTRING_END
from flake8._compat import TSTRING_MIDDLE
from flake8.plugins.finder import LoadedPlugin

LOG = logging.getLogger(__name__)

NEWLINE = frozenset([tokenize.NL, tokenize.NEWLINE])
SKIP_TOKENS = frozenset(
    [tokenize.NL, tokenize.NEWLINE, tokenize.INDENT, tokenize.DEDENT]
)

_LogicalMapping = list[tuple[int, tuple[int, int]]]
_Logical = tuple[list[str], list[str], _LogicalMapping]


class FileProcessor:
    """Processes a file and holds state.

    This processes a file by generating tokens, logical and physical lines,
    and AST trees. This also provides a way of passing state about the file
    to checks expecting that state. Any public attribute on this object can
    be requested by a plugin. The known public attributes are:

    - :attr:`blank_before`
    - :attr:`blank_lines`
    - :attr:`checker_state`
    - :attr:`indent_char`
    - :attr:`indent_level`
    - :attr:`line_number`
    - :attr:`logical_line`
    - :attr:`max_line_length`
    - :attr:`max_doc_length`
    - :attr:`multiline`
    - :attr:`noqa`
    - :attr:`previous_indent_level`
    - :attr:`previous_logical`
    - :attr:`previous_unindented_logical_line`
    - :attr:`tokens`
    - :attr:`file_tokens`
    - :attr:`total_lines`
    - :attr:`verbose`
    """

    #: always ``False``; kept for compatibility with checks that request it
    noqa = False

    def __init__(
        self,
        filename: str,
        options: argparse.Namespace,
        lines: list[str] | None = None,
    ) -> None:
        """Initialize our file processor.

        :param filename: Name of the file to process
        """
        self.options = options
        self.filename = filename
        self.lines = lines if lines is not None else self.read_lines()
        self.strip_utf_bom()

        # Defaults for public attributes
        #: Number of preceding blank lines
        self.blank_before = 0
        #: Number of blank lines
        self.blank_lines = 0
        #: Checker states for each plugin
        self._checker_states: dict[str, dict[Any, Any]] = {}
        #: Current checker state
        self.checker_state: dict[Any, Any] = {}
        #: User provided option for hang closing
        self.hang_closing = options.hang_closing
        #: Character used for indentation
        self.indent_char: str | None = None
        #: Current level of indentation
        self.indent_level = 0
        #: Number of spaces used for indentation
        self.indent_size = options.indent_size
        #: Line number in the file
        self.line_number = 0
        #: Current logical line
        self.logical_line = ""
        #: Maximum line length as configured by the user
        self.max_line_length = options.max_line_length
        #: Maximum docstring / comment line length as configured by the user
        self.max_doc_length = options.max_doc_length
        #: Whether the current physical line is part of a multiline string
        self.multiline = False
        #: Previous level of indentation
        self.previous_indent_level = 0
        #: Previous logical line
        self.previous_logical = ""
        #: Previous unindented (i.e. top-level) logical line
        self.previous_unindented_logical_line = ""
        #: Current set of tokens
        self.tokens: list[tokenize.TokenInfo] = []
        #: Total number of lines in the file
        self.total_lines = len(self.lines)
        #: Verbosity level of Flake8
        self.verbose = options.verbose
        #: Statistics dictionary
        self.statistics = {"logical lines": 0}
        self._fstring_start = self._tstring_start = -1

    @functools.cached_property
    def file_tokens(self) -> list[tokenize.TokenInfo]:
        """Return the complete set of tokens for a file."""
        line_iter = iter(self.lines)
        return list(tokenize.generate_tokens(lambda: next(line_iter)))

    def fstring_start(self, lineno: int) -> None:
        """Signal the beginning of an fstring."""
        self._fstring_start = lineno

    def tstring_start(self, lineno: int) -> None:
        """Signal the beginning of a tstring."""
        self._tstring_start = lineno

    def multiline_string(self, token: tokenize.TokenInfo) -> Generator[str]:
        """Iterate through the lines of a multiline string."""
        if token.type == FSTRING_END:
            start = self._fstring_start
        elif token.type == TSTRING_END:
            start = self._tstring_start
        else:
            start = token.start[0]

        self.multiline = True
        self.line_number = start
        # intentionally don't include the last line: that line will be
        # terminated later by a future end-of-line token
        for _ in range(start, token.end[0]):
            yield self.lines[self.line_number - 1]
            self.line_number += 1
        self.multiline = False

    def reset_blank_before(self) -> None:
        """Reset the blank_before attribute to zero."""
        self.blank_before = 0

    def delete_first_token(self) -> None:
        """Delete the first token in the list of tokens."""
        del self.tokens[0]

    def visited_new_blank_line(self) -> None:
        """Note that we visited a new blank line."""
        self.blank_lines += 1

    def update_state(self, mapping: _LogicalMapping) -> None:
        """Update the indent level based on the logical line mapping."""
        (start_row, start_col) = mapping[0][1]
        start_line = self.lines[start_row - 1]
        self.indent_level = expand_indent(start_line[:start_col])
        if self.blank_before < self.blank_lines:
            self.blank_before = self.blank_lines

    def update_checker_state_for(self, plugin: LoadedPlugin) -> None:
        """Update the checker_state attribute for the plugin."""
        if "checker_state" in plugin.parameters:
            self.checker_state = self._checker_states.setdefault(
                plugin.entry_name, {}
            )

    def next_logical_line(self) -> None:
        """Record the previous logical line.

        This also resets the tokens list and the blank_lines count.
        """
        if self.logical_line:
            self.previous_indent_level = self.indent_level
            self.previous_logical = self.logical_line
            if not self.indent_level:
                self.previous_unindented_logical_line = self.logical_line
        self.blank_lines = 0
        self.tokens = []

    def build_logical_line_tokens(self) -> _Logical:
        """Build the mapping, comments, and logical line lists."""
        logical = []
        comments = []
        mapping: _LogicalMapping = []
        length = 0
        previous_row = previous_column = None
        for token_type, text, start, end, line in self.tokens:
            if token_type in SKIP_TOKENS:
                continue
            if not mapping:
                mapping = [(0, start)]
            if token_type == tokenize.COMMENT:
                comments.append(text)
                continue
            if token_type == tokenize.STRING:
                text = mutate_string(text)
            elif token_type in {FSTRING_MIDDLE, TSTRING_MIDDLE}:
                # the token text contains unescaped braces while the source
                # has them doubled, so pad the replacement text and shift the
                # end column to keep the offsets consistent
                brace_offset = text.count("{") + text.count("}")
                text = "x" * (len(text) + brace_offset)
                end = (end[0], end[1] + brace_offset)
            if previous_row:
                (start_row, start_column) = start
                if previous_row != start_row:  # different physical line
                    row_index = previous_row - 1
                    column_index = previous_column - 1
                    previous_text = self.lines[row_index][column_index]
                    if previous_text == "," or (
                        previous_text not in "{[(" and text not in "}])"
                    ):
                        text = " " + text
                elif previous_column != start_column:  # different column
                    text = line[previous_column:start_column] + text
            logical.append(text)
            length += len(text)
            mapping.append((length, start))
            (previous_row, previous_column) = end
        return comments, logical, mapping

    def build_ast(self) -> ast.AST:
        """Build an abstract syntax tree from the list of lines."""
        return ast.parse("".join(self.lines))

    def build_logical_line(self) -> tuple[str, str, _LogicalMapping]:
        """Build a logical line from the current tokens list."""
        comments, logical, mapping_list = self.build_logical_line_tokens()
        joined_comments = "".join(comments)
        self.logical_line = "".join(logical)
        self.statistics["logical lines"] += 1
        return joined_comments, self.logical_line, mapping_list

    def keyword_arguments_for(
        self,
        parameters: dict[str, bool],
        arguments: dict[str, Any],
    ) -> dict[str, Any]:
        """Generate the keyword arguments for a list of parameters."""
        ret = {}
        for param, required in parameters.items():
            if param in arguments:
                continue
            try:
                ret[param] = getattr(self, param)
            except AttributeError:
                if required:
                    raise
                else:
                    LOG.warning(
                        'Plugin requested optional parameter "%s" '
                        "but this is not an available parameter.",
                        param,
                    )
        return ret

    def generate_tokens(self) -> Generator[tokenize.TokenInfo]:
        """Tokenize the file and yield the tokens."""
        for token in tokenize.generate_tokens(self.next_line):
            if token[2][0] > self.total_lines:
                break
            self.tokens.append(token)
            yield token

    def _noqa_line_range(self, min_line: int, max_line: int) -> dict[int, str]:
        line_range = range(min_line, max_line + 1)
        joined = "".join(self.lines[min_line - 1:max_line])
        return dict.fromkeys(line_range, joined)

    @functools.cached_property
    def _noqa_line_mapping(self) -> dict[int, str]:
        """Build the mapping of noqa line numbers to the code they came from."""
        try:
            file_tokens = self.file_tokens
        except (tokenize.TokenError, SyntaxError):
            # if we failed to parse the file tokens, we'll always fail in
            # the future, so return an empty mapping
            return {}
        else:
            ret = {}

            min_line = len(self.lines) + 2
            max_line = -1
            for tp, _, (s_line, _), (e_line, _), _ in file_tokens:
                if tp == tokenize.ENDMARKER or tp == tokenize.DEDENT:
                    continue

                min_line = min(min_line, s_line)
                max_line = max(max_line, e_line)

                if tp in (tokenize.NL, tokenize.NEWLINE):
                    ret.update(self._noqa_line_range(min_line, max_line))

                    min_line = len(self.lines) + 2
                    max_line = -1

            return ret

    def noqa_line_for(self, line_number: int) -> str | None:
        """Retrieve the physical line which corresponds to the line number."""
        return self._noqa_line_mapping.get(line_number)

    def next_line(self) -> str:
        """Get the next line from the list."""
        if self.line_number >= self.total_lines:
            return ""
        line = self.lines[self.line_number]
        self.line_number += 1
        if self.indent_char is None and line[:1] in defaults.WHITESPACE:
            self.indent_char = line[0]
        return line

    def read_lines(self) -> list[str]:
        """Read the lines for this file checker."""
        if self.filename == "-":
            self.filename = self.options.stdin_display_name
            lines = self.read_lines_from_stdin()
        else:
            lines = self.read_lines_from_filename()
        return lines

    def read_lines_from_filename(self) -> list[str]:
        """Read the lines for a file."""
        try:
            with open(self.filename, "rb") as fd:
                (coding, lines) = tokenize.detect_encoding(fd.readline)
                textfd = io.TextIOWrapper(fd, coding, line_buffering=True)
                return [line.decode(coding) for line in lines] + textfd.readlines()
        except (LookupError, SyntaxError, UnicodeError):
            # If we cannot detect the codec from the content, or it is an
            # invalid or unknown codec, just try latin-1.
            with open(self.filename, encoding="latin-1") as fd:
                return fd.readlines()

    def read_lines_from_stdin(self) -> list[str]:
        """Read the lines from standard in."""
        return utils.stdin_get_lines()

    def should_ignore_file(self) -> bool:
        """Check if ``flake8: noqa`` is in the file to be analyzed.

        Check the file in which we are running our checks to see if it
        contains a ``# flake8: noqa`` line.

        :returns: True if it contains a ``# flake8: noqa`` line and False if
            it does not.
        """
        if not self.options.disable_noqa and any(
            defaults.NOQA_FILE.match(line) for line in self.lines
        ):
            return True
        elif any(defaults.NOQA_FILE.search(line) for line in self.lines):
            LOG.warning(
                "Detected `flake8: noqa` on line with code. To ignore an "
                "error on a line use `noqa` instead."
            )
            return False
        else:
            return False

    def strip_utf_bom(self) -> None:
        """Strip the UTF bom from the lines of the file."""
        if not self.lines:
            # If we have nothing to analyze quit early
            return

        # only strip the BOM from the very first line of the file
        if self.lines[0][:1] == "\ufeff":
            self.lines[0] = self.lines[0][1:]
        elif self.lines[0][:3] == "\xef\xbb\xbf":
            self.lines[0] = self.lines[0][3:]


def is_eol_token(token: tokenize.TokenInfo) -> bool:
    """Check if the token is an end-of-line token."""
    return token[0] in NEWLINE or token[4][token[3][1]:].lstrip() == "\\\n"


def is_multiline_string(token: tokenize.TokenInfo) -> bool:
    """Check if this is a multiline string."""
    return token.type in {FSTRING_END, TSTRING_END} or (
        token.type == tokenize.STRING and "\n" in token.string
    )


def token_is_newline(token: tokenize.TokenInfo) -> bool:
    """Check if the token type is a newline token type."""
    return token[0] in NEWLINE


def count_parentheses(current_parentheses_count: int, token_text: str) -> int:
    """Count the number of parentheses."""
    if token_text in "([{":
        return current_parentheses_count + 1
    elif token_text in ")]}":
        return current_parentheses_count - 1
    return current_parentheses_count


def expand_indent(line: str) -> int:
    r"""Return the amount of indentation.

    Tabs are expanded to the next multiple of 8.

    >>> expand_indent('    ')
    4
    >>> expand_indent('\t')
    8
    >>> expand_indent('       \t')
    8
    >>> expand_indent('        \t')
    16
    """
    line = line.expandtabs(8)
    return len(line) - len(line.lstrip())


def mutate_string(text: str) -> str:
    """Replace contents with 'xxx' to prevent syntax matching.

    >>> mutate_string('"abc"')
    '"xxx"'
    >>> mutate_string("'''abc'''")
    "'''xxx'''"
    >>> mutate_string("r'abc'")
    "r'xxx'"
    """
    # If there are string modifiers (e.g. b, u, r), skip past the prefix by
    # locating the first occurrence of the closing quote character; the
    # closing quote itself is always the final character.
    start = text.index(text[-1]) + 1
    end = len(text) - 1
    # Triple-quoted strings open and close with three quote characters
    if text[-3:] in ('"""', "'''"):
        start += 2
        end -= 2
    return text[:start] + "x" * (end - start) + text[end:]
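

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of flake8's public API): normally
# flake8's checker constructs and drives FileProcessor.  This sketch assumes a
# hand-built argparse.Namespace carrying the option attributes the constructor
# and read_lines() consult; the attribute values below are illustrative
# defaults, not flake8's authoritative configuration.
# ---------------------------------------------------------------------------
if __name__ == "__main__":  # pragma: no cover
    _options = argparse.Namespace(
        hang_closing=False,
        indent_size=4,
        max_line_length=79,
        max_doc_length=None,
        verbose=0,
        stdin_display_name="stdin",
        disable_noqa=False,
    )
    _processor = FileProcessor(__file__, _options)
    for _token in _processor.generate_tokens():
        # A NEWLINE token terminates a logical line: rebuild it (with string
        # contents masked by mutate_string) and then reset for the next one.
        if _token.type == tokenize.NEWLINE:
            _comments, _logical, _mapping = _processor.build_logical_line()
            if _logical:
                print(_logical)
            _processor.next_logical_line()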