# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE
# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt

"""This module contains a set of functions to handle python protocols for nodes
where it makes sense.
"""

from __future__ import annotations

import collections
import itertools
import operator as operator_mod
from collections.abc import Callable, Generator, Iterator, Sequence
from typing import TYPE_CHECKING, Any, TypeVar

from astroid import bases, decorators, nodes, util
from astroid.const import Context
from astroid.context import InferenceContext, copy_context
from astroid.exceptions import (
    AstroidIndexError,
    AstroidTypeError,
    AttributeInferenceError,
    InferenceError,
    NoDefault,
)
from astroid.nodes import node_classes
from astroid.typing import (
    ConstFactoryResult,
    InferenceResult,
    SuccessfulInferenceResult,
)

if TYPE_CHECKING:
    _TupleListNodeT = TypeVar("_TupleListNodeT", nodes.Tuple, nodes.List)

_CONTEXTLIB_MGR = "contextlib.contextmanager"

_UNARY_OPERATORS: dict[str, Callable[[Any], Any]] = {
    "+": operator_mod.pos,
    "-": operator_mod.neg,
    "~": operator_mod.invert,
    "not": operator_mod.not_,
}


def _infer_unary_op(obj: Any, op: str) -> ConstFactoryResult:
    """Perform unary operation on `obj`, unless it is `NotImplemented`.

    Can raise TypeError if operation is unsupported.
    """
    if obj is NotImplemented:
        value = obj
    else:
        func = _UNARY_OPERATORS[op]
        value = func(obj)
    return nodes.const_factory(value)


def tuple_infer_unary_op(self, op):
    return _infer_unary_op(tuple(self.elts), op)


def list_infer_unary_op(self, op):
    return _infer_unary_op(self.elts, op)


def set_infer_unary_op(self, op):
    return _infer_unary_op(set(self.elts), op)


def const_infer_unary_op(self, op):
    return _infer_unary_op(self.value, op)


def dict_infer_unary_op(self, op):
    return _infer_unary_op(dict(self.items), op)


# Binary operations

BIN_OP_IMPL = {
    "+": lambda a, b: a + b,
    "-": lambda a, b: a - b,
    "/": lambda a, b: a / b,
    "//": lambda a, b: a // b,
    "*": lambda a, b: a * b,
    "**": lambda a, b: a**b,
    "%": lambda a, b: a % b,
    "&": lambda a, b: a & b,
    "|": lambda a, b: a | b,
    "^": lambda a, b: a ^ b,
    "<<": lambda a, b: a << b,
    ">>": lambda a, b: a >> b,
    "@": operator_mod.matmul,
}
for _KEY, _IMPL in list(BIN_OP_IMPL.items()):
    BIN_OP_IMPL[_KEY + "="] = _IMPL


@decorators.yes_if_nothing_inferred
def const_infer_binary_op(
    self: nodes.Const,
    opnode: nodes.AugAssign | nodes.BinOp,
    operator: str,
    other: InferenceResult,
    context: InferenceContext,
    _: SuccessfulInferenceResult,
) -> Generator[ConstFactoryResult | util.UninferableBase]:
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, nodes.Const):
        if (
            operator == "**"
            and isinstance(self.value, (int, float))
            and isinstance(other.value, (int, float))
            and (self.value > 1e5 or other.value > 1e5)
        ):
            yield not_implemented
            return
        try:
            impl = BIN_OP_IMPL[operator]
            try:
                yield nodes.const_factory(impl(self.value, other.value))
            except TypeError:
                # ArithmeticError is not enough: float >> float is a TypeError
                yield not_implemented
            except Exception:  # pylint: disable=broad-except
                yield util.Uninferable
        except TypeError:
            yield not_implemented
    elif isinstance(self.value, str) and operator == "%":
        # TODO(cpopa): implement string interpolation later on.
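        # Illustrative note: with plain constant operands this function folds the
        # expression, e.g. a BinOp for `2 ** 3` reaches here with self.value == 2,
        # other.value == 3 and operator == "**", and BIN_OP_IMPL yields
        # nodes.Const(8); string interpolation such as `"%d" % 2` is deliberately
        # left uninferred below.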
        yield util.Uninferable
    else:
        yield not_implemented


def _multiply_seq_by_int(
    self: _TupleListNodeT,
    opnode: nodes.AugAssign | nodes.BinOp,
    value: int,
    context: InferenceContext,
) -> _TupleListNodeT:
    node = self.__class__(parent=opnode)
    if value > 1e8:
        node.elts = [util.Uninferable]
        return node
    filtered_elts = (
        util.safe_infer(elt, context) or util.Uninferable
        for elt in self.elts
        if not isinstance(elt, util.UninferableBase)
    )
    node.elts = list(filtered_elts) * value
    return node


def _filter_uninferable_nodes(
    elts: Sequence[InferenceResult], context: InferenceContext
) -> Iterator[SuccessfulInferenceResult]:
    for elt in elts:
        if isinstance(elt, util.UninferableBase):
            yield nodes.Unknown()
        else:
            for inferred in elt.infer(context):
                if not isinstance(inferred, util.UninferableBase):
                    yield inferred
                else:
                    yield nodes.Unknown()


@decorators.yes_if_nothing_inferred
def tl_infer_binary_op(
    self: _TupleListNodeT,
    opnode: nodes.AugAssign | nodes.BinOp,
    operator: str,
    other: InferenceResult,
    context: InferenceContext,
    method: SuccessfulInferenceResult,
) -> Generator[_TupleListNodeT | nodes.Const | util.UninferableBase]:
    """Infer a binary operation on a tuple or list.

    The instance on which the binary operation is performed is a tuple
    or list. This refers to the left-hand side of the operation, so:
    'tuple() + 1' or '[] + A()'
    """
    from astroid import helpers  # pylint: disable=import-outside-toplevel

    # For tuples and lists the boundnode is no longer the tuple or list instance
    context.boundnode = None
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, self.__class__) and operator == "+":
        node = self.__class__(parent=opnode)
        node.elts = list(
            itertools.chain(
                _filter_uninferable_nodes(self.elts, context),
                _filter_uninferable_nodes(other.elts, context),
            )
        )
        yield node
    elif isinstance(other, nodes.Const) and operator == "*":
        if not isinstance(other.value, int):
            yield not_implemented
            return
        yield _multiply_seq_by_int(self, opnode, other.value, context)
    elif isinstance(other, bases.Instance) and operator == "*":
        # Verify if the instance supports __index__.
        as_index = helpers.class_instance_as_index(other)
        if not as_index:
            yield util.Uninferable
        elif not isinstance(as_index.value, int):  # pragma: no cover
            # already checked by class_instance_as_index() but faster than casting
            raise AssertionError("Please open a bug report.")
        else:
            yield _multiply_seq_by_int(self, opnode, as_index.value, context)
    else:
        yield not_implemented


@decorators.yes_if_nothing_inferred
def instance_class_infer_binary_op(
    self: nodes.ClassDef,
    opnode: nodes.AugAssign | nodes.BinOp,
    operator: str,
    other: InferenceResult,
    context: InferenceContext,
    method: SuccessfulInferenceResult,
) -> Generator[InferenceResult]:
    return method.infer_call_result(self, context)


# assignment ##################################################################
# pylint: disable-next=pointless-string-statement
"""The assigned_stmts method is responsible for returning the assigned statement
(i.e. not inferred) according to the assignment type.

The `assign_path` argument is used to record the lhs path of the original node.
For instance, if we want the assigned statements for 'c' in 'a, (b, c)',
assign_path will be [1, 1] once we arrive at the Assign node.

The `context` argument is the current inference context, which should be given
to any intermediary inference necessary.
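
For example, for 'a, (b, c) = value', asking for the statements assigned to 'c'
starts from its AssignName node; each enclosing Tuple prepends its own index
(see `sequence_assigned_stmts`), so `assign_path` is [1, 1] by the time the
Assign node is reached, and that path is then walked into the inferred value of
'value'.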
""" def _resolve_looppart(parts, assign_path, context): """Recursive function to resolve multiple assignments on loops.""" assign_path = assign_path[:] index = assign_path.pop(0) for part in parts: if isinstance(part, util.UninferableBase): continue if not hasattr(part, "itered"): continue try: itered = part.itered() except TypeError: continue try: if isinstance(itered[index], (nodes.Const, nodes.Name)): itered = [part] except IndexError: pass for stmt in itered: index_node = nodes.Const(index) try: assigned = stmt.getitem(index_node, context) except (AttributeError, AstroidTypeError, AstroidIndexError): continue if not assign_path: # we achieved to resolved the assignment path, # don't infer the last part yield assigned elif isinstance(assigned, util.UninferableBase): break else: # we are not yet on the last part of the path # search on each possibly inferred value try: yield from _resolve_looppart( assigned.infer(context), assign_path, context ) except InferenceError: break @decorators.raise_if_nothing_inferred def for_assigned_stmts( self: nodes.For | nodes.Comprehension, node: node_classes.AssignedStmtsPossibleNode = None, context: InferenceContext | None = None, assign_path: list[int] | None = None, ) -> Any: if isinstance(self, nodes.AsyncFor) or getattr(self, "is_async", False): # Skip inferring of async code for now return { "node": self, "unknown": node, "assign_path": assign_path, "context": context, } if assign_path is None: for lst in self.iter.infer(context): if isinstance(lst, (nodes.Tuple, nodes.List)): yield from lst.elts else: yield from _resolve_looppart(self.iter.infer(context), assign_path, context) return { "node": self, "unknown": node, "assign_path": assign_path, "context": context, } def sequence_assigned_stmts( self: nodes.Tuple | nodes.List, node: node_classes.AssignedStmtsPossibleNode = None, context: InferenceContext | None = None, assign_path: list[int] | None = None, ) -> Any: if assign_path is None: assign_path = [] try: index = self.elts.index(node) # type: ignore[arg-type] except ValueError as exc: raise InferenceError( "Tried to retrieve a node {node!r} which does not exist", node=self, assign_path=assign_path, context=context, ) from exc assign_path.insert(0, index) return self.parent.assigned_stmts( node=self, context=context, assign_path=assign_path ) def assend_assigned_stmts( self: nodes.AssignName | nodes.AssignAttr, node: node_classes.AssignedStmtsPossibleNode = None, context: InferenceContext | None = None, assign_path: list[int] | None = None, ) -> Any: return self.parent.assigned_stmts(node=self, context=context) def _arguments_infer_argname( self, name: str | None, context: InferenceContext ) -> Generator[InferenceResult]: # arguments information may be missing, in which case we can't do anything # more from astroid import arguments # pylint: disable=import-outside-toplevel if not self.arguments: yield util.Uninferable return args = [arg for arg in self.arguments if arg.name not in [self.vararg, self.kwarg]] functype = self.parent.type # first argument of instance/class method if ( args and getattr(self.arguments[0], "name", None) == name and functype != "staticmethod" ): cls = self.parent.parent.scope() is_metaclass = isinstance(cls, nodes.ClassDef) and cls.type == "metaclass" # If this is a metaclass, then the first argument will always # be the class, not an instance. 
        if context.boundnode and isinstance(context.boundnode, bases.Instance):
            cls = context.boundnode._proxied
        if is_metaclass or functype == "classmethod":
            yield cls
            return
        if functype == "method":
            yield cls.instantiate_class()
            return

    if context and context.callcontext:
        callee = context.callcontext.callee
        while hasattr(callee, "_proxied"):
            callee = callee._proxied
        if getattr(callee, "name", None) == self.parent.name:
            call_site = arguments.CallSite(context.callcontext, context.extra_context)
            yield from call_site.infer_argument(self.parent, name, context)
            return

    if name == self.vararg:
        vararg = nodes.const_factory(())
        vararg.parent = self
        if not args and self.parent.name == "__init__":
            cls = self.parent.parent.scope()
            vararg.elts = [cls.instantiate_class()]
        yield vararg
        return
    if name == self.kwarg:
        kwarg = nodes.const_factory({})
        kwarg.parent = self
        yield kwarg
        return
    # if there is a default value, yield it. And then yield Uninferable to reflect
    # we can't guess given argument value
    try:
        context = copy_context(context)
        yield from self.default_value(name).infer(context)
        yield util.Uninferable
    except NoDefault:
        yield util.Uninferable


def arguments_assigned_stmts(
    self: nodes.Arguments,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    from astroid import arguments  # pylint: disable=import-outside-toplevel

    try:
        node_name = node.name  # type: ignore[union-attr]
    except AttributeError:
        # Added to handle edge cases where node.name is not defined.
        # https://github.com/pylint-dev/astroid/pull/1644#discussion_r901545816
        node_name = None  # pragma: no cover

    if context and context.callcontext:
        callee = context.callcontext.callee
        while hasattr(callee, "_proxied"):
            callee = callee._proxied
    else:
        return _arguments_infer_argname(self, node_name, context)
    if node and getattr(callee, "name", None) == node.frame().name:
        # reset call context/name
        callcontext = context.callcontext
        context = copy_context(context)
        context.callcontext = None
        args = arguments.CallSite(callcontext, context=context)
        return args.infer_argument(self.parent, node_name, context)
    return _arguments_infer_argname(self, node_name, context)


@decorators.raise_if_nothing_inferred
def assign_assigned_stmts(
    self: nodes.AugAssign | nodes.Assign | nodes.AnnAssign | nodes.TypeAlias,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    if not assign_path:
        yield self.value
        return None
    yield from _resolve_assignment_parts(
        self.value.infer(context), assign_path, context
    )

    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }


def assign_annassigned_stmts(
    self: nodes.AnnAssign,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    for inferred in assign_assigned_stmts(self, node, context, assign_path):
        if inferred is None:
            yield util.Uninferable
        else:
            yield inferred


def _resolve_assignment_parts(parts, assign_path, context):
    """Recursive function to resolve multiple assignments."""
    assign_path = assign_path[:]
    index = assign_path.pop(0)
    for part in parts:
        assigned = None
        if isinstance(part, nodes.Dict):
            # A dictionary in an iterating context
            try:
                assigned, _ = part.items[index]
            except IndexError:
                return

        elif hasattr(part, "getitem"):
            index_node = nodes.Const(index)
            try:
                assigned = part.getitem(index_node, context)
            except (AstroidTypeError, AstroidIndexError):
                return

        if not assigned:
            return

        if not assign_path:
            # we have resolved the assignment path, don't infer the
            # last part
            yield assigned
        elif isinstance(assigned, util.UninferableBase):
            return
        else:
            # we are not yet on the last part of the path; search on each
            # possibly inferred value
            try:
                yield from _resolve_assignment_parts(
                    assigned.infer(context), assign_path, context
                )
            except InferenceError:
                return


@decorators.raise_if_nothing_inferred
def excepthandler_assigned_stmts(
    self: nodes.ExceptHandler,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    from astroid import objects  # pylint: disable=import-outside-toplevel

    for assigned in node_classes.unpack_infer(self.type):
        if isinstance(assigned, nodes.ClassDef):
            assigned = objects.ExceptionInstance(assigned)

        yield assigned
    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }


def _infer_context_manager(self, mgr, context):
    try:
        inferred = next(mgr.infer(context=context))
    except StopIteration as e:
        raise InferenceError(node=mgr) from e
    if isinstance(inferred, bases.Generator):
        # Check if it is decorated with contextlib.contextmanager.
        func = inferred.parent
        if not func.decorators:
            raise InferenceError(
                "No decorators found on inferred generator %s", node=func
            )

        for decorator_node in func.decorators.nodes:
            decorator = next(decorator_node.infer(context=context), None)
            if isinstance(decorator, nodes.FunctionDef):
                if decorator.qname() == _CONTEXTLIB_MGR:
                    break
        else:
            # It doesn't interest us.
            raise InferenceError(node=func)
        try:
            yield next(inferred.infer_yield_types())
        except StopIteration as e:
            raise InferenceError(node=func) from e

    elif isinstance(inferred, bases.Instance):
        try:
            enter = next(inferred.igetattr("__enter__", context=context))
        except (InferenceError, AttributeInferenceError, StopIteration) as exc:
            raise InferenceError(node=inferred) from exc
        if not isinstance(enter, bases.BoundMethod):
            raise InferenceError(node=enter)
        yield from enter.infer_call_result(self, context)
    else:
        raise InferenceError(node=mgr)


@decorators.raise_if_nothing_inferred
def with_assigned_stmts(
    self: nodes.With,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Infer names and other nodes from a *with* statement.

    This enables only inference for name binding in a *with* statement.
    For instance, in the following code, inferring `func` will return
    the `ContextManager` class, not whatever ``__enter__`` returns.
    We are doing this intentionally, because we consider that the context
    manager result is whatever __enter__ returns and that is what is bound
    using the ``as`` keyword.

        class ContextManager(object):
            def __enter__(self):
                return 42
        with ContextManager() as f:
            pass

        # ContextManager().infer() will return ContextManager
        # f.infer() will return 42.

    Arguments:
        self: nodes.With
        node: The target of the assignment, `as (a, b)` in `with foo as (a, b)`.
        context: Inference context used for caching already inferred objects
        assign_path:
            A list of indices, where each index specifies what item to fetch from
            the inference results.
    """
    try:
        mgr = next(mgr for (mgr, vars) in self.items if vars == node)
    except StopIteration:
        return None
    if assign_path is None:
        yield from _infer_context_manager(self, mgr, context)
    else:
        for result in _infer_context_manager(self, mgr, context):
            # Walk the assign_path and get the item at the final index.
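            # Illustrative note: for `with cm() as (a, b):` the target `b` reaches
            # this function with assign_path == [1], so each value yielded by
            # _infer_context_manager (the inferred `__enter__` result) is indexed
            # into through its `elts` to reach the element bound to `b`.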
            obj = result
            for index in assign_path:
                if not hasattr(obj, "elts"):
                    raise InferenceError(
                        "Wrong type ({targets!r}) for {node!r} assignment",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    )
                try:
                    obj = obj.elts[index]
                except IndexError as exc:
                    raise InferenceError(
                        "Tried to infer a nonexistent target with index {index} "
                        "in {node!r}.",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    ) from exc
                except TypeError as exc:
                    raise InferenceError(
                        "Tried to unpack a non-iterable value in {node!r}.",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    ) from exc
            yield obj
    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }


@decorators.raise_if_nothing_inferred
def named_expr_assigned_stmts(
    self: nodes.NamedExpr,
    node: node_classes.AssignedStmtsPossibleNode,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Infer names and other nodes from an assignment expression."""
    if self.target == node:
        yield from self.value.infer(context=context)
    else:
        raise InferenceError(
            "Cannot infer NamedExpr node {node!r}",
            node=self,
            assign_path=assign_path,
            context=context,
        )


@decorators.yes_if_nothing_inferred
def starred_assigned_stmts(  # noqa: C901
    self: nodes.Starred,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """
    Arguments:
        self: nodes.Starred
        node: a node related to the current underlying Node.
        context: Inference context used for caching already inferred objects
        assign_path:
            A list of indices, where each index specifies what item to fetch from
            the inference results.
    """
    # pylint: disable=too-many-locals,too-many-statements
    def _determine_starred_iteration_lookups(
        starred: nodes.Starred, target: nodes.Tuple, lookups: list[tuple[int, int]]
    ) -> None:
        # Determine the lookups for the rhs of the iteration
        itered = target.itered()
        for index, element in enumerate(itered):
            if (
                isinstance(element, nodes.Starred)
                and element.value.name == starred.value.name
            ):
                lookups.append((index, len(itered)))
                break
            if isinstance(element, nodes.Tuple):
                lookups.append((index, len(element.itered())))
                _determine_starred_iteration_lookups(starred, element, lookups)

    stmt = self.statement()
    if not isinstance(stmt, (nodes.Assign, nodes.For)):
        raise InferenceError(
            "Statement {stmt!r} enclosing {node!r} must be an Assign or For node.",
            node=self,
            stmt=stmt,
            unknown=node,
            context=context,
        )

    if context is None:
        context = InferenceContext()

    if isinstance(stmt, nodes.Assign):
        value = stmt.value
        lhs = stmt.targets[0]
        if not isinstance(lhs, nodes.BaseContainer):
            yield util.Uninferable
            return

        if sum(1 for _ in lhs.nodes_of_class(nodes.Starred)) > 1:
            raise InferenceError(
                "Too many starred arguments in the assignment targets {lhs!r}.",
                node=self,
                targets=lhs,
                unknown=node,
                context=context,
            )

        try:
            rhs = next(value.infer(context))
        except (InferenceError, StopIteration):
            yield util.Uninferable
            return

        if isinstance(rhs, util.UninferableBase) or not hasattr(rhs, "itered"):
            yield util.Uninferable
            return

        try:
            elts = collections.deque(rhs.itered())  # type: ignore[union-attr]
        except TypeError:
            yield util.Uninferable
            return

        # Unpack iteratively the values from the rhs of the assignment,
        # until we find the starred node.
        # What will remain will be the list of values which the Starred node
        # will represent. This is done in two steps: from left to right to
        # remove anything before the starred node, and from right to left to
        # remove anything after the starred node.
        for index, left_node in enumerate(lhs.elts):
            if not isinstance(left_node, nodes.Starred):
                if not elts:
                    break
                elts.popleft()
                continue
            lhs_elts = collections.deque(reversed(lhs.elts[index:]))
            for right_node in lhs_elts:
                if not isinstance(right_node, nodes.Starred):
                    if not elts:
                        break
                    elts.pop()
                    continue

                # We're done unpacking.
                packed = nodes.List(
                    ctx=Context.Store,
                    parent=self,
                    lineno=lhs.lineno,
                    col_offset=lhs.col_offset,
                )
                packed.postinit(elts=list(elts))
                yield packed
                break

    if isinstance(stmt, nodes.For):
        try:
            inferred_iterable = next(stmt.iter.infer(context=context))
        except (InferenceError, StopIteration):
            yield util.Uninferable
            return
        if isinstance(inferred_iterable, util.UninferableBase) or not hasattr(
            inferred_iterable, "itered"
        ):
            yield util.Uninferable
            return
        try:
            itered = inferred_iterable.itered()  # type: ignore[union-attr]
        except TypeError:
            yield util.Uninferable
            return

        target = stmt.target

        if not isinstance(target, nodes.Tuple):
            raise InferenceError(
                "Could not make sense of this, the target must be a tuple",
                context=context,
            )

        lookups: list[tuple[int, int]] = []
        _determine_starred_iteration_lookups(self, target, lookups)
        if not lookups:
            raise InferenceError(
                "Could not make sense of this, needs at least a lookup",
                context=context,
            )

        # Make the last lookup a slice, since that's what we want for a Starred node
        last_element_index, last_element_length = lookups[-1]
        is_starred_last = last_element_index == (last_element_length - 1)

        lookup_slice = slice(
            last_element_index,
            None if is_starred_last else (last_element_length - last_element_index),
        )
        last_lookup = lookup_slice

        for element in itered:
            # We probably want to infer the potential values *for each* element in an
            # iterable, but we can't infer a list of all values, when only a list of
            # step values are expected:
            #
            # for a, *b in [...]:
            #   b
            #
            # *b* should now point to just the elements at that particular iteration
            # step, which astroid can't know about.

            found_element = None
            for index, lookup in enumerate(lookups):
                if not hasattr(element, "itered"):
                    break
                if index + 1 == len(lookups):
                    cur_lookup: slice | int = last_lookup
                else:
                    # Grab just the index, not the whole length
                    cur_lookup = lookup[0]
                try:
                    itered_inner_element = element.itered()
                    element = itered_inner_element[cur_lookup]
                except IndexError:
                    break
                except TypeError:
                    # Most likely the itered() call failed, cannot make sense of this
                    yield util.Uninferable
                    return
                else:
                    found_element = element

            unpacked = nodes.List(
                ctx=Context.Store,
                parent=self,
                lineno=self.lineno,
                col_offset=self.col_offset,
            )
            unpacked.postinit(elts=found_element or [])
            yield unpacked
            return

        yield util.Uninferable


@decorators.yes_if_nothing_inferred
def match_mapping_assigned_stmts(
    self: nodes.MatchMapping,
    node: nodes.AssignName,
    context: InferenceContext | None = None,
    assign_path: None = None,
) -> Generator[nodes.NodeNG]:
    """Return empty generator (return -> raises StopIteration) so inferred value
    is Uninferable.
    """
    return
    yield


@decorators.yes_if_nothing_inferred
def match_star_assigned_stmts(
    self: nodes.MatchStar,
    node: nodes.AssignName,
    context: InferenceContext | None = None,
    assign_path: None = None,
) -> Generator[nodes.NodeNG]:
    """Return empty generator (return -> raises StopIteration) so inferred value
    is Uninferable.
""" return yield @decorators.yes_if_nothing_inferred def match_as_assigned_stmts( self: nodes.MatchAs, node: nodes.AssignName, context: InferenceContext | None = None, assign_path: None = None, ) -> Generator[nodes.NodeNG]: """Infer MatchAs as the Match subject if it's the only MatchCase pattern else raise StopIteration to yield Uninferable. """ if ( isinstance(self.parent, nodes.MatchCase) and isinstance(self.parent.parent, nodes.Match) and self.pattern is None ): yield self.parent.parent.subject @decorators.yes_if_nothing_inferred def generic_type_assigned_stmts( self: nodes.TypeVar | nodes.TypeVarTuple | nodes.ParamSpec, node: nodes.AssignName, context: InferenceContext | None = None, assign_path: None = None, ) -> Generator[nodes.NodeNG]: """Hack. Return any Node so inference doesn't fail when evaluating __class_getitem__. Revert if it's causing issues. """ yield nodes.Const(None)