# /usr/lib64/python3.11/zoneinfo/_zoneinfo.py
import bisect
import calendar
import collections
import functools
import re
import weakref
from datetime import datetime, timedelta, tzinfo

from . import _common, _tzpath

EPOCH = datetime(1970, 1, 1)
EPOCHORDINAL = datetime(1970, 1, 1).toordinal()


# It is relatively expensive to construct new timedelta objects, and in most
# cases we're looking at the same deltas, like integer numbers of hours, etc.
# To improve speed and memory use, we'll keep a dictionary with references
# to the ones we've already used so far.
#
# Loading every time zone in the 2020a version of the time zone database
# requires 447 timedeltas, which requires approximately the amount of space
# that ZoneInfo("America/New_York") with 236 transitions takes up, so we will
# set the cache size to 512 so that in the common case we always get cache
# hits, but specifically crafted ZoneInfo objects don't leak arbitrary amounts
# of memory.
@functools.lru_cache(maxsize=512)
def _load_timedelta(seconds):
    return timedelta(seconds=seconds)
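
# Illustrative sketch (not part of the module): the comment above explains why
# timedeltas are memoized.  The hypothetical helper below just demonstrates
# that repeated requests for the same offset come back as the *same* cached
# object, and that the lru_cache wrapper exposes hit/miss statistics.
def _example_timedelta_cache():
    first = _load_timedelta(3600)
    second = _load_timedelta(3600)
    assert first is second  # served from the cache, not rebuilt
    return _load_timedelta.cache_info()  # hits / misses / currsize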

class ZoneInfo(tzinfo):
    _strong_cache_size = 8
    _strong_cache = collections.OrderedDict()
    _weak_cache = weakref.WeakValueDictionary()
    __module__ = "zoneinfo"

    def __init_subclass__(cls):
        cls._strong_cache = collections.OrderedDict()
        cls._weak_cache = weakref.WeakValueDictionary()

    def __new__(cls, key):
        instance = cls._weak_cache.get(key, None)
        if instance is None:
            instance = cls._weak_cache.setdefault(key, cls._new_instance(key))
            instance._from_cache = True

        # Update the "strong" cache
        cls._strong_cache[key] = cls._strong_cache.pop(key, instance)

        if len(cls._strong_cache) > cls._strong_cache_size:
            cls._strong_cache.popitem(last=False)

        return instance

    @classmethod
    def no_cache(cls, key):
        obj = cls._new_instance(key)
        obj._from_cache = False

        return obj

    @classmethod
    def _new_instance(cls, key):
        obj = super().__new__(cls)
        obj._key = key
        obj._file_path = obj._find_tzfile(key)

        if obj._file_path is not None:
            file_obj = open(obj._file_path, "rb")
        else:
            file_obj = _common.load_tzdata(key)

        with file_obj as f:
            obj._load_file(f)

        return obj

    @classmethod
    def from_file(cls, fobj, /, key=None):
        obj = super().__new__(cls)
        obj._key = key
        obj._file_path = None
        obj._load_file(fobj)
        obj._file_repr = repr(fobj)

        # Disable pickling for objects created from files
        obj.__reduce__ = obj._file_reduce

        return obj

    @classmethod
    def clear_cache(cls, *, only_keys=None):
        if only_keys is not None:
            for key in only_keys:
                cls._weak_cache.pop(key, None)
                cls._strong_cache.pop(key, None)
        else:
            cls._weak_cache.clear()
            cls._strong_cache.clear()

    @property
    def key(self):
        return self._key

    def utcoffset(self, dt):
        return self._find_trans(dt).utcoff

    def dst(self, dt):
        return self._find_trans(dt).dstoff

    def tzname(self, dt):
        return self._find_trans(dt).tzname

    def fromutc(self, dt):
        """Convert from datetime in UTC to datetime in local time"""
        if not isinstance(dt, datetime):
            raise TypeError("fromutc() requires a datetime argument")
        if dt.tzinfo is not self:
            raise ValueError("dt.tzinfo is not self")

        timestamp = self._get_local_timestamp(dt)
        num_trans = len(self._trans_utc)

        if num_trans >= 1 and timestamp < self._trans_utc[0]:
            tti = self._tti_before
            fold = 0
        elif (
            num_trans == 0 or timestamp > self._trans_utc[-1]
        ) and not isinstance(self._tz_after, _ttinfo):
            tti, fold = self._tz_after.get_trans_info_fromutc(
                timestamp, dt.year
            )
        elif num_trans == 0:
            tti = self._tz_after
            fold = 0
        else:
            idx = bisect.bisect_right(self._trans_utc, timestamp)

            if num_trans > 1 and timestamp >= self._trans_utc[1]:
                tti_prev, tti = self._ttinfos[idx - 2 : idx]
            elif timestamp > self._trans_utc[-1]:
                tti_prev = self._ttinfos[-1]
                tti = self._tz_after
            else:
                tti_prev = self._tti_before
                tti = self._ttinfos[0]

            # Detect fold
            shift = tti_prev.utcoff - tti.utcoff
            fold = shift.total_seconds() > timestamp - self._trans_utc[idx - 1]

        dt += tti.utcoff
        if fold:
            return dt.replace(fold=1)
        else:
            return dt

    def _find_trans(self, dt):
        if dt is None:
            if self._fixed_offset:
                return self._tz_after
            else:
                return _NO_TTINFO

        ts = self._get_local_timestamp(dt)

        lt = self._trans_local[dt.fold]

        num_trans = len(lt)

        if num_trans and ts < lt[0]:
            return self._tti_before
        elif not num_trans or ts > lt[-1]:
            if isinstance(self._tz_after, _TZStr):
                return self._tz_after.get_trans_info(ts, dt.year, dt.fold)
            else:
                return self._tz_after
        else:
            # idx is the transition that occurs after this timestamp, so we
            # subtract off 1 to get the current ttinfo
            idx = bisect.bisect_right(lt, ts) - 1
            assert idx >= 0
            return self._ttinfos[idx]

    def _get_local_timestamp(self, dt):
        return (
            (dt.toordinal() - EPOCHORDINAL) * 86400
            + dt.hour * 3600
            + dt.minute * 60
            + dt.second
        )

    def __str__(self):
        if self._key is not None:
            return f"{self._key}"
        else:
            return repr(self)

    def __repr__(self):
        if self._key is not None:
            return f"{self.__class__.__name__}(key={self._key!r})"
        else:
            return f"{self.__class__.__name__}.from_file({self._file_repr})"

    def __reduce__(self):
        return (self.__class__._unpickle, (self._key, self._from_cache))

    def _file_reduce(self):
        import pickle

        raise pickle.PicklingError(
            "Cannot pickle a ZoneInfo file created from a file stream."
        )

    @classmethod
    def _unpickle(cls, key, from_cache, /):
        if from_cache:
            return cls(key)
        else:
            return cls.no_cache(key)

    def _find_tzfile(self, key):
        return _tzpath.find_tzfile(key)

    def _load_file(self, fobj):
        # Retrieve all the data as it exists in the zoneinfo file
        trans_idx, trans_utc, utcoff, isdst, abbr, tz_str = _common.load_data(
            fobj
        )

        # Infer the DST offsets (needed for .dst()) from the data
        dstoff = self._utcoff_to_dstoff(trans_idx, utcoff, isdst)

        # Convert all the transition times (UTC) into "seconds since 1970-01-01 local time"
        trans_local = self._ts_to_local(trans_idx, trans_utc, utcoff)

        # Construct `_ttinfo` objects for each transition in the file
        _ttinfo_list = [
            _ttinfo(
                _load_timedelta(utcoffset), _load_timedelta(dstoffset), tzname
            )
            for utcoffset, dstoffset, tzname in zip(utcoff, dstoff, abbr)
        ]

        self._trans_utc = trans_utc
        self._trans_local = trans_local
        self._ttinfos = [_ttinfo_list[idx] for idx in trans_idx]

        # Find the first non-DST transition
        for i in range(len(isdst)):
            if not isdst[i]:
                self._tti_before = _ttinfo_list[i]
                break
        else:
            if self._ttinfos:
                self._tti_before = self._ttinfos[0]
            else:
                self._tti_before = None

        # Set the "fallback" time zone
        if tz_str is not None and tz_str != b"":
            self._tz_after = _parse_tz_str(tz_str.decode())
        else:
            if not self._ttinfos and not _ttinfo_list:
                raise ValueError("No time zone information found.")

            if self._ttinfos:
                self._tz_after = self._ttinfos[-1]
            else:
                self._tz_after = _ttinfo_list[-1]

        # Determine if this is a "fixed offset" zone, meaning that the output
        # of the utcoffset, dst and tzname functions does not depend on the
        # specific datetime passed.
        #
        # We make three simplifying assumptions here:
        #
        # 1. If _tz_after is not a _ttinfo, it has transitions that might
        #    actually occur (it is possible to construct TZ strings that
        #    specify STD and DST but no transitions ever occur, such as
        #    AAA0BBB,0/0,J365/25).
        # 2. If _ttinfo_list contains more than one _ttinfo object, the objects
        #    represent different offsets.
        # 3. _ttinfo_list contains no unused _ttinfos (in which case an
        #    otherwise fixed-offset zone with extra _ttinfos defined may
        #    appear to *not* be a fixed offset zone).
        #
        # Violations to these assumptions would be fairly exotic, and exotic
        # zones should almost certainly not be used with datetime.time (the
        # only thing that would be affected by this).
        if len(_ttinfo_list) > 1 or not isinstance(self._tz_after, _ttinfo):
            self._fixed_offset = False
        elif not _ttinfo_list:
            self._fixed_offset = True
        else:
            self._fixed_offset = _ttinfo_list[0] == self._tz_after

    @staticmethod
    def _utcoff_to_dstoff(trans_idx, utcoffsets, isdsts):
        # Now we must transform our ttis and abbrs into `_ttinfo` objects,
        # but there is an issue: .dst() must return a timedelta with the
        # difference between utcoffset() and the "standard" offset, but
        # the "base offset" and "DST offset" are not encoded in the file;
        # we can infer what they are from the isdst flag, but it is not
        # sufficient to just look at the last standard offset, because
        # occasionally countries will shift both DST offset and base offset.

        typecnt = len(isdsts)
        dstoffs = [0] * typecnt  # Provisionally assign all to 0.
        dst_cnt = sum(isdsts)
        dst_found = 0

        for i in range(1, len(trans_idx)):
            if dst_cnt == dst_found:
                break

            idx = trans_idx[i]

            dst = isdsts[idx]

            # We're only going to look at daylight saving time
            if not dst:
                continue

            # Skip any offsets that have already been assigned
            if dstoffs[idx] != 0:
                continue

            dstoff = 0
            utcoff = utcoffsets[idx]

            comp_idx = trans_idx[i - 1]

            if not isdsts[comp_idx]:
                dstoff = utcoff - utcoffsets[comp_idx]

            if not dstoff and idx < (typecnt - 1):
                comp_idx = trans_idx[i + 1]

                # If the following transition is also DST and we couldn't
                # find the DST offset by this point, we're going to have to
                # skip it and hope this transition gets assigned later
                if isdsts[comp_idx]:
                    continue

                dstoff = utcoff - utcoffsets[comp_idx]

            if dstoff:
                dst_found += 1
                dstoffs[idx] = dstoff
        else:
            # If we didn't find a valid value for a given index, we'll end up
            # with dstoff = 0 for something where `isdst=1`. This is obviously
            # wrong - one hour will be a much better guess than 0
            for idx in range(typecnt):
                if not dstoffs[idx] and isdsts[idx]:
                    dstoffs[idx] = 3600

        return dstoffs

    @staticmethod
    def _ts_to_local(trans_idx, trans_list_utc, utcoffsets):
        """Generate number of seconds since 1970 *in the local time*.

        This is necessary to easily find the transition times in local time"""
        if not trans_list_utc:
            return [[], []]

        # Start with the timestamps and modify in-place
        trans_list_wall = [list(trans_list_utc), list(trans_list_utc)]

        if len(utcoffsets) > 1:
            offset_0 = utcoffsets[0]
            offset_1 = utcoffsets[trans_idx[0]]
            if offset_1 > offset_0:
                offset_1, offset_0 = offset_0, offset_1
        else:
            offset_0 = offset_1 = utcoffsets[0]

        trans_list_wall[0][0] += offset_0
        trans_list_wall[1][0] += offset_1

        for i in range(1, len(trans_idx)):
            offset_0 = utcoffsets[trans_idx[i - 1]]
            offset_1 = utcoffsets[trans_idx[i]]

            if offset_1 > offset_0:
                offset_1, offset_0 = offset_0, offset_1

            trans_list_wall[0][i] += offset_0
            trans_list_wall[1][i] += offset_1

        return trans_list_wall
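
# Illustrative sketch (not part of the module): ZoneInfo.__new__ above serves
# instances out of a per-class weak cache backed by a small "strong" LRU,
# while no_cache() always constructs a fresh object and clear_cache()
# invalidates entries.  The helper name is hypothetical and assumes the "UTC"
# key can be resolved from the system tz path or the tzdata package.
def _example_zoneinfo_cache():
    a = ZoneInfo("UTC")
    b = ZoneInfo("UTC")
    assert a is b  # same object, served from the cache

    c = ZoneInfo.no_cache("UTC")
    assert c is not a  # cache bypassed: a brand-new instance

    ZoneInfo.clear_cache(only_keys=["UTC"])
    return ZoneInfo("UTC") is a  # False: the cached entry was dropped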

class _ttinfo:
    __slots__ = ["utcoff", "dstoff", "tzname"]

    def __init__(self, utcoff, dstoff, tzname):
        self.utcoff = utcoff
        self.dstoff = dstoff
        self.tzname = tzname

    def __eq__(self, other):
        return (
            self.utcoff == other.utcoff
            and self.dstoff == other.dstoff
            and self.tzname == other.tzname
        )

    def __repr__(self):  # pragma: nocover
        return (
            f"{self.__class__.__name__}"
            + f"({self.utcoff}, {self.dstoff}, {self.tzname})"
        )


_NO_TTINFO = _ttinfo(None, None, None)
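
# Illustrative sketch (not part of the module): _ttinfo records compare by
# value (offset, DST offset, abbreviation), and _NO_TTINFO is the "all None"
# record returned for naive lookups on zones that are not fixed-offset.  The
# helper name is hypothetical.
def _example_ttinfo_equality():
    est = _ttinfo(_load_timedelta(-5 * 3600), _load_timedelta(0), "EST")
    same = _ttinfo(_load_timedelta(-5 * 3600), _load_timedelta(0), "EST")
    assert est == same and est is not same  # equal by value, distinct objects
    return _NO_TTINFO.utcoff is None  # True: no offset information available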

class _TZStr:
    __slots__ = (
        "std",
        "dst",
        "start",
        "end",
        "get_trans_info",
        "get_trans_info_fromutc",
        "dst_diff",
    )

    def __init__(
        self, std_abbr, std_offset, dst_abbr, dst_offset, start=None, end=None
    ):
        self.dst_diff = dst_offset - std_offset
        std_offset = _load_timedelta(std_offset)
        self.std = _ttinfo(
            utcoff=std_offset, dstoff=_load_timedelta(0), tzname=std_abbr
        )

        self.start = start
        self.end = end

        dst_offset = _load_timedelta(dst_offset)
        delta = _load_timedelta(self.dst_diff)
        self.dst = _ttinfo(utcoff=dst_offset, dstoff=delta, tzname=dst_abbr)

        # These are assertions because the constructor should only be called
        # by functions that would fail before passing start or end
        assert start is not None, "No transition start specified"
        assert end is not None, "No transition end specified"

        self.get_trans_info = self._get_trans_info
        self.get_trans_info_fromutc = self._get_trans_info_fromutc

    def transitions(self, year):
        start = self.start.year_to_epoch(year)
        end = self.end.year_to_epoch(year)
        return start, end

    def _get_trans_info(self, ts, year, fold):
        """Get the information about the current transition - tti"""
        start, end = self.transitions(year)

        # With fold = 0, the period (denominated in local time) with the
        # smaller offset starts at the end of the gap and ends at the end of
        # the fold; with fold = 1, it runs from the start of the gap to the
        # beginning of the fold.
        #
        # So in order to determine the DST boundaries we need to know both
        # the fold and whether DST is positive or negative (rare), and it
        # turns out that this boils down to fold XOR is_positive.
        if fold == (self.dst_diff >= 0):
            end -= self.dst_diff
        else:
            start += self.dst_diff

        if start < end:
            isdst = start <= ts < end
        else:
            isdst = not (end <= ts < start)

        return self.dst if isdst else self.std

    def _get_trans_info_fromutc(self, ts, year):
        start, end = self.transitions(year)
        start -= self.std.utcoff.total_seconds()
        end -= self.dst.utcoff.total_seconds()

        if start < end:
            isdst = start <= ts < end
        else:
            isdst = not (end <= ts < start)

        # For positive DST, the ambiguous period is one dst_diff after the end
        # of DST; for negative DST, the ambiguous period is one dst_diff before
        # the start of DST.
        if self.dst_diff > 0:
            ambig_start = end
            ambig_end = end + self.dst_diff
        else:
            ambig_start = start
            ambig_end = start - self.dst_diff

        fold = ambig_start <= ts < ambig_end

        return (self.dst if isdst else self.std, fold)
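
# Illustrative sketch (not part of the module): a _TZStr built from a POSIX TZ
# rule reports DST between its start and end transitions for a given year.
# The helper name and the sample rule ("EST5EDT,M3.2.0,M11.1.0", US Eastern
# style) are only examples; _parse_tz_str is defined further down in this
# module and is resolved when the helper is actually called.
def _example_tzstr_lookup():
    rule = _parse_tz_str("EST5EDT,M3.2.0,M11.1.0")  # returns a _TZStr

    # Timestamps are "seconds since 1970-01-01 in local time", matching
    # ZoneInfo._get_local_timestamp above.
    summer_ts = (datetime(2023, 7, 1, 12) - EPOCH).total_seconds()
    winter_ts = (datetime(2023, 1, 15, 12) - EPOCH).total_seconds()

    summer = rule.get_trans_info(summer_ts, 2023, fold=0)
    winter = rule.get_trans_info(winter_ts, 2023, fold=0)
    return summer.tzname, winter.tzname  # ("EDT", "EST")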

def _post_epoch_days_before_year(year):
    """Get the number of days between 1970-01-01 and YEAR-01-01"""
    y = year - 1
    return y * 365 + y // 4 - y // 100 + y // 400 - EPOCHORDINAL


class _DayOffset:
    __slots__ = ["d", "julian", "hour", "minute", "second"]

    def __init__(self, d, julian, hour=2, minute=0, second=0):
        min_day = 0 + julian  # convert bool to int
        if not min_day <= d <= 365:
            raise ValueError(f"d must be in [{min_day}, 365], not: {d}")

        self.d = d
        self.julian = julian
        self.hour = hour
        self.minute = minute
        self.second = second

    def year_to_epoch(self, year):
        days_before_year = _post_epoch_days_before_year(year)

        d = self.d
        if self.julian and d >= 59 and calendar.isleap(year):
            d += 1

        epoch = (days_before_year + d) * 86400
        epoch += self.hour * 3600 + self.minute * 60 + self.second

        return epoch


class _CalendarOffset:
    __slots__ = ["m", "w", "d", "hour", "minute", "second"]

    _DAYS_BEFORE_MONTH = (
        -1,
        0,
        31,
        59,
        90,
        120,
        151,
        181,
        212,
        243,
        273,
        304,
        334,
    )

    def __init__(self, m, w, d, hour=2, minute=0, second=0):
        if not 1 <= m <= 12:
            raise ValueError("m must be in [1, 12]")

        if not 1 <= w <= 5:
            raise ValueError("w must be in [1, 5]")

        if not 0 <= d <= 6:
            raise ValueError("d must be in [0, 6]")

        self.m = m
        self.w = w
        self.d = d
        self.hour = hour
        self.minute = minute
        self.second = second

    @classmethod
    def _ymd2ord(cls, year, month, day):
        return (
            _post_epoch_days_before_year(year)
            + cls._DAYS_BEFORE_MONTH[month]
            + (month > 2 and calendar.isleap(year))
            + day
        )

    # TODO: These are not actually epoch dates as they are expressed in local time
    def year_to_epoch(self, year):
        """Calculates the datetime of the occurrence from the year"""
        # We know year and month, we need to convert w, d into day of month
        #
        # Week 1 is the first week in which day `d` (where 0 = Sunday) appears.
        # Week 5 represents the last occurrence of day `d`, so we need to know
        # the range of the month.
        first_day, days_in_month = calendar.monthrange(year, self.m)

        # This equation seems magical, so I'll break it down:
        # 1. calendar says 0 = Monday, POSIX says 0 = Sunday
        #    so we need first_day + 1 to get 1 = Monday -> 7 = Sunday,
        #    which is still equivalent because this math is mod 7
        # 2. Get first day - desired day mod 7: -1 % 7 = 6, so we don't need
        #    to do anything to adjust negative numbers.
        # 3. Add 1 because month days are a 1-based index.
        month_day = (self.d - (first_day + 1)) % 7 + 1

        # Now use a 0-based index version of `w` to calculate the w-th
        # occurrence of `d`
        month_day += (self.w - 1) * 7

        # month_day will only be > days_in_month if w was 5, and `w` means
        # "last occurrence of `d`", so now we just check if we over-shot the
        # end of the month and if so knock off 1 week.
        if month_day > days_in_month:
            month_day -= 7

        ordinal = self._ymd2ord(year, self.m, month_day)
        epoch = ordinal * 86400
        epoch += self.hour * 3600 + self.minute * 60 + self.second
        return epoch
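
# Illustrative sketch (not part of the module): a _CalendarOffset encodes an
# "M<month>.<week>.<day>" rule, and year_to_epoch() resolves it to local
# epoch seconds using the arithmetic commented above.  The helper name is
# hypothetical; M3.2.0/2 is the familiar "second Sunday in March, 02:00"
# rule, which falls on 2023-03-12 in 2023.
def _example_calendar_offset():
    rule = _CalendarOffset(m=3, w=2, d=0, hour=2)  # M3.2.0/2
    epoch = rule.year_to_epoch(2023)

    # The value is expressed in *local* time (see the TODO above), so adding
    # it to the naive EPOCH recovers the local wall-clock datetime.
    resolved = EPOCH + timedelta(seconds=epoch)
    assert resolved == datetime(2023, 3, 12, 2, 0)
    return resolved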

def _parse_tz_str(tz_str):
    # The tz string has the format:
    #
    # std[offset[dst[offset],start[/time],end[/time]]]
    #
    # std and dst must be 3 or more characters long and must not contain
    # a leading colon, embedded digits, commas, nor a plus or minus signs;
    # The spaces between "std" and "offset" are only for display and are
    # not actually present in the string.
    #
    # The format of the offset is ``[+|-]hh[:mm[:ss]]``
    offset_str, *start_end_str = tz_str.split(",", 1)

    parser_re = re.compile(
        r"""
        (?P<std>[^<0-9:.+-]+|<[a-zA-Z0-9+-]+>)
        (?:
            (?P<stdoff>[+-]?\d{1,3}(?::\d{2}(?::\d{2})?)?)
            (?:
                (?P<dst>[^0-9:.+-]+|<[a-zA-Z0-9+-]+>)
                (?P<dstoff>[+-]?\d{1,3}(?::\d{2}(?::\d{2})?)?)?
            )? # dst
        )? # stdoff
        """,
        re.ASCII|re.VERBOSE
    )

    m = parser_re.fullmatch(offset_str)

    if m is None:
        raise ValueError(f"{tz_str} is not a valid TZ string")

    std_abbr = m.group("std")
    dst_abbr = m.group("dst")
    dst_offset = None

    std_abbr = std_abbr.strip("<>")
    if dst_abbr:
        dst_abbr = dst_abbr.strip("<>")

    if std_offset := m.group("stdoff"):
        try:
            std_offset = _parse_tz_delta(std_offset)
        except ValueError as e:
            raise ValueError(f"Invalid STD offset in {tz_str}") from e
    else:
        std_offset = 0

    if dst_abbr is not None:
        if dst_offset := m.group("dstoff"):
            try:
                dst_offset = _parse_tz_delta(dst_offset)
            except ValueError as e:
                raise ValueError(f"Invalid DST offset in {tz_str}") from e
        else:
            dst_offset = std_offset + 3600

        if not start_end_str:
            raise ValueError(f"Missing transition rules: {tz_str}")

        start_end_strs = start_end_str[0].split(",", 1)
        try:
            start, end = (_parse_dst_start_end(x) for x in start_end_strs)
        except ValueError as e:
            raise ValueError(f"Invalid TZ string: {tz_str}") from e

        return _TZStr(std_abbr, std_offset, dst_abbr, dst_offset, start, end)
    elif start_end_str:
        raise ValueError(f"Transition rule present without DST: {tz_str}")
    else:
        # This is a static ttinfo, don't return _TZStr
        return _ttinfo(
            _load_timedelta(std_offset), _load_timedelta(0), std_abbr
        )


def _parse_dst_start_end(dststr):
    date, *time = dststr.split("/", 1)
    type = date[:1]
    if type == "M":
        n_is_julian = False
        m = re.fullmatch(r"M(\d{1,2})\.(\d).(\d)", date, re.ASCII)
        if m is None:
            raise ValueError(f"Invalid dst start/end date: {dststr}")
        date_offset = tuple(map(int, m.groups()))
        offset = _CalendarOffset(*date_offset)
    else:
        if type == "J":
            n_is_julian = True
            date = date[1:]
        else:
            n_is_julian = False

        doy = int(date)
        offset = _DayOffset(doy, n_is_julian)

    if time:
        offset.hour, offset.minute, offset.second = _parse_transition_time(
            time[0]
        )

    return offset


def _parse_transition_time(time_str):
    match = re.fullmatch(
        r"(?P<sign>[+-])?(?P<h>\d{1,3})(:(?P<m>\d{2})(:(?P<s>\d{2}))?)?",
        time_str,
        re.ASCII,
    )
    if match is None:
        raise ValueError(f"Invalid time: {time_str}")

    h, m, s = (int(v or 0) for v in match.group("h", "m", "s"))

    if h > 167:
        raise ValueError(
            f"Hour must be in [0, 167]: {time_str}"
        )

    if match.group("sign") == "-":
        h, m, s = -h, -m, -s

    return h, m, s


def _parse_tz_delta(tz_delta):
    match = re.fullmatch(
        r"(?P<sign>[+-])?(?P<h>\d{1,3})(:(?P<m>\d{2})(:(?P<s>\d{2}))?)?",
        tz_delta,
        re.ASCII,
    )
    # Anything passed to this function should already have hit an equivalent
    # regular expression to find the section to parse.
    assert match is not None, tz_delta

    h, m, s = (int(v or 0) for v in match.group("h", "m", "s"))

    total = h * 3600 + m * 60 + s

    if h > 24:
        raise ValueError(
            f"Offset hours must be in [0, 24]: {tz_delta}"
        )

    # Yes, +5 maps to an offset of -5h
    if match.group("sign") != "-":
        total = -total

    return total
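
# Illustrative sketch (not part of the module): _parse_tz_str returns a plain
# _ttinfo for rules with no transitions and a _TZStr when DST rules are
# present.  Note the POSIX sign convention handled by _parse_tz_delta: "EST5"
# means five hours *behind* UTC, i.e. an offset of -05:00.  The helper name
# and the sample strings are only examples.
def _example_parse_tz_str():
    fixed = _parse_tz_str("EST5")
    assert isinstance(fixed, _ttinfo)
    assert fixed.utcoff == timedelta(hours=-5)  # "5" in the string maps to -5h

    rule = _parse_tz_str("AEST-10AEDT,M10.1.0,M4.1.0/3")
    assert isinstance(rule, _TZStr)
    assert rule.std.tzname == "AEST" and rule.dst.tzname == "AEDT"
    return fixed, rule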