# Source file: /opt/saltstack/salt/lib/python3.10/site-packages/salt/fileserver/__init__.py
""" File server pluggable modules and generic backend functions """ import errno import fnmatch import logging import os import re import time from collections.abc import Sequence import salt.loader import salt.utils.files import salt.utils.path import salt.utils.url import salt.utils.versions from salt.utils.args import get_function_argspec as _argspec from salt.utils.decorators import ensure_unicode_args log = logging.getLogger(__name__) def _unlock_cache(w_lock): """ Unlock a FS file/dir based lock """ if not os.path.exists(w_lock): return try: if os.path.isdir(w_lock): os.rmdir(w_lock) elif os.path.isfile(w_lock): os.unlink(w_lock) except OSError as exc: log.trace("Error removing lockfile %s: %s", w_lock, exc) def _lock_cache(w_lock): try: os.mkdir(w_lock) except OSError as exc: if exc.errno != errno.EEXIST: raise return False else: log.trace("Lockfile %s created", w_lock) return True def wait_lock(lk_fn, dest, wait_timeout=0): """ If the write lock is there, check to see if the file is actually being written. If there is no change in the file size after a short sleep, remove the lock and move forward. """ if not os.path.exists(lk_fn): return False if not os.path.exists(dest): # The dest is not here, sleep for a bit, if the dest is not here yet # kill the lockfile and start the write time.sleep(1) if not os.path.isfile(dest): _unlock_cache(lk_fn) return False timeout = None if wait_timeout: timeout = time.time() + wait_timeout # There is a lock file, the dest is there, stat the dest, sleep and check # that the dest is being written, if it is not being written kill the lock # file and continue. Also check if the lock file is gone. s_count = 0 s_size = os.stat(dest).st_size while True: time.sleep(1) if not os.path.exists(lk_fn): return False size = os.stat(dest).st_size if size == s_size: s_count += 1 if s_count >= 3: # The file is not being written to, kill the lock and proceed _unlock_cache(lk_fn) return False else: s_size = size if timeout: if time.time() > timeout: raise ValueError( "Timeout({}s) for {} (lock: {}) elapsed".format( wait_timeout, dest, lk_fn ) ) return False def check_file_list_cache(opts, form, list_cache, w_lock): """ Checks the cache file to see if there is a new enough file list cache, and returns the match (if found, along with booleans used by the fileserver backend to determine if the cache needs to be refreshed/written). """ refresh_cache = False save_cache = True wait_lock(w_lock, list_cache, 5 * 60) if not os.path.isfile(list_cache) and _lock_cache(w_lock): refresh_cache = True else: attempt = 0 while attempt < 11: try: if os.path.exists(w_lock): # wait for a filelist lock for max 15min wait_lock(w_lock, list_cache, 15 * 60) if os.path.exists(list_cache): # calculate filelist age is possible cache_stat = os.stat(list_cache) # st_time can have a greater precision than time, removing # float precision makes sure age will never be a negative # number. current_time = int(time.time()) file_mtime = int(cache_stat.st_mtime) if file_mtime > current_time: log.debug( "Cache file modified time is in the future, ignoring. " "file=%s mtime=%s current_time=%s", list_cache, current_time, file_mtime, ) age = -1 else: age = current_time - file_mtime else: # if filelist does not exists yet, mark it as expired age = opts.get("fileserver_list_cache_time", 20) + 1 if age < 0: # Cache is from the future! Warn and mark cache invalid. log.warning("The file list_cache was created in the future!") if 0 <= age < opts.get("fileserver_list_cache_time", 20): # Young enough! 
                    with salt.utils.files.fopen(list_cache, "rb") as fp_:
                        log.debug(
                            "Returning file list from cache: age=%s cache_time=%s %s",
                            age,
                            opts.get("fileserver_list_cache_time", 20),
                            list_cache,
                        )
                        return salt.payload.load(fp_).get(form, []), False, False
                elif _lock_cache(w_lock):
                    # Set the w_lock and go
                    refresh_cache = True
                    break
            except Exception:  # pylint: disable=broad-except
                time.sleep(0.2)
                attempt += 1
                continue
        if attempt > 10:
            save_cache = False
            refresh_cache = True

    return None, refresh_cache, save_cache


def write_file_list_cache(opts, data, list_cache, w_lock):
    """
    Write the file list cache to ``list_cache`` and release the write lock.
    """
    with salt.utils.files.fopen(list_cache, "w+b") as fp_:
        fp_.write(salt.payload.dumps(data))
        _unlock_cache(w_lock)
        log.trace("Lockfile %s removed", w_lock)


def check_env_cache(opts, env_cache):
    """
    Returns cached env names, if present. Otherwise returns None.
    """
    if not os.path.isfile(env_cache):
        return None
    try:
        with salt.utils.files.fopen(env_cache, "rb") as fp_:
            log.trace("Returning env cache data from %s", env_cache)
            return salt.payload.load(fp_)
    except OSError:
        pass
    return None


def generate_mtime_map(opts, path_map):
    """
    Generate a dict of filename -> mtime
    """
    file_map = {}
    for saltenv, path_list in path_map.items():
        for path in path_list:
            for directory, _, filenames in salt.utils.path.os_walk(path):
                for item in filenames:
                    try:
                        file_path = os.path.join(directory, item)
                        # Don't walk any directories that match
                        # file_ignore_regex or glob
                        if is_file_ignored(opts, file_path):
                            continue
                        file_map[file_path] = os.path.getmtime(file_path)
                    except OSError:
                        # skip dangling symlinks
                        log.info(
                            "Failed to get mtime on %s, dangling symlink?",
                            file_path,
                        )
                        continue
    return file_map


def diff_mtime_map(map1, map2):
    """
    Is there a change to the mtime map? return a boolean
    """
    # check if the mtimes are the same
    if sorted(map1) != sorted(map2):
        return True

    # map1 and map2 are guaranteed to have the same keys,
    # so compare mtimes
    for filename, mtime in map1.items():
        if map2[filename] != mtime:
            return True

    # we made it, that means we have no changes
    return False


def reap_fileserver_cache_dir(cache_base, find_func):
    """
    Remove unused cache items assuming the cache directory follows a directory
    convention:

    cache_base -> saltenv -> relpath
    """
    for saltenv in os.listdir(cache_base):
        env_base = os.path.join(cache_base, saltenv)
        for root, dirs, files in salt.utils.path.os_walk(env_base):
            # if we have an empty directory, lets cleanup
            # This will only remove the directory on the second time
            # "_reap_cache" is called (which is intentional)
            if not dirs and not files:
                # only remove if the empty directory is older than 60s
                if time.time() - os.path.getctime(root) > 60:
                    os.rmdir(root)
                continue
            # if not, lets check the files in the directory
            for file_ in files:
                file_path = os.path.join(root, file_)
                file_rel_path = os.path.relpath(file_path, env_base)
                try:
                    filename, _, hash_type = file_rel_path.rsplit(".", 2)
                except ValueError:
                    log.warning(
                        "Found invalid hash file [%s] when attempting to reap "
                        "cache directory",
                        file_,
                    )
                    continue
                # do we have the file?
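                # find_func is expected to behave like a backend's find_file:
                # it returns a dict whose "path" key is an empty string when
                # the file no longer exists in the backend.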
                ret = find_func(filename, saltenv=saltenv)
                # if we don't actually have the file, lets clean up the cache
                # object
                if ret["path"] == "":
                    os.unlink(file_path)


def is_file_ignored(opts, fname):
    """
    If file_ignore_regex or file_ignore_glob were given in config, compare
    the given file path against all of them and return True on the first
    match.
    """
    if opts["file_ignore_regex"]:
        for regex in opts["file_ignore_regex"]:
            if re.search(regex, fname):
                log.debug("File matching file_ignore_regex. Skipping: %s", fname)
                return True

    if opts["file_ignore_glob"]:
        for glob in opts["file_ignore_glob"]:
            if fnmatch.fnmatch(fname, glob):
                log.debug("File matching file_ignore_glob. Skipping: %s", fname)
                return True
    return False


def clear_lock(clear_func, role, remote=None, lock_type="update"):
    """
    Function to allow non-fileserver functions to clear update locks

    clear_func
        A function reference. This function will be run (with the ``remote``
        param as an argument) to clear the lock, and must return a 2-tuple of
        lists, one containing messages describing successfully cleared locks,
        and one containing messages describing errors encountered.

    role
        What type of lock is being cleared (gitfs, git_pillar, etc.). Used
        solely for logging purposes.

    remote
        Optional string which should be used in ``func`` to pattern match so
        that a subset of remotes can be targeted.

    lock_type : update
        Which type of lock to clear

    Returns the return data from ``clear_func``.
    """
    msg = f"Clearing {lock_type} lock for {role} remotes"
    if remote:
        msg += f" matching {remote}"
    log.debug(msg)
    return clear_func(remote=remote, lock_type=lock_type)


class Fileserver:
    """
    Create a fileserver wrapper object that wraps the fileserver functions and
    iterates over them to execute the desired function within the scope of the
    desired fileserver backend.
    """

    def __init__(self, opts):
        self.opts = opts
        self.servers = salt.loader.fileserver(opts, opts["fileserver_backend"])

    def backends(self, back=None):
        """
        Return the backend list
        """
        if not back:
            back = self.opts["fileserver_backend"]
        else:
            if not isinstance(back, list):
                try:
                    back = back.split(",")
                except AttributeError:
                    back = str(back).split(",")

        if isinstance(back, Sequence):
            # The test suite uses an ImmutableList type (based on
            # collections.abc.Sequence) for lists, which breaks this function
            # in the test suite. This normalizes the value from the opts into
            # a list if it is based on collections.abc.Sequence.
            back = list(back)

        ret = []
        if not isinstance(back, list):
            return ret

        # Avoid error logging when performing lookups in the LazyDict by
        # instead doing the membership check on the result of a call to its
        # .keys() attribute rather than on the LazyDict itself.
        server_funcs = self.servers.keys()
        try:
            subtract_only = all(x.startswith("-") for x in back)
        except AttributeError:
            pass
        else:
            if subtract_only:
                # Only subtracting backends from enabled ones
                ret = self.opts["fileserver_backend"]
                for sub in back:
                    if f"{sub[1:]}.envs" in server_funcs:
                        ret.remove(sub[1:])
                return ret

        for sub in back:
            if f"{sub}.envs" in server_funcs:
                ret.append(sub)
        return ret

    def master_opts(self, load):
        """
        Simplify master opts
        """
        return self.opts

    def update_opts(self):
        # This fixes function monkey-patching performed by pillar
        for name, func in self.servers.items():
            try:
                if "__opts__" in func.__globals__:
                    func.__globals__["__opts__"].update(self.opts)
            except AttributeError:
                pass

    def clear_cache(self, back=None):
        """
        Clear the cache of all of the fileserver backends that support the
        clear_cache function, or the named backend(s) only.
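
        Illustrative usage (a sketch; assumes a loaded master ``opts`` dict
        and at least one enabled backend, such as gitfs, that implements
        ``clear_cache``):

        .. code-block:: python

            fs = Fileserver(opts)
            cleared, errors = fs.clear_cache(back="gitfs")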
""" back = self.backends(back) cleared = [] errors = [] for fsb in back: fstr = f"{fsb}.clear_cache" if fstr in self.servers: log.debug("Clearing %s fileserver cache", fsb) failed = self.servers[fstr]() if failed: errors.extend(failed) else: cleared.append( f"The {fsb} fileserver cache was successfully cleared" ) return cleared, errors def lock(self, back=None, remote=None): """ ``remote`` can either be a dictionary containing repo configuration information, or a pattern. If the latter, then remotes for which the URL matches the pattern will be locked. """ back = self.backends(back) locked = [] errors = [] for fsb in back: fstr = f"{fsb}.lock" if fstr in self.servers: msg = f"Setting update lock for {fsb} remotes" if remote: if not isinstance(remote, str): errors.append(f"Badly formatted remote pattern '{remote}'") continue else: msg += f" matching {remote}" log.debug(msg) good, bad = self.servers[fstr](remote=remote) locked.extend(good) errors.extend(bad) return locked, errors def clear_lock(self, back=None, remote=None): """ Clear the update lock for the enabled fileserver backends back Only clear the update lock for the specified backend(s). The default is to clear the lock for all enabled backends remote If specified, then any remotes which contain the passed string will have their lock cleared. """ back = self.backends(back) cleared = [] errors = [] for fsb in back: fstr = f"{fsb}.clear_lock" if fstr in self.servers: good, bad = clear_lock(self.servers[fstr], fsb, remote=remote) cleared.extend(good) errors.extend(bad) return cleared, errors def update(self, back=None, **kwargs): """ Update all of the enabled fileserver backends which support the update function """ back = self.backends(back) for fsb in back: fstr = f"{fsb}.update" if fstr in self.servers: log.debug("Updating %s fileserver cache", fsb) self.servers[fstr](**kwargs) def update_intervals(self, back=None): """ Return the update intervals for all of the enabled fileserver backends which support variable update intervals. 
""" back = self.backends(back) ret = {} for fsb in back: fstr = f"{fsb}.update_intervals" if fstr in self.servers: ret[fsb] = self.servers[fstr]() return ret def envs(self, back=None, sources=False): """ Return the environments for the named backend or all backends """ back = self.backends(back) ret = set() if sources: ret = {} for fsb in back: fstr = f"{fsb}.envs" kwargs = ( {"ignore_cache": True} if "ignore_cache" in _argspec(self.servers[fstr]).args and self.opts["__role"] == "minion" else {} ) if sources: ret[fsb] = self.servers[fstr](**kwargs) else: ret.update(self.servers[fstr](**kwargs)) if sources: return ret return list(ret) def file_envs(self, load=None): """ Return environments for all backends for requests from fileclient """ if load is None: load = {} load.pop("cmd", None) return self.envs(back=load.get("back", None), sources=load.get("sources", None)) def init(self, back=None): """ Initialize the backend, only do so if the fs supports an init function """ back = self.backends(back) for fsb in back: fstr = f"{fsb}.init" if fstr in self.servers: self.servers[fstr]() def _find_file(self, load): """ Convenience function for calls made using the RemoteClient """ path = load.get("path") if not path: return {"path": "", "rel": ""} tgt_env = load.get("saltenv", "base") return self.find_file(path, tgt_env) def file_find(self, load): """ Convenience function for calls made using the LocalClient """ path = load.get("path") if not path: return {"path": "", "rel": ""} tgt_env = load.get("saltenv", "base") return self.find_file(path, tgt_env) def find_file(self, path, saltenv, back=None): """ Find the path and return the fnd structure, this structure is passed to other backend interfaces. """ path = salt.utils.stringutils.to_unicode(path) saltenv = salt.utils.stringutils.to_unicode(saltenv) back = self.backends(back) kwargs = {} if salt.utils.url.is_escaped(path): # don't attempt to find URL query arguments in the path path = salt.utils.url.unescape(path) else: if "?" in path: hcomps = path.split("?") path = hcomps[0] comps = hcomps[1].split("&") for comp in comps: if "=" not in comp: # Invalid option, skip it continue args = comp.split("=", 1) kwargs[args[0]] = args[1] fnd = {"path": "", "rel": ""} if os.path.isabs(path) or "../" in path: return fnd if "env" in kwargs: # "env" is not supported; Use "saltenv". kwargs.pop("env") if "saltenv" in kwargs: saltenv = kwargs.pop("saltenv") if not isinstance(saltenv, str): saltenv = str(saltenv) for fsb in back: fstr = f"{fsb}.find_file" if fstr in self.servers: fnd = self.servers[fstr](path, saltenv, **kwargs) if fnd.get("path"): fnd["back"] = fsb return fnd return fnd def serve_file(self, load): """ Serve up a chunk of a file """ ret = {"data": "", "dest": ""} if "env" in load: # "env" is not supported; Use "saltenv". load.pop("env") if "path" not in load or "loc" not in load or "saltenv" not in load: return ret if not isinstance(load["saltenv"], str): load["saltenv"] = str(load["saltenv"]) fnd = self.find_file(load["path"], load["saltenv"]) if not fnd.get("back"): return ret fstr = "{}.serve_file".format(fnd["back"]) if fstr in self.servers: return self.servers[fstr](load, fnd) return ret def __file_hash_and_stat(self, load): """ Common code for hashing and stating files """ if "env" in load: # "env" is not supported; Use "saltenv". 
load.pop("env") if "path" not in load or "saltenv" not in load: return "", None if not isinstance(load["saltenv"], str): load["saltenv"] = str(load["saltenv"]) fnd = self.find_file( salt.utils.stringutils.to_unicode(load["path"]), load["saltenv"] ) if not fnd.get("back"): return "", None stat_result = fnd.get("stat", None) fstr = "{}.file_hash".format(fnd["back"]) if fstr in self.servers: return self.servers[fstr](load, fnd), stat_result return "", None def file_hash(self, load): """ Return the hash of a given file """ try: return self.__file_hash_and_stat(load)[0] except (IndexError, TypeError): return "" def file_hash_and_stat(self, load): """ Return the hash and stat result of a given file """ try: return self.__file_hash_and_stat(load) except (IndexError, TypeError): return "", None def clear_file_list_cache(self, load): """ Deletes the file_lists cache files """ if "env" in load: # "env" is not supported; Use "saltenv". load.pop("env") saltenv = load.get("saltenv", []) if saltenv is not None: if not isinstance(saltenv, list): try: saltenv = [x.strip() for x in saltenv.split(",")] except AttributeError: saltenv = [x.strip() for x in str(saltenv).split(",")] for idx, val in enumerate(saltenv): if not isinstance(val, str): saltenv[idx] = str(val) ret = {} fsb = self.backends(load.pop("fsbackend", None)) list_cachedir = os.path.join(self.opts["cachedir"], "file_lists") try: file_list_backends = os.listdir(list_cachedir) except OSError as exc: if exc.errno == errno.ENOENT: log.debug("No file list caches found") return {} else: log.error( "Failed to get list of saltenvs for which the master has " "cached file lists: %s", exc, ) for back in file_list_backends: try: cache_files = os.listdir(os.path.join(list_cachedir, back)) except OSError as exc: log.error( "Failed to find file list caches for saltenv '%s': %s", back, exc ) continue for cache_file in cache_files: try: cache_saltenv, extension = cache_file.rsplit(".", 1) except ValueError: # Filename has no dot in it. Not a cache file, ignore. continue if extension != "p": # Filename does not end in ".p". Not a cache file, ignore. continue elif back not in fsb or ( saltenv is not None and cache_saltenv not in saltenv ): log.debug( "Skipping %s file list cache for saltenv '%s'", back, cache_saltenv, ) continue try: os.remove(os.path.join(list_cachedir, back, cache_file)) except OSError as exc: if exc.errno != errno.ENOENT: log.error("Failed to remove %s: %s", exc.filename, exc.strerror) else: ret.setdefault(back, []).append(cache_saltenv) log.debug( "Removed %s file list cache for saltenv '%s'", cache_saltenv, back, ) # Ensure reproducible ordering of returns for key in ret: ret[key].sort() return ret @ensure_unicode_args def file_list(self, load): """ Return a list of files from the dominant environment """ if "env" in load: # "env" is not supported; Use "saltenv". load.pop("env") ret = set() if "saltenv" not in load: return [] if not isinstance(load["saltenv"], str): load["saltenv"] = str(load["saltenv"]) for fsb in self.backends(load.pop("fsbackend", None)): fstr = f"{fsb}.file_list" if fstr in self.servers: ret.update(self.servers[fstr](load)) # some *fs do not handle prefix. Ensure it is filtered prefix = load.get("prefix", "").strip("/") if prefix != "": ret = [f for f in ret if f.startswith(prefix)] return sorted(ret) @ensure_unicode_args def file_list_emptydirs(self, load): """ List all emptydirs in the given environment """ if "env" in load: # "env" is not supported; Use "saltenv". 
load.pop("env") ret = set() if "saltenv" not in load: return [] if not isinstance(load["saltenv"], str): load["saltenv"] = str(load["saltenv"]) for fsb in self.backends(None): fstr = f"{fsb}.file_list_emptydirs" if fstr in self.servers: ret.update(self.servers[fstr](load)) # some *fs do not handle prefix. Ensure it is filtered prefix = load.get("prefix", "").strip("/") if prefix != "": ret = [f for f in ret if f.startswith(prefix)] return sorted(ret) @ensure_unicode_args def dir_list(self, load): """ List all directories in the given environment """ if "env" in load: # "env" is not supported; Use "saltenv". load.pop("env") ret = set() if "saltenv" not in load: return [] if not isinstance(load["saltenv"], str): load["saltenv"] = str(load["saltenv"]) for fsb in self.backends(load.pop("fsbackend", None)): fstr = f"{fsb}.dir_list" if fstr in self.servers: ret.update(self.servers[fstr](load)) # some *fs do not handle prefix. Ensure it is filtered prefix = load.get("prefix", "").strip("/") if prefix != "": ret = [f for f in ret if f.startswith(prefix)] return sorted(ret) @ensure_unicode_args def symlink_list(self, load): """ Return a list of symlinked files and dirs """ if "env" in load: # "env" is not supported; Use "saltenv". load.pop("env") ret = {} if "saltenv" not in load: return {} if not isinstance(load["saltenv"], str): load["saltenv"] = str(load["saltenv"]) for fsb in self.backends(load.pop("fsbackend", None)): symlstr = f"{fsb}.symlink_list" if symlstr in self.servers: ret = self.servers[symlstr](load) # some *fs do not handle prefix. Ensure it is filtered prefix = load.get("prefix", "").strip("/") if prefix != "": ret = {x: y for x, y in ret.items() if x.startswith(prefix)} return ret class FSChan: """ A class that mimics the transport channels allowing for local access to to the fileserver class class structure """ def __init__(self, opts, **kwargs): self.opts = opts self.kwargs = kwargs self.fs = Fileserver(self.opts) self.fs.init() if self.opts.get("file_client", "remote") == "local": if "__fs_update" not in self.opts: self.fs.update() self.opts["__fs_update"] = True else: self.fs.update() self.cmd_stub = {"master_tops": {}, "ext_nodes": {}} def send( self, load, tries=None, timeout=None, raw=False ): # pylint: disable=unused-argument """ Emulate the channel send method, the tries and timeout are not used """ if "cmd" not in load: log.error("Malformed request, no cmd: %s", load) return {} cmd = load["cmd"].lstrip("_") if cmd in self.cmd_stub: return self.cmd_stub[cmd] if not hasattr(self.fs, cmd): log.error("Malformed request, invalid cmd: %s", load) return {} return getattr(self.fs, cmd)(load) def close(self): pass