PNG  IHDRX cHRMz&u0`:pQ<bKGD pHYsodtIME MeqIDATxw]Wug^Qd˶ 6`!N:!@xI~)%7%@Bh&`lnjVF29gΨ4E$|>cɚ{gk= %,a KX%,a KX%,a KX%,a KX%,a KX%,a KX%, b` ǟzeאfp]<!SJmɤY޲ڿ,%c ~ع9VH.!Ͳz&QynֺTkRR.BLHi٪:l;@(!MԴ=žI,:o&N'Kù\vRmJ雵֫AWic H@" !: Cé||]k-Ha oݜ:y F())u]aG7*JV@J415p=sZH!=!DRʯvɱh~V\}v/GKY$n]"X"}t@ xS76^[bw4dsce)2dU0 CkMa-U5tvLƀ~mlMwfGE/-]7XAƟ`׮g ewxwC4\[~7@O-Q( a*XGƒ{ ՟}$_y3tĐƤatgvێi|K=uVyrŲlLӪuܿzwk$m87k( `múcE)"@rK( z4$D; 2kW=Xb$V[Ru819קR~qloѱDyįݎ*mxw]y5e4K@ЃI0A D@"BDk_)N\8͜9dz"fK0zɿvM /.:2O{ Nb=M=7>??Zuo32 DLD@D| &+֎C #B8ַ`bOb $D#ͮҪtx]%`ES`Ru[=¾!@Od37LJ0!OIR4m]GZRJu$‡c=%~s@6SKy?CeIh:[vR@Lh | (BhAMy=݃  G"'wzn޺~8ԽSh ~T*A:xR[ܹ?X[uKL_=fDȊ؂p0}7=D$Ekq!/t.*2ʼnDbŞ}DijYaȲ(""6HA;:LzxQ‘(SQQ}*PL*fc\s `/d'QXW, e`#kPGZuŞuO{{wm[&NBTiiI0bukcA9<4@SӊH*؎4U/'2U5.(9JuDfrޱtycU%j(:RUbArLֺN)udA':uGQN"-"Is.*+k@ `Ojs@yU/ H:l;@yyTn}_yw!VkRJ4P)~y#)r,D =ě"Q]ci'%HI4ZL0"MJy 8A{ aN<8D"1#IJi >XjX֔#@>-{vN!8tRݻ^)N_╗FJEk]CT՟ YP:_|H1@ CBk]yKYp|og?*dGvzنzӴzjֺNkC~AbZƷ`.H)=!QͷVTT(| u78y֮}|[8-Vjp%2JPk[}ԉaH8Wpqhwr:vWª<}l77_~{s۴V+RCģ%WRZ\AqHifɤL36: #F:p]Bq/z{0CU6ݳEv_^k7'>sq*+kH%a`0ԣisqにtү04gVgW΂iJiS'3w.w}l6MC2uԯ|>JF5`fV5m`Y**Db1FKNttu]4ccsQNnex/87+}xaUW9y>ͯ骵G{䩓Գ3+vU}~jJ.NFRD7<aJDB1#ҳgSb,+CS?/ VG J?|?,2#M9}B)MiE+G`-wo߫V`fio(}S^4e~V4bHOYb"b#E)dda:'?}׮4繏`{7Z"uny-?ǹ;0MKx{:_pÚmFמ:F " .LFQLG)Q8qN q¯¯3wOvxDb\. 
BKD9_NN &L:4D{mm o^tֽ:q!ƥ}K+<"m78N< ywsard5+вz~mnG)=}lYݧNj'QJS{S :UYS-952?&O-:W}(!6Mk4+>A>j+i|<<|;ر^߉=HE|V#F)Emm#}/"y GII웻Jі94+v뾧xu~5C95~ūH>c@덉pʃ1/4-A2G%7>m;–Y,cyyaln" ?ƻ!ʪ<{~h~i y.zZB̃/,雋SiC/JFMmBH&&FAbϓO^tubbb_hZ{_QZ-sύodFgO(6]TJA˯#`۶ɟ( %$&+V'~hiYy>922 Wp74Zkq+Ovn錄c>8~GqܲcWꂎz@"1A.}T)uiW4="jJ2W7mU/N0gcqܗOO}?9/wìXžΏ0 >֩(V^Rh32!Hj5`;O28؇2#ݕf3 ?sJd8NJ@7O0 b־?lldщ̡&|9C.8RTWwxWy46ah嘦mh٤&l zCy!PY?: CJyв]dm4ǜҐR޻RլhX{FƯanшQI@x' ao(kUUuxW_Ñ줮[w8 FRJ(8˼)_mQ _!RJhm=!cVmm ?sFOnll6Qk}alY}; "baӌ~M0w,Ggw2W:G/k2%R,_=u`WU R.9T"v,<\Ik޽/2110Ӿxc0gyC&Ny޽JҢrV6N ``یeA16"J³+Rj*;BϜkZPJaÍ<Jyw:NP8/D$ 011z֊Ⱳ3ι֘k1V_"h!JPIΣ'ɜ* aEAd:ݺ>y<}Lp&PlRfTb1]o .2EW\ͮ]38؋rTJsǏP@芎sF\> P^+dYJLbJ C-xϐn> ι$nj,;Ǖa FU *择|h ~izť3ᤓ`K'-f tL7JK+vf2)V'-sFuB4i+m+@My=O҈0"|Yxoj,3]:cо3 $#uŘ%Y"y죯LebqtҢVzq¼X)~>4L׶m~[1_k?kxֺQ`\ |ٛY4Ѯr!)N9{56(iNq}O()Em]=F&u?$HypWUeB\k]JɩSع9 Zqg4ZĊo oMcjZBU]B\TUd34ݝ~:7ڶSUsB0Z3srx 7`:5xcx !qZA!;%͚7&P H<WL!džOb5kF)xor^aujƍ7 Ǡ8/p^(L>ὴ-B,{ۇWzֺ^k]3\EE@7>lYBȝR.oHnXO/}sB|.i@ɥDB4tcm,@ӣgdtJ!lH$_vN166L__'Z)y&kH;:,Y7=J 9cG) V\hjiE;gya~%ks_nC~Er er)muuMg2;֫R)Md) ,¶ 2-wr#F7<-BBn~_(o=KO㭇[Xv eN_SMgSҐ BS헃D%g_N:/pe -wkG*9yYSZS.9cREL !k}<4_Xs#FmҶ:7R$i,fi!~' # !6/S6y@kZkZcX)%5V4P]VGYq%H1!;e1MV<!ϐHO021Dp= HMs~~a)ަu7G^];git!Frl]H/L$=AeUvZE4P\.,xi {-~p?2b#amXAHq)MWǾI_r`S Hz&|{ +ʖ_= (YS(_g0a03M`I&'9vl?MM+m~}*xT۲(fY*V4x@29s{DaY"toGNTO+xCAO~4Ϳ;p`Ѫ:>Ҵ7K 3}+0 387x\)a"/E>qpWB=1 ¨"MP(\xp߫́A3+J] n[ʼnӼaTbZUWb={~2ooKױӰp(CS\S筐R*JغV&&"FA}J>G֐p1ٸbk7 ŘH$JoN <8s^yk_[;gy-;߉DV{c B yce% aJhDȶ 2IdйIB/^n0tNtџdcKj4϶v~- CBcgqx9= PJ) dMsjpYB] GD4RDWX +h{y`,3ꊕ$`zj*N^TP4L:Iz9~6s) Ga:?y*J~?OrMwP\](21sZUD ?ܟQ5Q%ggW6QdO+\@ ̪X'GxN @'4=ˋ+*VwN ne_|(/BDfj5(Dq<*tNt1х!MV.C0 32b#?n0pzj#!38}޴o1KovCJ`8ŗ_"]] rDUy޲@ Ȗ-;xџ'^Y`zEd?0„ DAL18IS]VGq\4o !swV7ˣι%4FѮ~}6)OgS[~Q vcYbL!wG3 7띸*E Pql8=jT\꘿I(z<[6OrR8ºC~ډ]=rNl[g|v TMTղb-o}OrP^Q]<98S¤!k)G(Vkwyqyr޽Nv`N/e p/~NAOk \I:G6]4+K;j$R:Mi #*[AȚT,ʰ,;N{HZTGMoּy) ]%dHء9Պ䠬|<45,\=[bƟ8QXeB3- &dҩ^{>/86bXmZ]]yޚN[(WAHL$YAgDKp=5GHjU&99v簪C0vygln*P)9^͞}lMuiH!̍#DoRBn9l@ xA/_v=ȺT{7Yt2N"4!YN`ae 
>Q<XMydEB`VU}u]嫇.%e^ánE87Mu\t`cP=AD/G)sI"@MP;)]%fH9'FNsj1pVhY&9=0pfuJ&gޤx+k:!r˭wkl03׼Ku C &ѓYt{.O.zҏ z}/tf_wEp2gvX)GN#I ݭ߽v/ .& и(ZF{e"=V!{zW`, ]+LGz"(UJp|j( #V4, 8B 0 9OkRrlɱl94)'VH9=9W|>PS['G(*I1==C<5"Pg+x'K5EMd؞Af8lG ?D FtoB[je?{k3zQ vZ;%Ɠ,]E>KZ+T/ EJxOZ1i #T<@ I}q9/t'zi(EMqw`mYkU6;[t4DPeckeM;H}_g pMww}k6#H㶏+b8雡Sxp)&C $@'b,fPߑt$RbJ'vznuS ~8='72_`{q纶|Q)Xk}cPz9p7O:'|G~8wx(a 0QCko|0ASD>Ip=4Q, d|F8RcU"/KM opKle M3#i0c%<7׿p&pZq[TR"BpqauIp$ 8~Ĩ!8Սx\ւdT>>Z40ks7 z2IQ}ItԀ<-%S⍤};zIb$I 5K}Q͙D8UguWE$Jh )cu4N tZl+[]M4k8֦Zeq֮M7uIqG 1==tLtR,ƜSrHYt&QP윯Lg' I,3@P'}'R˪e/%-Auv·ñ\> vDJzlӾNv5:|K/Jb6KI9)Zh*ZAi`?S {aiVDԲuy5W7pWeQJk֤#5&V<̺@/GH?^τZL|IJNvI:'P=Ϛt"¨=cud S Q.Ki0 !cJy;LJR;G{BJy޺[^8fK6)=yʊ+(k|&xQ2`L?Ȓ2@Mf 0C`6-%pKpm')c$׻K5[J*U[/#hH!6acB JA _|uMvDyk y)6OPYjœ50VT K}cǻP[ $:]4MEA.y)|B)cf-A?(e|lɉ#P9V)[9t.EiQPDѠ3ϴ;E:+Օ t ȥ~|_N2,ZJLt4! %ա]u {+=p.GhNcŞQI?Nd'yeh n7zi1DB)1S | S#ًZs2|Ɛy$F SxeX{7Vl.Src3E℃Q>b6G ўYCmtկ~=K0f(=LrAS GN'ɹ9<\!a`)֕y[uՍ[09` 9 +57ts6}b4{oqd+J5fa/,97J#6yν99mRWxJyѡyu_TJc`~W>l^q#Ts#2"nD1%fS)FU w{ܯ R{ ˎ󅃏џDsZSQS;LV;7 Od1&1n$ N /.q3~eNɪ]E#oM~}v֯FڦwyZ=<<>Xo稯lfMFV6p02|*=tV!c~]fa5Y^Q_WN|Vs 0ҘދU97OI'N2'8N֭fgg-}V%y]U4 峧p*91#9U kCac_AFңĪy뚇Y_AiuYyTTYЗ-(!JFLt›17uTozc. S;7A&&<ԋ5y;Ro+:' *eYJkWR[@F %SHWP 72k4 qLd'J "zB6{AC0ƁA6U.'F3:Ȅ(9ΜL;D]m8ڥ9}dU "v!;*13Rg^fJyShyy5auA?ɩGHRjo^]׽S)Fm\toy 4WQS@mE#%5ʈfFYDX ~D5Ϡ9tE9So_aU4?Ѽm%&c{n>.KW1Tlb}:j uGi(JgcYj0qn+>) %\!4{LaJso d||u//P_y7iRJ߬nHOy) l+@$($VFIQ9%EeKʈU. 
ia&FY̒mZ=)+qqoQn >L!qCiDB;Y<%} OgBxB!ØuG)WG9y(Ą{_yesuZmZZey'Wg#C~1Cev@0D $a@˲(.._GimA:uyw֬%;@!JkQVM_Ow:P.s\)ot- ˹"`B,e CRtaEUP<0'}r3[>?G8xU~Nqu;Wm8\RIkբ^5@k+5(By'L&'gBJ3ݶ!/㮻w҅ yqPWUg<e"Qy*167΃sJ\oz]T*UQ<\FԎ`HaNmڜ6DysCask8wP8y9``GJ9lF\G g's Nn͵MLN֪u$| /|7=]O)6s !ĴAKh]q_ap $HH'\1jB^s\|- W1:=6lJBqjY^LsPk""`]w)󭃈,(HC ?䔨Y$Sʣ{4Z+0NvQkhol6C.婧/u]FwiVjZka&%6\F*Ny#8O,22+|Db~d ~Çwc N:FuuCe&oZ(l;@ee-+Wn`44AMK➝2BRՈt7g*1gph9N) *"TF*R(#'88pm=}X]u[i7bEc|\~EMn}P瘊J)K.0i1M6=7'_\kaZ(Th{K*GJyytw"IO-PWJk)..axӝ47"89Cc7ĐBiZx 7m!fy|ϿF9CbȩV 9V-՛^pV̌ɄS#Bv4-@]Vxt-Z, &ֺ*diؠ2^VXbs֔Ìl.jQ]Y[47gj=幽ex)A0ip׳ W2[ᎇhuE^~q흙L} #-b۸oFJ_QP3r6jr+"nfzRJTUqoaۍ /$d8Mx'ݓ= OՃ| )$2mcM*cЙj}f };n YG w0Ia!1Q.oYfr]DyISaP}"dIӗթO67jqR ҊƐƈaɤGG|h;t]䗖oSv|iZqX)oalv;۩meEJ\!8=$4QU4Xo&VEĊ YS^E#d,yX_> ۘ-e\ "Wa6uLĜZi`aD9.% w~mB(02G[6y.773a7 /=o7D)$Z 66 $bY^\CuP. (x'"J60׿Y:Oi;F{w佩b+\Yi`TDWa~|VH)8q/=9!g߆2Y)?ND)%?Ǐ`k/sn:;O299yB=a[Ng 3˲N}vLNy;*?x?~L&=xyӴ~}q{qE*IQ^^ͧvü{Huu=R|>JyUlZV, B~/YF!Y\u_ݼF{_C)LD]m {H 0ihhadd nUkf3oٺCvE\)QJi+֥@tDJkB$1!Đr0XQ|q?d2) Ӣ_}qv-< FŊ߫%roppVBwü~JidY4:}L6M7f٬F "?71<2#?Jyy4뷢<_a7_=Q E=S1И/9{+93֮E{ǂw{))?maÆm(uLE#lïZ  ~d];+]h j?!|$F}*"4(v'8s<ŏUkm7^7no1w2ؗ}TrͿEk>p'8OB7d7R(A 9.*Mi^ͳ; eeUwS+C)uO@ =Sy]` }l8^ZzRXj[^iUɺ$tj))<sbDJfg=Pk_{xaKo1:-uyG0M ԃ\0Lvuy'ȱc2Ji AdyVgVh!{]/&}}ċJ#%d !+87<;qN޼Nفl|1N:8ya  8}k¾+-$4FiZYÔXk*I&'@iI99)HSh4+2G:tGhS^繿 Kتm0 вDk}֚+QT4;sC}rՅE,8CX-e~>G&'9xpW,%Fh,Ry56Y–hW-(v_,? ; qrBk4-V7HQ;ˇ^Gv1JVV%,ik;D_W!))+BoS4QsTM;gt+ndS-~:11Sgv!0qRVh!"Ȋ(̦Yl.]PQWgٳE'`%W1{ndΗBk|Ž7ʒR~,lnoa&:ü$ 3<a[CBݮwt"o\ePJ=Hz"_c^Z.#ˆ*x z̝grY]tdkP*:97YľXyBkD4N.C_[;F9`8& !AMO c `@BA& Ost\-\NX+Xp < !bj3C&QL+*&kAQ=04}cC!9~820G'PC9xa!w&bo_1 Sw"ܱ V )Yl3+ס2KoXOx]"`^WOy :3GO0g;%Yv㐫(R/r (s } u B &FeYZh0y> =2<Ϟc/ -u= c&׭,.0"g"7 6T!vl#sc>{u/Oh Bᾈ)۴74]x7 gMӒ"d]U)}" v4co[ ɡs 5Gg=XR14?5A}D "b{0$L .\4y{_fe:kVS\\O]c^W52LSBDM! 
C3Dhr̦RtArx4&agaN3Cf<Ԉp4~ B'"1@.b_/xQ} _߃҉/gٓ2Qkqp0շpZ2fԫYz< 4L.Cyυι1t@鎫Fe sYfsF}^ V}N<_`p)alٶ "(XEAVZ<)2},:Ir*#m_YӼ R%a||EƼIJ,,+f"96r/}0jE/)s)cjW#w'Sʯ5<66lj$a~3Kʛy 2:cZ:Yh))+a߭K::N,Q F'qB]={.]h85C9cr=}*rk?vwV렵ٸW Rs%}rNAkDv|uFLBkWY YkX מ|)1!$#3%y?pF<@<Rr0}: }\J [5FRxY<9"SQdE(Q*Qʻ)q1E0B_O24[U'],lOb ]~WjHޏTQ5Syu wq)xnw8~)c 쫬gٲߠ H% k5dƝk> kEj,0% b"vi2Wس_CuK)K{n|>t{P1򨾜j>'kEkƗBg*H%'_aY6Bn!TL&ɌOb{c`'d^{t\i^[uɐ[}q0lM˕G:‚4kb祔c^:?bpg… +37stH:0}en6x˟%/<]BL&* 5&fK9Mq)/iyqtA%kUe[ڛKN]Ě^,"`/ s[EQQm?|XJ߅92m]G.E΃ח U*Cn.j_)Tѧj̿30ڇ!A0=͜ar I3$C^-9#|pk!)?7.x9 @OO;WƝZBFU keZ75F6Tc6"ZȚs2y/1 ʵ:u4xa`C>6Rb/Yм)^=+~uRd`/|_8xbB0?Ft||Z\##|K 0>>zxv8۴吅q 8ĥ)"6>~\8:qM}#͚'ĉ#p\׶ l#bA?)|g g9|8jP(cr,BwV (WliVxxᡁ@0Okn;ɥh$_ckCgriv}>=wGzβ KkBɛ[˪ !J)h&k2%07δt}!d<9;I&0wV/ v 0<H}L&8ob%Hi|޶o&h1L|u֦y~󛱢8fٲUsւ)0oiFx2}X[zVYr_;N(w]_4B@OanC?gĦx>мgx>ΛToZoOMp>40>V Oy V9iq!4 LN,ˢu{jsz]|"R޻&'ƚ{53ўFu(<٪9:΋]B;)B>1::8;~)Yt|0(pw2N%&X,URBK)3\zz&}ax4;ǟ(tLNg{N|Ǽ\G#C9g$^\}p?556]/RP.90 k,U8/u776s ʪ_01چ|\N 0VV*3H鴃J7iI!wG_^ypl}r*jɤSR 5QN@ iZ#1ٰy;_\3\BQQ x:WJv츟ٯ$"@6 S#qe딇(/P( Dy~TOϻ<4:-+F`0||;Xl-"uw$Цi󼕝mKʩorz"mϺ$F:~E'ҐvD\y?Rr8_He@ e~O,T.(ފR*cY^m|cVR[8 JҡSm!ΆԨb)RHG{?MpqrmN>߶Y)\p,d#xۆWY*,l6]v0h15M˙MS8+EdI='LBJIH7_9{Caз*Lq,dt >+~ّeʏ?xԕ4bBAŚjﵫ!'\Ը$WNvKO}ӽmSşذqsOy?\[,d@'73'j%kOe`1.g2"e =YIzS2|zŐƄa\U,dP;jhhhaxǶ?КZ՚.q SE+XrbOu%\GتX(H,N^~]JyEZQKceTQ]VGYqnah;y$cQahT&QPZ*iZ8UQQM.qo/T\7X"u?Mttl2Xq(IoW{R^ ux*SYJ! 4S.Jy~ BROS[V|žKNɛP(L6V^|cR7i7nZW1Fd@ Ara{詑|(T*dN]Ko?s=@ |_EvF]׍kR)eBJc" MUUbY6`~V޴dJKß&~'d3i WWWWWW
Current Directory: /opt/dedrads
Viewing File: /opt/dedrads/alp.py
#! /usr/lib/rads/venv/bin/python3
'''Apache Log Parser - Parse Apache domain access logs'''
import os
import sys
import logging
import re
import json
from argparse import ArgumentParser
from time import time
from collections import defaultdict
from platform import node as hostname

import envinfo
from dns import resolver, reversename, exception
from rads import setup_logging, color

__maintainer__ = "Daniel K"
__email__ = "danielk@inmotionhosting.com"
__version__ = "1.0.2"
__date__ = "2016-09-16"

# Location of Apache domain logs for users.
# The bit at the end is for str.format() to allow users to be added there
USER_DOMLOG_DIR = envinfo.get_data()['apache_domlogs'] + "/{0!s}/"

# Maximum number of log files on shared servers
MAX_LOGS_SHARED = 50

LOGGER = logging.getLogger(__name__)


def ptr_lookup(ip_addr):
    """Return the PTR (reverse DNS) record for an IP address.

    Always returns a string: either the first PTR record found, or a
    short human-readable description of why the lookup failed.
    """
    try:
        myresolver = resolver.Resolver()
        # Keep DNS waits short -- this runs once per "top IP" displayed.
        myresolver.lifetime = 1.0
        myresolver.timeout = 1.0
        question_name = reversename.from_address(ip_addr)
        answers = myresolver.query(question_name, "PTR")
        return str(answers[0])
    except resolver.NXDOMAIN:
        return "No Record Found"
    except exception.Timeout:
        LOGGER.debug("Query Timed out looking for %s", ip_addr)
        return "Query Timed Out"
    except resolver.NoNameservers:
        LOGGER.debug("No nameservers found for %s", ip_addr)
        return "No nameservers found"
    except resolver.NoAnswer:
        LOGGER.debug("No answer for %s", ip_addr)
        return "No Answer"


def domlog_lines(source):
    '''Yield log lines from the file at `source`, or from STDIN if
    `source` is "-". Missing or unreadable files yield nothing.'''
    if source == "-":
        LOGGER.info("Processing from STDIN.")
        yield from sys.stdin
    else:
        filename = source
        LOGGER.info("Process file %s", source)
        if os.path.exists(filename):
            with open(filename, encoding='utf-8') as file_handle:
                try:
                    yield from file_handle
                except OSError:
                    LOGGER.error("Error reading file %s", filename)
        else:
            # Previously this case was silent; log it so an empty result
            # set is explainable.
            LOGGER.warning("File %s does not exist", filename)


def trim_dict(dictionary, entries):
    '''Return a new dict holding the top `entries` items of `dictionary`,
    ordered by descending value.'''
    top_keys = sorted(dictionary, key=dictionary.get, reverse=True)[:entries]
    return {key: dictionary[key] for key in top_keys}


def parse_domlogs(source, numlines=10, add_ptr=False):
    '''Parse an Apache domain access log and tally statistics.

    source   -- path to a log file, or "-" for STDIN
    numlines -- number of entries to keep in each "top" section
    add_ptr  -- when True, add a 'top_ips_with_ptr' section that pairs
                each top IP with its PTR record

    Returns a dict with keys: status_codes, daily_hourly, requests,
    user_agents, top_ips, linecount, and (only when add_ptr is True)
    top_ips_with_ptr.
    '''
    results = {
        'status_codes': defaultdict(int),
        'daily_hourly': defaultdict(lambda: defaultdict(int)),
        'requests': defaultdict(int),
        'user_agents': defaultdict(int),
        'top_ips': defaultdict(int),
        'linecount': 0,
    }
    # Single regex to match all log lines.
    # It stores each entry in named groups, even though not all groups
    # are used by this script. You can see the names listed below
    # as (?P<name>...).
    rx_logline = re.compile(
        r'^(?P<ips>(?P<ip>[0-9.]+|[a-fA-F0-9:]+)'
        # Additional comma-separated IPs (e.g. proxy chains). The inner
        # non-capturing group scopes the alternation to the address, so
        # each extra address -- IPv4 or IPv6 -- must follow a comma.
        # (The old form `(,\s*[0-9.]+|[a-fA-F0-9:]+)*` let a bare hex
        # run match without a comma and never matched ",<ipv6>".)
        r'(?:,\s*(?:[0-9.]+|[a-fA-F0-9:]+))*)\s+'
        r'(?P<logname>\S+)\s+(?P<user>\S+)\s+'  # Could find logged in users
        r'\[(?P<date>[0-9]+/[a-zA-Z]+/[0-9]+):'
        r'(?P<time>(?P<hour>[0-9]+):[0-9]+:[0-9]+ [0-9-+]+)\]\s+'
        r'"(?P<request>(?P<type>[A-Z]+)\s+(?P<uri>\S+)) [^"]*"\s+'
        r'(?P<status>[0-9]+|-)\s+(?P<size>[0-9]+|-)\s+'
        r'"(?P<referrer>[^"]*)"\s+'
        r'"(?P<useragent>.*)"$'
    )

    for line in domlog_lines(source):
        results['linecount'] += 1
        match_logline = rx_logline.search(line)
        if match_logline is None:
            LOGGER.warning("Missed log line: %s", line)
            continue
        status = match_logline.group('status')
        results['status_codes'][status] += 1
        # Requests are keyed with the status prepended so identical URIs
        # with different outcomes are counted separately.
        request = "{: <4} {}".format(status, match_logline.group('request'))
        results['requests'][request] += 1
        results['top_ips'][match_logline.group('ip')] += 1
        results['user_agents'][match_logline.group('useragent')] += 1
        date = match_logline.group('date')
        hour = match_logline.group('hour')
        results['daily_hourly'][date][hour] += 1

    results['requests'] = trim_dict(results['requests'], numlines)
    results['user_agents'] = trim_dict(results['user_agents'], numlines)
    results['top_ips'] = trim_dict(results['top_ips'], numlines)

    if add_ptr:
        ip_ptr = defaultdict(int)
        for ip_addr in results['top_ips']:
            ptr_record = ptr_lookup(ip_addr)
            ip_with_ptr = f"{ip_addr: <15} {ptr_record}"
            ip_ptr[ip_with_ptr] = results['top_ips'][ip_addr]
        results['top_ips_with_ptr'] = ip_ptr
    return results


def logs_for_user(cpuser):
    '''Return a list of domlog paths for cpuser. If cpuser is None,
    return domlogs for all users.

    Filters out rotated/auxiliary files ("_log", "-ssl", "ftpxferlog").
    Returns an empty list if the domlog directory cannot be read.
    '''
    if cpuser is None:
        LOGGER.info("Choosing domlog for all users")
        cpuser = '.'
    else:
        LOGGER.info("Choosing domlog for %s", cpuser)
    domlog_dir = USER_DOMLOG_DIR.format(cpuser)
    logfile_list = []
    try:
        filenames = os.listdir(domlog_dir)
    except OSError:
        # A cPanel user may exist without a domlog directory; treat it
        # as "no logs" rather than crashing.
        LOGGER.warning("Cannot read domlog directory %s", domlog_dir)
        return logfile_list
    for filename in filenames:
        if "_log" in filename or "-ssl" in filename:
            continue
        if "ftpxferlog" in filename:
            continue
        logfile = os.path.join(domlog_dir, filename)
        if os.path.isfile(logfile):
            logfile_list.append(logfile)
    return logfile_list


def choose_logfile(cpuser):
    '''
    Determine the log file to use for a cPanel user.

    This is done by first using any unique file, then using any unique
    recently updated file, and then preferring size for the remaining
    files. If cpuser is None, search for all logs.

    Returns a path, or None when no non-empty candidate exists.
    '''
    recentlog_list = []
    logfile_list = logs_for_user(cpuser)
    if len(logfile_list) == 0:
        LOGGER.warning("Could not find valid log file for %s", cpuser)
        return None
    if len(logfile_list) == 1:
        LOGGER.debug("Only one log file for %s: %s", cpuser, logfile_list[0])
        return logfile_list[0]
    for logfile in logfile_list:
        if os.path.getmtime(logfile) > (time() - 86400):
            # File is newer than 24 hours
            recentlog_list.append(logfile)
    if len(recentlog_list) == 1:
        LOGGER.debug(
            "Only one recent log file for %s: %s", cpuser, recentlog_list[0]
        )
        return recentlog_list[0]
    if len(recentlog_list) == 0:
        # If there are no recent files, choose from all files.
        LOGGER.debug("No recent logs for %s", cpuser)
    else:
        logfile_list = recentlog_list
    # Pick the largest file; zero-byte files are never chosen, so this
    # can still return None.
    largest = 0
    domlog = None
    for logfile in logfile_list:
        size = os.path.getsize(logfile)
        if size > largest:
            largest = size
            domlog = logfile
    return domlog


def print_title(title, width):
    '''Print a pretty green header, truncated to fit `width` columns.'''
    header_format = "~~ {0!s} ~~{1}"
    base_header_size = 8
    # If there is not enough room for the title, truncate it
    title = title[: width - base_header_size]
    head_length = len(title) + base_header_size
    long_bar = "~" * (width - head_length)
    print(
        color.green(
            header_format.format(
                title,
                long_bar,
            )
        )
    )


def print_tall(title, array, numlines, width):
    '''Print pretty data in a tall format, with one entry per line,
    ordered by descending count, at most `numlines` entries.'''
    print_title(title, width)
    line_count = 0
    for item in sorted(array, key=array.get, reverse=True):
        line_count += 1
        print(f"{array[item]: 6} {item}"[:width])
        if line_count == numlines:
            return


def print_wide(title, array, numlines, width):
    '''Print pretty data in a wide format, with many entries per line,
    wrapping at `width` columns and stopping after `numlines` lines.'''
    print_title(title, width)
    line_count = 0
    current_width = 0
    for item in array:
        next_item = f"{item}: {array[item]} "
        if current_width + len(next_item) >= width:
            line_count += 1
            print()
            current_width = 0
            if line_count == numlines:
                return
        current_width += len(next_item)
        print(next_item, end=' ')
    print()


def parse_args():
    '''
    Parse command line arguments
    '''
    parser = ArgumentParser(description=__doc__)
    parser.add_argument(
        "-a",
        "--all",
        action='store_true',
        help=(
            "Search all users. Do not limit search to single user. "
            "Overrides any usernames or paths given."
        ),
    )
    parser.add_argument(
        "-m",
        "--multilogs",
        action='store_true',
        help="Return results for all log files, rather than just one.",
    )
    ptr_group = parser.add_mutually_exclusive_group()
    ptr_group.add_argument(
        "-p",
        "--with-ptr",
        action='store_true',
        help="Get PTR records for IPs. This is the default.",
    )
    ptr_group.add_argument(
        "-P",
        "--no-ptr",
        action='store_true',
        help="Do not resolve PTRs for IPs. Overrides -p.",
    )
    parser.add_argument(
        "-V",
        "--version",
        action='store_true',
        help="Print version information and exit.",
    )
    output_group = parser.add_argument_group("Output options")
    output_group.add_argument(
        "-n",
        "--numlines",
        action='store',
        type=int,
        default=10,
        help=(
            "Number of lines to display in each section. "
            "The default is 10."
        ),
    )
    output_group.add_argument(
        "-w",
        "--width",
        action='store',
        type=int,
        default=110,
        help="Width of output in characters. The default is 110.",
    )
    output_group.add_argument(
        "-j", "--json", action='store_true',
        help="Output data as JSON instead."
    )
    logging_parser_group = parser.add_argument_group("Error logging options")
    logging_group = logging_parser_group.add_mutually_exclusive_group()
    logging_group.add_argument(
        '-v',
        '--verbose',
        dest='loglevel',
        action='store_const',
        const='debug',
        help="Use verbose logging.",
    )
    logging_group.add_argument(
        '-q',
        '--quiet',
        dest='loglevel',
        action='store_const',
        const='critical',
        help='Log only critical errors',
    )
    logging_group.add_argument(
        '--loglevel',
        dest='loglevel',
        type=str,
        choices=['error', 'info', 'debug', 'warning', 'critical'],
        help=(
            "Specify the verbosity of logging output. "
            "The default is 'warning'."
        ),
    )
    logging_parser_group.add_argument(
        "-o",
        "--output",
        action='store',
        type=str,
        default='',
        help="Output logging to the specified file.",
    )
    parser.add_argument(
        'sources',
        metavar='(USER|LOG)',
        type=str,
        nargs='*',
        help=(
            "Either a cPanel user or an Apache domain log file. "
            "'-' will be handled as STDIN. "
            "If none are given, then the script will attempt to gather "
            "data from the STDIN."
        ),
    )
    args = parser.parse_args()

    if args.version:
        print(f"Apache Log Parser version {__version__}")
        print(f"Last modified on {__date__}.")
        sys.exit(0)

    if args.loglevel is None:
        logging_level = logging.WARNING
    else:
        logging_level = getattr(logging, args.loglevel.upper())
    if args.output == '':
        setup_logging(
            path='/var/log/messages',
            loglevel=logging_level,
            print_out=sys.stderr,
        )
    else:
        setup_logging(path=args.output, loglevel=logging_level, print_out=False)

    # PTR resolution is on unless explicitly disabled with -P.
    show_ptr = not args.no_ptr

    if len(args.sources) == 0:
        LOGGER.info("No sources. Using STDIN.")
        args.sources.append("-")

    return (
        args.sources,
        show_ptr,
        args.numlines,
        args.width,
        args.json,
        args.all,
        args.multilogs,
    )


def print_results(results, numlines, width):
    '''Print out results to terminal.

    `results` is a list of (source, result-dict) tuples as produced by
    parse_domlogs().
    '''
    for source, result in results:
        if result['linecount'] < 1:
            print(f"{source} is empty.")
            continue
        print(color.yellow(f"Results for {source}:"))
        for day in result['daily_hourly']:
            print_wide(
                f"Hourly hits ({day})",
                result['daily_hourly'][day],
                numlines,
                width,
            )
        print_wide(
            "HTTP response codes", result['status_codes'], numlines, width
        )
        print_tall("Top Requests", result['requests'], numlines, width)
        print_tall("Top user agents", result['user_agents'], numlines, width)
        # 'top_ips_with_ptr' only exists when PTR lookups were requested;
        # using .get() avoids a KeyError under --no-ptr.
        if result.get('top_ips_with_ptr') is not None:
            print_tall(
                "Top IPs with PTRs", result['top_ips_with_ptr'],
                numlines, width
            )
        else:
            print_tall("Top IPs", result['top_ips'], numlines, width)
        print("\n")


def main():
    '''Main function for script'''
    (
        sources,
        show_ptr,
        numlines,
        width,
        show_json,
        all_users,
        multilogs,
    ) = parse_args()

    # On shared servers, limit the number of log files searched
    if any(shared_type in hostname() for shared_type in ["biz", "hub", "res"]):
        log_limit = MAX_LOGS_SHARED
    else:
        log_limit = None

    # The complete results of our search.
    # This is an array of tuples, with each tuple being
    # (string, dict) where string is the source, and dict is the entries
    results = []
    if all_users:
        # If all_users, ignore other sources
        if multilogs:
            LOGGER.info("Source is all log files.")
            for domlog in logs_for_user(None)[:log_limit]:
                sections_dict = parse_domlogs(domlog, numlines, show_ptr)
                results.append((domlog, sections_dict))
        else:
            domlog = choose_logfile(None)
            if domlog is None:
                # choose_logfile() found nothing usable; parsing None
                # would raise, so just report it.
                LOGGER.warning("No usable log file found for any user")
            else:
                LOGGER.info("Source is user file: %s", domlog)
                sections_dict = parse_domlogs(domlog, numlines, show_ptr)
                results.append((domlog, sections_dict))
    else:
        # Loop through user/paths, adding the results
        for source in sources:
            if source == '-':
                LOGGER.info("Source is STDIN: %s", source)
                sections_dict = parse_domlogs(source, numlines, show_ptr)
                results.append(("STDIN", sections_dict))
            elif os.path.isfile(source):
                LOGGER.info("Source is file: %s", source)
                sections_dict = parse_domlogs(source, numlines, show_ptr)
                results.append((source, sections_dict))
            elif os.path.isfile(f"/var/cpanel/users/{source!s}"):
                if multilogs:
                    LOGGER.info("Source is all files for : %s", source)
                    for domlog in logs_for_user(source)[:log_limit]:
                        sections_dict = parse_domlogs(
                            domlog, numlines, show_ptr
                        )
                        results.append((domlog, sections_dict))
                else:
                    domlog = choose_logfile(source)
                    if domlog is None:
                        LOGGER.warning("No usable log file for: %s", source)
                        continue
                    LOGGER.info("Source is user file: %s", domlog)
                    sections_dict = parse_domlogs(domlog, numlines, show_ptr)
                    results.append((domlog, sections_dict))
            else:
                LOGGER.warning("Unable to determine log file for: %s", source)
                # Exit with status 255; the old string form '255' printed
                # the string and exited with status 1.
                sys.exit(255)

    if show_json:
        print(
            json.dumps(
                results, sort_keys=True, indent=4, separators=(',', ': ')
            )
        )
    else:
        print_results(results, numlines, width)


if __name__ == "__main__":
    main()