From 8d883f25c303afd4ac8fb2dc5be05800be7551ab Mon Sep 17 00:00:00 2001 From: noisedestroyers Date: Mon, 28 Jul 2025 18:28:26 -0400 Subject: [PATCH] progress? --- .envrc | 3 + .gitignore | 4 + .swarm/memory.db | Bin 0 -> 221184 bytes analyzer/analysis/background_analyzer.py | 364 ++++++++++++++ analyzer/analysis/flow_manager.py | 11 + analyzer/analysis/statistics.py | 16 + analyzer/main.py | 33 +- analyzer/models/__init__.py | 52 +- analyzer/models/enhanced_analysis.py | 289 +++++++++++ analyzer/models/protocols.py | 258 ++++++++++ analyzer/tui/textual/app_v2.py | 127 ++++- analyzer/tui/textual/widgets/flow_table_v2.py | 253 ++++++++-- analyzer/tui/textual/widgets/progress_bar.py | 121 +++++ .../tui/textual/widgets/split_flow_details.py | 59 ++- data_summary.md | 461 ++++++++++++++++++ requirements.txt | 25 + 16 files changed, 2004 insertions(+), 72 deletions(-) create mode 100644 .envrc create mode 100644 .swarm/memory.db create mode 100644 analyzer/analysis/background_analyzer.py create mode 100644 analyzer/models/enhanced_analysis.py create mode 100644 analyzer/models/protocols.py create mode 100644 analyzer/tui/textual/widgets/progress_bar.py create mode 100644 data_summary.md create mode 100644 requirements.txt diff --git a/.envrc b/.envrc new file mode 100644 index 0000000..d737163 --- /dev/null +++ b/.envrc @@ -0,0 +1,3 @@ +#!/bin/bash +# Automatically activate Python virtual environment +source venv/bin/activate \ No newline at end of file diff --git a/.gitignore b/.gitignore index 09f70d9..864072a 100644 --- a/.gitignore +++ b/.gitignore @@ -21,3 +21,7 @@ wheels/ *.egg-info/ .installed.cfg *.egg + +# Environment files +.env +.envrc.local diff --git a/.swarm/memory.db b/.swarm/memory.db new file mode 100644 index 0000000000000000000000000000000000000000..194e0fb5ba8892cae07165c2ea462013e890d75c GIT binary patch literal 221184 zcmeEv33wz|ad7JxNh8f@Y-6)F_TsVZ^{&wNsINJC7HqF~*9Um*wRhLXUK@Ed)6$GI zm*&uA)~2QPu32Ls90>#h!PuAp`NJ6qkeHBQju1lvA%uY8Cy;Rci7^DoMgDrP?>F5u 
zJ<>>{HKWfhKlQ$TM^(LgRrTss9lrU7OhpP#6-v28CCF1}QQh6tRly)dQ5VA#fk*r4 zg#voCpDwLX=l>T|q0{I11HYpBhQ31i9}0ZJzsL7Q-_`I(=cfaK4g@+7=s=(Yfer*Z z5a>Xl1Az_%S^`M4~b(_j~%cvC!XMvOj6Dm?=qR>$iPQ-Kn*0 zdQ<-ebr!tsx&yZy2pVsK*B=hvc+-*KjYn^|A=lf#Eer$kk^{*na#FdNNJ`eBc$|i! z*S8sr{%@m)diu7Fjdk5#Nla#?@>~}5sEM*PSC#SrX6v85YEMV@UVX!XAn}j8P?Nji z%h3~KUH#ieN4p+48@Fj4t@W>NON-Wj3+@^Wsy~?&;PuFXYY*HKJao(T2lw7`d+;R( zZV&D~dgP|-Z-gce9=P$yZoT%bv=lsY;H3af6{5yJ%_p)|$>|?;m{=`SJGe7n zSlBr_YJ6KQLW67k)23SH1LzazXZM;_*UTSfv*If417NDmx0Fv9}j#u@P7jD4m=!qYv8v7uMM0IoCw?- zSPYZ`*}xQho-E{YiF}GqXUY{|HE}i?=A*pGb73JW-ZP^9^GJN8T&R|kQgB|%&yP=L z^5cnQB{L6b3toPCP)eo?!8=E8&6FzDL^cRjGNnR32U7@Y)lzqkpqdNmOfnrTE>+Tn zJo;NL(e%7jDrXA$k=-M}`6~MgDGB;y;7>$2Y0VA}`>0`MCpfeI@caJ3Ir}tM&i3*xu zoY_5+O_VD~6`HR%spCbdgv~p^mT0wG7@eq%uv<X=x$$P3e4-9CmeVVWlFPzagsrU~7(i-HxP!T{V;^HepPy$Rz&A!waY z)j4Y?I&*OjDZX$AT+t_Q!wUw#Ek%5N+f4% zSODU262Sy^10AZILWhaA0@}DJj;9NS*)m2IjplmK2zqg3sfZXOLLh#tEGfjn+lWRB zlQ?ds%@f=DhpF36PL)ay~N6=aVqulBA>Oy-I$@|+PE1LdA)*5O4I*0xBqG=@ z4&;%D3b<8vHCX+47cL(vNyVb}UZa$fWJ4(GOK4Ty9X1oD)`=m?c%k;J4E6b`YgaEI znR<;J!ZH!wOb}BbB?Uu48i`dPb0B#HL<_p0zX{d8QtMP;@dQ)Bmj}~QA{C^=02E^n zb0KW7As9<5g$=Zo<3~>RqMiIwcRM+NgrjHjVtT$Zl}YNr(U?|5+_;S#fq-jrBiCoS z($#}zIZfi+ej|67CD8ANl7Rc&S|Jn2qoXTBjzq>VJWprm7Aq{5;5l;J zZSBBYx3@AO7`1iyF5H^T4B{vZ z;n%q-`1PE-;MX}v;a31PKYRdw`J?d5htBec#^Bdr5Po^jgI@za)J#vn55K9MfsY23 z1ODOcFzY)p^m^|vyr1Y_>U*H?V9$$t0^QH=`X}lc>Z8Mt`j>{j=iz4XI|fe=d||-j zxwH3m{U7dq(crl6@t#$GPtRxl`;j)*~nzay>_fiQ;rD%BOGR;OpN%){rh_* zUjIT>53B=5pGE0(Nn%r#xw!=!RsHI#dL^S>f;u)4A?tN_pgtBq0x7bRR+sENa~t_Ca(Rh+Fb;ZiLxBM zAcARZK0TMz{#hle*j2sLqk0=)=c-6Jo4E)x~4K&NjLmW zV3lVAt2`4(<(V*4o{7+OrjnV?gYv8y2!*H+;Y4!_VT=MW=~UwAmu7fUD$;Y|=|x`T zvIPhQuHC$e!`8#55Qi>5_!V zLdq-3Q@oxQH*QQx`8EM)oW?fGGKVR`W+qIGVdzRGm5P=p8N6p2oCZ~A>#ZIkA^^|O z=vG}lOvBZKK8uKSE}PCxPq8`CX7z*`t{$=xZ4~P2q3u@>`4x#e#Kw2quO1=7L?aA6 z9a-Y%63bK7+SM~wA65(-j%cG)Vf6wRu!7IPLR_5Z*yPNTWCN>RE?{MCL>q+)tDR1; zvVLU;tIH30WN%Z;4iwi?QeSdHBogJ#OKyk5lEW40$wGynn_rmCkz+xmdRc=dhnnE% 
zXmpA%=Q5mS$z6J5@9{OQI6Rx0)i&a=+BlH|YyYws5vmL2FigU{ucIkkQkR0bSXhi| zxKk-8*qDOQXAzngN}1V6c2TgQpyxNHAfgd%6e{M%s*TMHGE6RV-eZT9rYi$aVI)9K0OSShs- zS+>F$&E~oG$l+rVQRL}jikr$7SPlSTCS-)1^Xd^Ye9p5ZA)nijknv{`da1}|mluVp zSsOw=ry(JejSz%wqlcD?4NjI1n~xwPe>4 zc7J_XfrW_IMybN8Z@hO;jpn7DwVJ>JTIWRz9{DzvrJ+C59$9RL!zJ>G?9#GSny6GW z6G-NSqw9%8u{bf$g^KDyxWI&YhT|-o?jm${wmpv!8t+-~@mVhD!D>7+O(V_qD!~XSVk(y*KuD^<3Beny$A8z7aS!{1yLy`mgnUXXu}Y?i)J4?;Ow7 zJzwv+xO-}t^Jjgx3@!|Yyx;e})qB`GIPl(q$$<;{zufnOz7w9~y?@wyckgq0?&y9; z*9QXs8Ym3EVtCm1^P&G7di~Ju!O!%+um8>c_xI2B?eyH%^Z$B;?t6xF{>OX|4SsO& z@Zg~L&%KM@@qvFHI5luZ|G)QN-uG$GCwhO}d#X3yBX@tU>+4+)!p(@^4vY^!=6}+E zq3^>(A0IkA_!H0fJm>bjr~9YfvEhI5`TOI2d%Auy{9nFH`futx)ZG;r_6hyTzEn4I z!q9%61L(0H1X5o+yJttY*-B3R-MKwuS}o*3ge=C?S5Y;yS)2NbO=Ew9J|&Ew)R$33 z?WJPRq`u_u*{;1SQ%|DL2>TlK*XQ@}-8S|l>aWbTRhuF81!B<1bW45SIz;^Lb2an* zEb%Fx?-Moi{fxCST9fJfu(5 z4E*EP#&E5VnTMhc{G(0-|A@J^Ht-J6QqCNTw8A9HS>8Z@hOJkTWaR>yY};Wv$YTW@te#e)F|h7oaXaJb8T%tZy*MR z$NqY2U-;eYP(_V%sD3EaZ=s6XOO>6x*1}F+Qv-%q6FuUYJX8aQSJ{K%LF>@)$ER!h zc%@w*57hK=%D#`2HhtW0H~kYf(+4Lv6E|J4HiaQ6+q`?lIX$CY#%_S>-ByUUdtQ%rnrrgJnXPSphHO4#(|nq2e!8|HiEJok(@>IZXtK7UyUB(oY#MqQ+0b3J z4ILvJy2GZS+sTGrTHDaAWJ5=78ahHYbhx&mo5_X_*)(($+0c!)xI9?f{0-JuXsx^i zbwDuo7jNs~yKH2Z>&$gkHh3*DK-A_nwcQ*rf2#7^{SMt+ZLX_!v)87Zt7^Ns()_8~ z%@q#aywF@%?PiZnH}TqTL{v><-kN72s;0eDeFc3zx~pJ0LZUWI^o6*KK%$nnMJ;C? z68e%g&qkd&<1q7(xvn~Mnivhj<8Es==+&6{Q?;8HICQhiTvzR8r%gAPlaSd#R6_H+ zjD*akwvZXI4jFxUiFp|6kb@3Ge!jV`I^^xdXb>1KvUY=BT}aO2dBk^UEEkZoINx>_ z+pNu_FQ03Dg?c>CJXm#N=Q>R69CKZDVrLTrL~Wjh-f2{;*> zyq60667=Hl!)^b^1MdxdB=F5ZCqkd5mv{c(fj|cW9SC$F(1Ab)0v!l+Akcw82Lc@k zbRf`yz-B_g+tb^fe!%ll>Jhc{dFstdsrQ4_gKFsm)X9OK-mdfmeIK1w3Z8Et#D!D8 zeu`5I9~n^#uiJ+7|0f4hRNz;Ee+&FGME(1#z^4O$5_o^$vB28`Zw$ODunI8&<^t)! 
zMBvuI!NC5&o&XmZ4Fm(*0>c5%@P7~gZ1_jR|9AN7!(SSHV)##oKRo>2;fIIcJp9_> zQ^WTTS0R=_a`=wnn}@F(zH&G+Ob=f+eBtmp!`@-a|4aYB`oHi0rvEGcFZlo5|1tj` z`QPRLJ^vg05BgX9Oa7vN+W#{DQU4A8eg3$g_3!du?0>G`@9*{fm+u+h4}IV9{k`u= z-)DTE^nJ+p9^X5BZ}PpychYy>~>806cGa zU)D=s(o0XOrKhOB)=Ph-m%gBvKChO(MSV^!eVO{KUV1_=eMT*PiTVq*^hxT|dg)Vo z>2baEXKLx=)Sv35PwJ&l=%tUVr9YuQrk37AeN-=fL@#|6rQg>} zZ`Vt2Q%kR+eorkuNWE1ry+tqmu39>+irS}sd765Y`mRj!pYE(yP?c!>WjX>K6}Fr`31wpkAq$9?(mt^wLSabiY#i z_tVq~y|k*AR@BlWC99U3u zQ(7UZ6(-ffWp}HEODD9#%e2B>YT=S&Y9V-sR=8a&yi_fmf2&&9c2p}I(F%u^!cTvA zvsO5y6>icBH>!n?se0Q_KX!xqX7nX$Vb_bb!gX5VTCH%6TDbUtT6q3`t#Gwg*sB$; zQVZL!R14=^p%q@J74~R_xLP<{R0>ajH>MS$S|Oqq{x+-?1g*fUg(p=t^yw#A_08uP ztq{@*v{u-y7XETfE4)A}?9vK5wZi3U;g5G{h0CC^R>cu zt#FZAc=d&9;i2bgg$uO8`D$Thn^t(PT39|$E1atp&d~~IYlXAa!oGl37}g4YweTXJ zRv6R@Uac^o7GBt|7Q{ZK@XMz>+M8aj(4!T)`+Iu55H>c3?*9)#n!n+ThCVv@A@5&P zk%7ni9_{sZ|ERl%`i7IRAzy-{+F#pjRC`v?W5KT>tUVWpaQ$p76bZ9;IE8zM;FZ68 z3O&zTx!i|B1K&i3iB)|pv`93a6a}65|DE{%o%sJ+wh~kPoR$R}?!^ChlcJ?25V4-R zqZ9w%CRc?uYSMaQ6syTWthWuUVW~Uu{~<|0C;oq{68UuE{~NidI`RLNR1nBd+lI|q zLzuN}OYX$~*D|BkY`dNK|6J?h|F3axZEfNNYxV+^Kqsa>NdF(`x}WlY+57VTpLjmf z`?;R)cK;Y&bbdAu0xQY$AkoIEI4b*{5^dN-f7LVgKrS04Az}(b?dIo!N46+ah8Pzj zA&5HboGF7CRvV;HV&=7^z6_*}AGvZa7=%`48ySRfq+By_>t*~aW{SZqB)tsMG)}Tu zdC^o!l9o~aE%!;GFq|PgZ!42R8PiUl<31@An!737uMPz*DO4EpQf^3gM->E-CX+%L zvz$2_%~B+htKNpPWZfVm3+uCNkkrgQu3ct-GUMGv_9uO^Cxo*g`_uiIG5H>6eqli* zj>p^Xd1B<+i+KdywM@)wAHA+2%~NamOpQFJlg+9>Fig#^PK?Pr6tdB7KdLt($-EzA z?CN_))DMg_v@8=7BVf~4vX^M@ud6~TNmxuwFqg?!D-uq2i1OPWNt9!b3N_#I#$e~nGZ*w13GW4p)2PoStI zHq3?~P&gbd5O9IyylTG8+Dy7=$5SScbr2@_!uG&yzW&-Js z>P-dlMU`u>hlw{cuiYcS{3S?uE*(~qq-*(WRmhuQiiGWAp;S4jrC`>QwW~WD;#O+c z+8%{cl|LKzE^YHFTMYQ>aE*63`K&@2I9swZ$Kl0tr zM~cgKH#vLp)J$oG^hs*!8M{F_do>W$s%P24f|DzTF~#e*!xZHO5^;9And1~;Q+0($ zcdC~*A&gTOs+T^O)xt$c^}1wC79CVCat1Z3*9J_(S+gQXgP3@0a@x0t)YhO@xq`Pv zK0#8tj`5Yb3z2c|+EMvJCyd+V&C-{qBi**fMhAq0*;J z#oJ@RItm2yS8LD84bK~<*kmw6LV?m3*_K|jAQpe*6%Dj`KX)J zk!%A%hEyi3XrY3hl#)nJh89;QC&5OOE7m9h2vgw@i~(Uv%BK_gq?FP`nkgw!sRBPl 
zf{J2Cgzqb*DzYOg3d2DuS12L%OnnQhIFr9os+JPjk@!?1Tb7O=H$w?P`H=S9KXCyt ztp{eu{dZ}i&O>=F9cdh&m`qho>hP&N=FCK zO;jpUt_Whh3dP+cpvIMPjT;L=rR$1R3MS?gnQUSbi03#`{SY#a79m3@5YxJd%2w#s zXa)Qn>ye%*sU6hQ#%jz2q3>`8jL~r+42BXU`Xnm~71g7EXbU)rgtT9PQ_W(4`R6g+ z9YG|R$xkZ^u$eO>`m)HMMJ?bI8t2+pAUQq6FmwuE^`AdXO|0g2$fE{@)3#Y)iEMzs zX~?q#0@Vy#n5n7IS}TSu_>i;PfPtSa-#3mKcz3T7kGGl3x+LA{in5{=X6ckPFJ%j$ z46b#=NNcR&2l@~-=6t*+lrRw9iuMejl|J?B*WDj|F31QEct+)nL4FnnYnS(N&7_AW zjnfvx@TBc_uCR(spEyVK{zN6A@$dtSV9|k181JWWjGD=2 z_hGA^wof;~LebzIS*XS=!LP2CSE?`9PS%)W9npXms!qR({(lkWFZtd&G&V5bzo+-z zJr8w1*0n-iw2~Zaa4cuTf0XX_)G0s8F>M^r*JK) z9g}8_2+Cz`1}-+N@07qC559?)?NWP0UYtQR#?d=Rb_TDwB6#n32>n?~1~0!nSm&E% z%O~(xDG6|MZzaK4fgvKSShOkE)?VV^K?4x_znt9V`Vbq4(vrSghnZ$25#bifMjT=* zgb95k5Wr-LSp+>Kv*%!uS94B=5`z@Z-wCr^J&z;>TSJM{EYY5jOc#QiOc^fiE@mom zVRz9%bN^YFg9>x6NiB9SQXP8w9c1gIdf zq1GUQTcRfm6@G4hVK#@zq@FEEsNvLE?{rmE-4r?lNSM*fA#Wagdhl=L<7KZbTm4}mvb3z3dT=r=8B517#?D= zwb~uj!ET1NL5B-D6SUrSqFu>fifjz~cF5-$L~CzjK*Vc;(HfTanSz~GnG-Z@n*lB# z9`RGh<>cCJ2uiE#)-iSZFl&}T_$GB$J=@2c7i~&C0`qxPd$6tGBlizo0$SCny`%C` zgN!V;0=xY0O_ESk76Nn_CWbi9*+M{$2r;cQ#zNp8dz)z!&YBJWq)kF}PD9h0cVyRM{PtPU6 z3h&@*0{RmViam=rAM7A#?`pz>RD;|H_v!wE;DT7=FIdObgd7r(vtoYQFjtd^7!lyW zl<+~({3LmIevsgm;Y635QU4l{Z7)0H*66wL^dc{E*#gw7BS1nR3Lyl&Z<_4H(>fKe+`}Vb`sfaMyFtbF!n8t3IPlwD5yP?5FNO$fD!87mdny8N_83 zv7=0kVdzRGm5P=p87^KdNoydi=EQ6uHYNn!;yuHjFIYzf^FM>c?540-SJXx>E+8yn z&jn5ZYkjVe%1mX@VT|EfA3nmu$tD&>A`BK8*LFA;GLJh`-fuWbs9rTjTnxnWt!B#R zj_NF3H5Sr!s+MnHkmu`#rUlZwEHl3TEVsjwvsq(FMn8R++jYM>q10aZC3D0tvQ^)1inBvtma(T^= z7_&dgahPI51iC+S(UIxm%tfEXYGOA?n)g3vOdeC%sLh#+b?7x`F7*kpsqrhAa5x>Y z4g-j`HK!wOA}-A)pdEr(d5-oLW~r`)hJ<2S!blR(ag6l;F6!SX^xtp5&)bHhaOLh# zmPTD&xg+fa`Xv&(D|cjNM1-|h?zC%M$UB7A_;+20R%6q!>b=mueiP4)qm0oGe0fG_ zz}DMeW{ANI+~2+fP7beDGYQ)tq{q~)C5cU0#6=R%^GJKrXG|KQXp(DD8BQmI0 zM`LGDEQbv}>}wUe@JJh@%Uo^!=pwxhb;M!E=w=Z|w&sXJ!tJ^d3OCw;ymcsqk-9;l zQH?<1UHazc;%c%0LSmFLBgyT?SZBc~^51$yU@J7$A!$q>M%xq%L&9jI+pDU~s#GHY zs9+a(#zTr%^;D-%6@VcV>v<}wFU>d^OQmt87b73JWHV|3jW>rEz6_+aMLLU9CmS}n&UCoCpJjj}usq8DH 
z&^;EkKh(hv1S<%KV8Cm)c(f2ZcrcBzW3JXTb~^DQK*XpUvO^C9Q9opdUX4Hjh%ng3 z=OSJNY~04lH!{(fVjE~DtV(PMxYn;;m_XIs#EYoWOOgJsAIKv8-$(t38qWHDJ#^0C z_1>w0*Y}_8IlK3Yo@3qj!aqD~epb&ufNnEPk7_=LHP)zZ1CdzOWoybniXUf2&GRycjPN=v9Jf=nWdbv2yhF~~|AK4nc$k9SQ>n($g zzvSQ!Al6X<_5Ji>?UC=;j||&U(oCuuM$|A$;V_8zG2QeF%-q~_N=gEUpMcvO2?YbL znSL063=Ss@zC3NQk*(d#{nO{ePd(jjwEo+>TRh5}huM#^!l7HhLRVT0MRhGJ21X$=5m< zy~&Bx7`>Yz&Na-&fg8XQF%b|Fc3KkWp-qIh1`~KQV7DxG#FF$?yK>)^Xqz}Xs@){6 z-6jN)7b0{um04IQE>>gpwuu_v)Uw?HJT_qz&btFK$Tsf|HE^ii6t)8v@FO3(0&NPH zl1zo%6ku}{Se;QCTx3o1KWqh%SqK_jyZlcWR$w~kXpemN3*B!8XkH8*4u@y68LXnXKX3s|j zF-9ksO5r58G@o){_U7JO#^Tv%2>Nzr@x;*D$KuViuVw6K0~f^4>e(?Ae|Fk!{8>_S z)sJ6IG5nsjj@4SkJ~e`&uJ%Ld2kb}-(Sv9$x*=$O&M|07DB}2khW_9CG{pb^4(0z> z|2e+#q3gZXzHMFK=^1#|sQtg;6It1D5Ij9!vJ%Fgo?UAR#F79*$I`M?8n09{;}!7F z1dGS`LMAmWRm$U2pe9aKPy~gEc@ETXgHEwow&@@(O?3tw&+IO$X#cJ}Mt}M-$PxJ^ zU`!wD1DDYUJl|fGZUlei6UiNN(r_8I^+%3|d6r@6*=iQze8DAKcW$Fi&kI})?Om3` z@h~5P5ukm+){TQ4Ak;J6YgBwIkspI{L6hS^gmCX(K3;Dk&=YK+TGSJgG)9iM%B{KH z7-}dI@w=b?PeY_a4;EVGqn+%|ts2hq2qHSZ8?<0|%mQfs?gje6X-P6C>J-;v$)X z9of`&cheG|^xnjqTu;I8M#Cbj>Dp#W1x?(qCCkQHP5TzP+vU_A> zy|tT#w%MV(2Em94p*4n|)tg@etLwzhQOEop;RxgmjnQH$B68Cl91r39392E$q$MXI z^iL*-LSF~9;f8V$m2Xe5SjyxpyLKLgKZ1a>YE}v=730C1Ua}K{0|DK)OjAK|-W4ml zy>XJ54Rmc)6E^Lrf?glMtfNLa9n~H=`C??Xx}LOukXEY*Bd~@SyMf9Er%KJ7X$VXR z`e>ygYZuHCKl0tzxi@5?sR9j8&51MBd?Ha}$l~Ezn_>vjOj{77(xR+`Tmk`3D>z^$ z{F?y;_`~@uA8dIT1Xy);YsfODc>A?zirYv;+8VN)rnpwTy0!}$ICf#%(&u&}a1F9; z?H!YEcd~5}Gjg(RHAk+^O{5 zD+6yDm>9Uc|Nr%WsDG*d%Ko0d&-Xplcernx=Ubk4c&4e-p0KB<_iuXN-ka;))9dZ| z`<_R8N_fy!k*87AN!E{_Rk%#Mk5@mTt zAba^87Qm%brUhKQ7)C5nBtCg@-7l~=1@XoHx?iAE2684>)cZp5$R_$=>VJXaTM}Qq zpzasgFP8WsSnmtP{ge3O+`3<2Cr086U;QtTV;%8DpV)ibs2y{QiXuac#hL1YuvpC? 
zDc`tlig+RQIS00gC^$+>Y$TPMj-?B>Z1K+=dWD=2f=EZQaKgA;3g>Kk{ZoftVG)Ey znwv>;3yIQ9*0$FVIrPc_O+@Hver{F}rD)lv*ALqFiq{cCPlgLi*-B;#?rPg?0dKSK zm4zHk980Ifa;chT*?F5@f6t*;v=n)IdMd^j*x7|i+g@Mq&}#&erZP0cRSLOuE{_k5 z2)w?|_Dd!b5u%YOT}q^4)1}37(q_Dnc*KU5fH^U-Fi(dWex{htC&M;hz9I%+c9B~w z6Ban05$T0xekPn1%az(M?{oT)<-{nR$VR#J%uED3LRhDHuib};XVq%A)(>A22WfyD zs>b*4XuVl|!F|6Gkm$uXN~>IM%i6Gw#qQaVl8b zr)b|NDc#uhx!b-^yD-pJ*p4~%X%&X-`n=rXQ$hr?`}87*;Sw29YC&|N{fDrBz`~}B zDGrQLEXUM-c%J=WNm0vguxB}ZN(fkXpAI+-mlI-qlwMBGN2d9F(uU>sJM{^B7)Q_K z#q@k-Dg#`omg9Cufth#-X2Od+kvqK>M7Mk7v&Xxzv5eMDq*%v~*GozO$?EjPR0;B3Od#j#G8`n;v`)SGu`QWTa=3CPfh8{#FhS`*CDdyy1kL+0s^o*<8HIU+N>^;7wCh=9`n&w6Z zd*4Y#yL?pd%X%;9`D)K=dS2Y)>HbvU6M<@AeE5gM@9d5ZPYz$=|BnCd{+Ic;`@ZRW zi|-EK`9ohDdc)8y-S>iJ;G2W*7@Qg$^FHnUhFQI6Sbe{~!C`-k z+4tkVKk6%Y{dd==x=wUm*X8Srd;Zt+gy%ueEuL+?PxZdD>x!-(>I>9sun_?BBXp{1 zI%|T=O_0l4;An0x9bFWQ{2a-T&<8KaJkaJ-CKeOuY&N$vGZ|ilPt77Ws=lQ6xul$9bM7m!a5pH*Md*SUo)Oa3C2%?~lv0^I=3LajH2OBiLkz^_#UeAsu$3jtx8H5_ zZA6U4=yH0hoLjCG5|(ej)!^F*%Yye(A)VmZYBDit`S$A#z6}d}SfH1tm)Ja$&w%=8 zn(tQ*fU!i^;8i~qlf|XhaATvRQ-8I$S2FAXwFcJORy4gztxJ8O*0uoq9NLw%99^Wh zw#_)MZrlhVkJ&WRF(1`BCbhjAHDa$cznybv(@r0Y~cmjM%oB-GI zgd238VB$lNCWi=WXxFD;-=|;`aLf>q(d<}$4p$B80(CkeNv?M5E;@XJI>^Y4EMUu?(=;m@qk; ziNW;;Gt2sTgKs%Bbh^kCi+qBORV?5BQKN4;IH#o(u|zbNFNjIYx9@56Ez3t@^mHX% znkgqI!_1P<&fY@VG8v#GFU#B+^4MQ0FndVVfH!xyIJ%mdEW|CD9JVAG_f$jnlBZj!O! 
zcDmlDya@Taz_>z}k_*#|#VKjQ-0NhcPeEGZX^|18*~nBlW&ZT82A?u9p6BQ^$3?2y znfaXg)0fu&6fBvsm`Ilk3v;{>&RXz#xWT7{N1eIX8|#0na%4R?sQFXA!KXx!e)Ff7 zHu#jtA!z>e{Khg3VY?+|n#UWrK-gi)7Czjt1=6NUw($1GEfDrmvV}KX1+txfXi+QM z)f)4N7OrjF0AZOU8ra*o0Ya=J8rajY0n%nhG(fv(z-j;^8ra#m0m2MMG_bv41Egt+ zXyEL|4G;z@q5-dq2CUX5qJf^qss^#3Y@~DQQ;l087L{EqA8FVMxv=b7`TfSN5R1#M zl{YrlS_p3mn^wS^hobtZCn?|ShX(uK=lRR-+u)Bg_Gh*4D0sWycA}^G@ba3WYy!O8 zCsZ%@3DwJ8vCo;j+_j)dXwwrR;_8MtZ-RVYli*3|kgisv3#DwV96=5+&Iz6(abARs z(@MbMO-S(c%(nC?L1)Ya>lhmYNw*Mk3Gmj%vDe)4OW-ax znN1{T0iCrzyQcI|it~QM`NY8a55m2#B}=uo3~Y_nq`epo7l3AePeCC*G!&aj7a(`k*Ni;KF&94MlWBw@K`xCyeOaYgUs%uS7CN5x>I&!j!FqXj|^ zOpOHiR+b%&`LtbjG=}0NI}#>CwD}fFIV39E%8p>@X&wJN^JPb4u_^lhy_E0a!H@SJ z1pR-Cx_9%`|5fQOmy&CYwHmcIj&C|?GhsFaW_3qxh8$o6iiDFY$OqV}9l0(=woh`s z6tooloIG}{f#e!vL^yk(7lmqWdU+`}vsEP5NL&y@FfwPn69WglcO+Lgk}D5x=uX#( zk>m=Hk3xpUj^wJ}B{bbGM|$BU+Oyhs$EHfIF?34kGGBdj>aN3XuXG4r{89okgUvKZ zYY&7I3&8;KlHCN!^_d_b0zW5bZf_vDk~3+KLAVHmWg zBX%9M_L8f@B{xM*fcWJYaYqmiGx_O}cpgGbo3E&%3DxGJLT}M5T5kN(NJobZ;(GdG zTa{kwR(2%38a7{c)X1|Qf7_BY(LUKx$MeZ!w>Fd=nK1admBq@uRGAVJTS#_fLNKyA zvZHn2u!=geqZ`?g!)HOxR!@BP#59)`^fxIK%`@i%C&kaR?fgHrxsot=>G7L+{*T-O z;btj6pPjBsv$+y*j5;>l2-yEaP=C`T;hjKM_&GU~Y$OR2GijG3Y=V%jBuorb7uO7# zcWX()Xg(+g*A|e3wV^mkJpfv>i>nS=UEWiKN;OU)Tr!1tK^;jrjubE?3G0h(Rho1w z2@@dSd`b9>rHS@Q!aAN$9=p4dB+PPr6r8Z9glr*_$*HLcHbOIN?))D%A}$0YbC!gO zfp;X~LLNCGHa!1V&JpTK!eJ&9}Wv9UUsw&Qx|Zy ztn8>Bac%|K@k~8!Ju|GR`eMWSf7hFg^Z$q8kIqlqA+XXv3%e1$vaQ8&`}G(cx({nj zsN!{)^7Yn%)q(UP<#8I;_(&6x`G0mKedWgrDY91@^~!}ySQU2-kjl#L+c<_WbSy12P}9INFBgd+lJp=OPO{*M-l@~rk{ zHc@g#R1DXeqnjqVY8}#PF*8z11@kV>`E=G@o9? 
z_4LKIDowkxde(w`<2aX%@93FL6K%748hAc=OloBHgiJIMhOSfu9!%ruE#o^HjdM&0 zM&=+6LXZz);J^zy=l`y(p7<=tK{~{~qhTQ=@Hmmcx}-xb&l%#EY?;rG?>iuHZTi{Q}pOthOWKD5ZYS9gQUE)=) z)I+R49I0m=ELv|7;9%o{r}aQDFD+Hm>1q*Ox~?Nl#^Rg+7w9%kn*90K)2Ec@WT*tv zhaNRbXYzWWCa)8FX$zVkbV5<3g)v3yY5iid!i@nt_Sv&|Wq$`mRr{dgLk{ zmbUUD8)b#{%8Ss0={|`u4FFDi97K&FXmq5{9V9|y!K}vSkVLq1hkU?TFxvHOQy@Q` z1?WI+A-g&A(Y&EDpMgsY>P}uk{J-(=^04^Mu+Ux0$Dep$;IgGB4DMyl);6dK3->=Z~*=DuK}p> zb25}`D0o?CvU!4+0TO$`%Q`MZ_gh==Li1^{-HxO~#~oSi+fncWSeoK~>af&>4~3~H z?ul?XymZtl=l}1ad`}Hs+W$=7sUE58 zJ&wwM=O5Y#fz{YOZWo!7E%G#W$sI5&ouZsH>Q83+{k zIeGC?BkhK-O;gb#v^D3#5Q`&!(U{`)XDeb5BL!4;w3`EqkaDIxf?e|=g>hyX4((Y` zTp&%cgOM6*HxODPtb~?WFRh=U+RZbtLh!SaUff{qhDWjETePI?wHw?6U0F`$$_vE| z0}G?Bb_44SMzm?#jSlLQp@l};4Kb5V({4;4abP_t{=XT*TU)!)=F<+dvevhgcB9}} z&!y04N5;Hty9@wj61rSs6(K5oJTL<$lCSJAxNjbnOK%dt$(YsNnTe5WMiC zod3Ut@_lJ20@+Xh0|5q}+2}~r$ zA@}bnGnq++rJ2QhMx=bomS3`LoZ~bH?Mk7N$R0_QXF+ZUIUWCb02Utn$@1#o`wBTo z@R)*EVB4$aGjmmGFLJF=K2gVkFIqJi6D(HvPAVlT(qScMy{0TcQU*v{Svd@8D?w>M zcY9&SUMv<$m4i~H1ZgY{mrFIx-bnnO5w*Mh5NN3aAVEWpFc91uln(_++P_>^g$Y0t z93RYO^3{q|Msn%8r>^GcI9yF8p}$-3*deDg=z{ap7^a7$(m~uG1}?nCpf$e@g`@Q( zWD~BAA6F+;wgey2Kn~a*fy2aqKdn9Y`{gcR`}ZeDSECB$fZ&lfWd#J9vB|T2&xkrJ z-QhY&Cxds6OqB|`pccn01nD6{YI=x}>VXro_yYwqIfT970rkw{nrva=NMbT8?Fkl3 znS5o}&hNhU^|k-1eO6XLI#taE7c!M}a6Tg~1S^H<>8un40!SB9<(;EH z)3iT>0%_HYIgT*ykR*bO1DqKl1WCt{CW7FE$r%;&^+mu6>Xj2yQU!(mLIwDHK9S9& zFeM(yqp9jKSRwCEp;`hOIaHF0MeV&7Q>rA1q)qyEGeKs{K{}fV&O;j47mNdJT7-Ni&Vac`xkh1+d}3T}Kt4J3T0qAD z(D*qKz7H+@q0yBI2RPd$yEnsGxoOd8;VVZ>+LAq7DM^Xk4RG6OZ?SkQXIbaOziFQs ztGuO)nGxd*T(oEKMSEL(lJjo^KHCN28oK%UuR#jJ&xvFABD!HmS3Qn&1F=e?bRrw& z(lav=HL}9i;!rIoB{l?ecH9@jke=5Jk}XS08@o-bP_Lz~{{ia4&r16EMjJQbqB++> zYXcEGKtI6IGjId0oka?8kuseJZHDWHancfcc|+qSf)$ZVyhWgM3ldxPp2%?y5Z>a( zO>{%Q#UWSSx^9@rs}3yn15h27_O2TndEB@~eP;Cq1M2qkkk1f+2!wcu> zDeQwKlIW{tBTS8dQr$HJ+WLlifB|+AIjLMsB+-P?yy-M3<0zytN}pCqfKroC;^2Zh z0#Y6j&|U~)Ji;*$aFmJCT>lN=TzJ z49^uD4F$%9bEsxoOrA`9!t0As9$Scfw$aMhCZz5T?Lt0I+0Y*3h8UTR-wc)0s+U 
zI$tPBTJmuL91Jkfv8NBftMz-6Z4sJ70#}g@!7Q!rBGVTRFJ6!Y%vXxz{Le7 z1ic|7s0VtQ2S++lu5sUhcS;g0-KB$AC&ZMXRktQ6pa?ve#&9-QBjBu?ponqkpbayS z!rM&$_MlBkNo1))rnBZD$}?J?@GSsQ7>pZ;8r7PGD5Rwo*t!s<>3QTJ1TH?vEyz7( zvat{}y+tS!2IB^fMztp4C`1GxO?x;Z2fzG$Fp&i}oTgxi{bHcmEdfJiF!c@0SQrCe z9u?G0!;oRa5qlU)$#fyOQv$0YxIYPZ zr72J{aV{2u*0%(atsKg}XsRShaLcO+rYwj^^oH)Y0<4w6G=#OKTC=d`!a~$R5~~Mm%sB2GxiwR& zfM^y3=A9`O@<MmiiYeB^To0H+K3n^5g5wN3@%6$ewnmj}~QA{C^= zO)U_Z4w%!GD-iAG>xdp)*!C^e2o!+cg2THYtp&=eqqpq=vdP&QRB*_;YH|nHoS$i6 z@}RotaobTfya3p6RTNnx)5QTbO*ss32>od9Q(w>-2+$xB~=ZCF72s50-^IE zP$~}BX=9Q^QBtjg0*Y7mVOxhDj_1}Ei$xneG!KHTg!muw$N0-^nleKekU)tcdOvV-O!k1+?kf+Du6PZiw#4}Zy5ZRv9broaaiPE@*KoV9 z&ODlmtiWO-htVJb!->c(gN1r$Tx5kh&}wPuIF5_OAVi4W9lz)i4w6HJ(M>ObU1R7t zZ5n_AMDz}fVE4!-MUT^lQ^Oxr38PM=Ut`g%o-6kbQ?skNF?olwT(k>RU}Z!X%^KgU zQ$Nt|mOaaFoe|(MIHOd;n-AFBX%%!pCu^3#zL=ML0@SqpJJC`3Iab7AiO)4!TnT@0 z1gl(mL?S6-LuQ-$I(-jy_fN%Qta*(CaMtf{rne2P*>X=c#L$-UP;bc*7(PtC9c&hQacBi|><=ej0tFfy99^Y_=xt~|)Fz}-!KR@XM`GV3^y0pZ zNaoOMUG#2rO!n5H7e?x4^%~VS551t~UNA^~or+R#rMhnHW_pHuo*vvY*gdc`aMggP z|BL;<)&J7|?S0?t`-8q>U)b~CoHr;85V);cpDTZ8$Z& z%m1(b5BeAVFZ6f&p6Hc&M|I)8iXz04ZZx7B7eAtbO zy{Gr~v2MidNts@Zh;$~GNtL7d*dmTjgw7($xJ085s0X?(ru^ONo*Wl@ZyT*?hy`OG z!>}}0%ooz>RHTBG1GOEvgN&Q`e%Jp}<2$-tGtM}SxQiGuIMfgx8g?VT%yzOc#u(3X zbR?Ufn~@?jlXjm{|Kv2kXoQW@j5rgX7D};#&HTR8{cVbEVtx~e7=Y4dhHx;8Tk$l3iS8Qnj5@|Jd_*EAWRM ztqg}KyFm&L^NX-7j4@SQNJ$k=WbHns{=s2>kmN*U=;Z{%=ZZo`u$kZ2U?H^ve^CZ( zEG#{ho+&Lxx!iPZS2sA!FD&rkFkP6-r@&laW^Cv8RfqY7`6$oPg~;^ed}U@D7IMw{ z`y1OYkvYhsDcgK`zui*QBvRt`AHh6cZb#$j)7lV?R(7%E)TdQ+v4hcqL!U&udWgIQ410+-28i&c08(44zil&e-=!-YK%{Gi}#rZStksc}jMD z)?Q_?>oe)lCz0m9w$Hnr`n2Alu4t+8rc;M5ELNzzNycEOPv8=E?=FlhMxNq0z z?M{6%VYnby7Ax~oWlBuc&hu9LK1qLZyFQOlRM$B>x?FF{Yq==Li^@pMog7{8$hU2L zzu0No-0z{ZM;7b%>%g3&Vw8)}f+%KqagnLo0h9WHJqDw|oS7KS7UqhROHu^q)U{I0 z%?=nOoWAWky{YkxTwHb3O#-^P0W&=_=*k<_5#n^_j`A}ec~4V<6nGylElVYZIN`tq zZgzqbVz~l_I5;lRrX)sqj*pmoqF$Z#$XBkfE8;}M=nPI-L`!F{JmQhhu1|YbOd0Ow zN~@YXxw+NwA&=~Bs!=#yCY`9w4eqd~e=Z_~g$P}hvWw+uV1IU`NIhXIVz69<7eP5# 
zij`+l`CQqCYf^vV(5t|)3`b{XQfYoBHxIH@p$vWrR{i7?4!sI2#HOM%%&agu$1K^2 zL?3hLl>S`hfcl1NgEGIdD6>^ zlj&s9cCPPr>J=_gi*zm-jh0H)O4O#;cRTe8(auC#k|L$0+37MUHZ~A`ms77yG!mw{ zs#r{f^HVsEi0k<%8C$lw)-@1k`bcV=>fFAG} z;)YzdF|$=Tcz&n$oaAIbimXx{lV=?dTWv_SRlsw$23mmv#-Ey~6E-m3!l6}6^Er-g z0OTMV#nB=h?t?EvtDuQYRF19~28GoD=2ThAuuH-$x*b->6$2Ez=$i5oY6}yZH@RY1 zmlS|!^AZwj0~)uEp=S2TWkX*zt9xW0^1B(^Azy2(8VA1{yn1xFc2I2+hetNh=f;Xp z3;I?DL*rB#JFm9D-^S8B+IAM#=56yZ5HfzSx6KDnUMdeFZ=1cN zD{729!p@Dnv})yzR*`YjzBZf|;Ta9nW=pUR72tH{`r51ouZUk0Tr@XtIr_ z4ay8~a63<%@DUE(ONm2(gBa6NPaASZo9$;q_KzLy>Ur7di)AG*dx2$KxjNelskE-5 zzSgBhG>TY+{Srae0OOnu>eUy*kPGJqUIQm2Q{64{WLzEq$@FyBm|QUS*v3bOtplH1 zu(*Ws7MD#`Pu-TuruO9MD4m{3j>2t`Pix^x$mbTPVx<4~Qs+|s&-Sc#{TK@HvwB!Q z2e$eXqoeY1WksQDedhS-Al8c_&(I84Ddf_*JU)B138Rd!T3hb}!`+Z+hy@;`_Xarz zZkMnk7h)r^kO0@&LoWa;RtTcbhFGSRAOs0}h~PH{}2-#vr7>NJkf##dLZ~T((O)5DAN-5YgQqHP=u*?Lf?B z+5vFp#aRjV)y=q|-cdTduCxO}gN=+s{&ujD=@)NQCr?9EZ`J{n1SCpV@QaUV&sPQc zEaU+Cu6^ztK*`p0wAeX-l9kae-ckopO)suzL4!`$<^ifTUZZq7NGw(Z6q>7Z-h+*| zrM1Y3Ofd*GbRx4!ZtD?C>t~7U&d!6f1f;{V56 zj(j7qzf;a1jC`Y7n~>j$|3A`tmuK%)-L4Z`xtH6^WhE&3;xHYEXS~Et}+XQ zaI(k4Lg~c+ubb`5>0(8*m^IpgFKsOB+Jz0{x@b>jcmN1M&nm8MaK+i*k|al5=e zx;pXy!JWE(UOh5mzkZRa6aT;2)QG^=A;Wkl{y*f^+&$8X|L-EBLMQ$|M4C)N)XDV( zGlpako%sJM96RyMS!bdnpH*Gfr~)#-_;5`>ALK#4C*fm7VCnTlXaPdgv~{)n$ZAY zxAH|S6|;~4a*Y?U>JXj%-7BL}hs+ua!7c=VLV_j$u4^qI)BYKX z$X4aou*H@IIdVz90J#4JD?RvvzjrMuHSBsZt_rQ~R=-`Fumsk4mH(Hgfgt|`p3~?m zzvuJ!?~~7m`+W~=8+E!d3#3>ZuxMT6*xdbn;59-BA|^TI%BUG10Ow4(zrPOL*1fck z-5Yj~EM&6TeI>}^3_P|{s!F>@kOeG>4IA;1ddXoBzc!Lx*bejfaciusdq&XT+QoP^ zYO>l6*f8t$Hg4NN6sr~O4O27bRJDW?CXdA7-wY_gA9W=#3 zBuK;Zf+??tHbwM*d09ReP4jjV-H@%knyr+r_miEbX;{|Rd(+mKH_j8SzzmuP6uX21 zQuJ=ZpjMVD1i8r}t2JbF!ZR|@{=^aaIUrA;^p34u=yWHWn6oNRtBr1@iQC`7CWepT zSV|;k4=UMdVEl-kp#=oIOd>LAW9te^VuNC2sWaB-w|x|?aZ3mkNTWGh*`@XtrnJ#G zvhW-v0qM-c@7Y5Qbe}~H-{V^xdgtKd-tY9~y3gv__Zy-7cP3c_ft7LjGLVRmu3U*F z;;yww#Q4TA#I_wb-~7~W3zUo5Ohvh#03q=cnJluJtXE!czDHPeYn7%Sf%WqkJdbo^ zar&dH6?q2=^0u%;&Na~RI1VD0#%S1ROEcx3MTG17XNH(^6TJ^4L6T&$y-XF0@qe 
zO8MiH1^H4?JWunZD`Un=F^6gb+N+JvyMe2uDYX=>93}+egB{gUa`ZsYXNp?dJSDnh zp>>s3MMEuPiy#|@K&|$X=3+qsvBcLR&h@{q(hFJ5+6_Q1f{*m!EDu|Hmw?-0mP}2$Rr>@2mj)tJ^ zR_eYF!SwJW-z^7`Qn=lf?u+Kg(39c9Qnr$r;`G^SngSQyFx?l!00*D~2phUDq!xBk ze2qEYEMJV~*iWL+R`GS3qoMz~!>DV0*1)a{eO8~;>V5L_L7%-J&WEmd(r1aOIO($+ zF`qi2!iZ4;4{42>+9u*#yN>D(?lr2XK>}U!c8~^EZyS|WX#nZSRx^$mVK|mfiRDr? z&9d_-g1x?K8&dQ_p){MxPw&r^plUr^B*LL{peQg#M?x?R#K7z=7sM!`o;3{HrbUl) zK#6rUNdJG8_Mr3sJ}N``?j8J5|AObfuK$5QHsWXXEO|Fb%ZU^I<`3F6*<=FtNIguE zcGKN<+t#)f6pb0qH9pT%6eG4DqYj|J#5&oJ0?07D08jr&PKZY+yCt`_cE?*Fn zfUX910oMKLQzJpWv0Y#@qpdb0tP?Oo>{nlbU^{w&Yugd5L=*Hkn5o(N4xI5c(kj=2 zH7q>~-oX;GtDz;NHb2q^;=E}~2n4+0lyKh;yx5Z633x@v65=}MzG70fi@6^=!f{}Q z!iFdjwvN^@*k`FF#9*~&$vfL2fTCPw8@O&A1W;>+w1$EeIuO{j*!_kH9R$$J8w#M- zskB!BHQ;2=^sHmm1tjC}TDfa+5NCBV-hu@olfO{{w~DM9szMX3lyh#J`KeIKB`OsO zDYKJVI2ec4Ymcx{ja(5t|1|F(XT=tA5%nNwjXtDn*aa2Q(owiLqYtLeAOTcA#6tT2 z0CffBduZ^D{m=2-*YlrUzw8=;7p{K5{2F#`eg&l%5}MQ8OqyFrlxDKJA!nn_uZYmW zf)mCJ&aL_NJs_szN4{AWT$^7V=Lm~dz0vDnqah6H?#!?1%p%xa>B9V~PYTShd;{|< zIhD;YzgjS5YilD%g?6@o@Z5Ul*LMSR!4FtP*>(t}C|U643#HZ+#c1g|THO)a z`4Ef}p5WZ^S`rl|}m-QlEq^6F*?7xYE6)!fB2hW~sn4!&f?8 zSSs{Mfu-V#MwSX!APR0v!MhJhBZ14&p%3H7$RrQ+kjT<`;dQeFs_ik1gT;RFscjJp%zoJ^;- zq84tK&dj9J{7i0s8ZEzi)&@Wwi~~zEaF-wce7L4c^2(g~`wNoUBW_53Y4Z#2~2e;>2*73ngIa73RTl@fX zS*%F;a;A_60K+9(m=FZ|fDi&wKBb-i!&*-k@+kZ2{zN5#-3S072NvP-4_uWu-@a1f zF*z#ND3ll+!!Xzj-mP+t7@Kjw%3&6nGC1gK0ZP;yE$xpog2LtC2QE-W-CUqT2a^#- z*A$!8qU!F*==54SQ|QJ>yZ9>)2Mu@BkCbZ6GMx|nd*YDSj~|7tMe#YbIA$8 zEgFI$IP0!a>j;4qHd}Xf27+}Y2>9R9p4C42TF_f>JJHkPdq+55eQkzKmWv2sA>vLX zt?NKXb^s8!clN{`(p4$DSe_Ojl2Tnw7Xpz(Xf?&1VpGo5zYLhb&&e728g#CH{n$#s z(`_SSDotp*&4RMdhHC+lttd$h6Cl2oRbXR2YgB=og{`x0Yz(D7F=u9y3(;t~%IToB zJxzxjF=2>T=2jkU;*L?x;@b?Zxmh?9Rs+QM^wqX1$p_FGdUA)HG*+8!+);&Nf%v4t zeB*eRy`Fsr6Gf{u@a8aYLG;OE@;-EEe%|QHB?h9cK~*BW2#NTXV&$1sK3AS`ubMYE zE|BTqh)N9&h&QW-dlC3eC_1IX)@B_zG1waR=X#zIUcM{S;gG#Y#;rr5h% z#XU``ehv3*YZMim(Ilp&d_{{}O2+6#%j-+p1N#5fuJnI&+!xyz{ogn_wI~_u(Es(b 
zwLwtycl2CI_0RYF`X26!d7kX~Z_lm0PxVd(9tfN@{HEcH{qOK!>U-38VCefpr-!Z? z>K^>l!NvY>_gpy`_5Q;9VQ*>RvYr#(7kLK;{&wK415-UuwA*sf>AwSkO@qMx-r2G4 zvQ$P^SXzs>tNpEpRuW>da4bU4OfOGnW<`$C{#mU~{bXc`>UW81YBTz*@oew6XZ;*M^_wzVuZ^%G-iTbd;dN1|AqnCHO!tcBbVqGFVbm|LZO zxZsg*+tFoyw}zgyfFC3~S32g!7|wBS?&OY5=~qdwZSMEz=JboxE1Ua0v{4w1@hoTV zi2B|}^%N0fqNS&AZ%|JW2zh4h=^Go=Q&`}`VauHUE{#^+de;NF@))3ph+x6faX0*l zgYZS+7(KTrCYK5ea|;^p(fHGs8}lcpCRG0PrF#6y;oCCxWSy@VGOLj~=rZ-1dS69i z91~&bC^MN!g{7HA{i_uLIB1vpr&>mRH@FV=aSl2k$NBTJfr{;>Dk3_ zfsKkOPW#B;5SmVNqg80Wws8v%^VC{cy$W_5ZMMowmvdHIh&FD(Vg70Z!!8KZ5e^z* z{+63YoIs&9a$+BB;W{kTw{D@KR*$)9!wDd28-Y7K^7dwULj5$=W`6U$36H#^nT_7= zz)aiT72gYU^VBC>*}jrE%hLW^k9*`qGvl`vR5bCSLA)@u+%3I-U^9D1!L}^Dzi%^p zN6`x`y+5{y1h@5yh#KVW1vdznhRx}gjz7Bz5|qHOY?uv4%^hbpp7QF^Zy=S9~HQ0crW~~^V5Mq2Lc@kbRf`yKnDUH2y`IOfj|cW&w2=)h{`wm zsbi<2+g0CGTT2*dYJ{oq9th0>zUF%?;O-33O2af0r8y-;B^&2a%q@K1vr?!evPTl- zS@1bWNdnQI2f(ok{^a0BH~xE{;-d_&SYh`_HJ_QQN_){Mt@4dJpz9&7-H2MI)x!~T zP~ap0-yuH)>2MVspG%NrQ1NsJ=Y%qNz8^+MEZRG1u~;Zo4w6acRI3c~m{+^ouO~gk zxiUnE;z9h&bye`PhVk)yFqg?!A;BPWe^~$ARrl?~)npP*nr^{^XBhxwVtyJ!^^jCL zh&#mKg|`^E=C=?)sexCZhOOhr)mfD-+3GZy1HMPl`S{b?Bi|+;MCaoy>8MR47{RW@ zYxB&m>#ApXW4G#TT=(0BJL4~Le`nmu2VHY;=Gx=(3^b43#H?90^*I7-dqD-##pbeb zmBjFm!xl7HE91!K3`+(wD=Ifk?A&x1$lJs55t`>>Vj(=4D%JJ+VIy&d4?+EnyK4!7 z7~%d~xtK_zyGLew2aF6>b|Qr@!{j0VCftv~$#U&@m@-m^!A{Xi;Le8C5_pa7D^&Bq z6Pe???-`kuP?A&gAY7h(YB*4mjuTM)T011Cmj9bS9Tcm81FCVnZaeA*g>wAejxhLb6`1g-AAMv~W1kTQi2u zLF)CkozSZs9@5-gI=Uzp`MJ92Wn%F#3wz22-j{CC8dgTy5WSXan}=S8)6k3hqEn^C$;rj&Z2i3#0gVem{WAi+5V5O~ zfTc`mW6|5-j8Z)+Y{_C)yHeyX(*-hU+*a^XPATcMm@MYhpuC_sIyFT)@^V&8bU+bFY=d;sQX*O3+wE{pzF!Fhcaau509j{yzrIx=l-Q99@H3zizFK))fJ4 zwA$5h^<%%{_7DK>8uDLCiBeUXsdu#(gH(t?_|dH{wQD1FgM65fTCGj1Uvchk3-Xn& z>1s8)P|C*2kw&W@l4Hk|3;eEEf16f5jMNSCjcV;eKK8J05At!>bScX*Y?`aoQ|Mql zh0%3BR=r5MA>r#<#rE+?8zbLbZT-j(O=l{Z>3pFiX?I10s1V_rww)U708QA)>fk$y z+ok8i(~G>wW$Vf6fHnxa0Wp=-EP7iSZFr=O(PpmJ0<=l_`BG9xnGlIYo3@sPa2t;3 z!fn!%g$h46zcAZie`VrABm`sF;;I!879MG1beXHIA6@kw(wpU?WNVHngfnJHi#oNy 
z#j;Ffs-9sJ1BMC1F1gk1*4jwjpb#dcRwGb&7Y-qf0)N2~`Z4VwiiicKyY-{vOV*4e zwIZA+Img{0LTEWDCy=ocID;NGYi$R$)*XnL1`OQ&xt=nZ2C&t}Y!6%gT#p!s4o-X< z+#Z}wbweT#X>th9JcO_BI-%>kYNnLMm0V)-^JYs`qmTJv{H43*oEEXe!m#QEI-=NAw z3ZiwUqzUAV0LS{lmj|ayQZY#H4RTEI&Jk9Gh}4l-NPzt+^a2i)AvT0!RNpxQP;||B zVIDjr)nu?$XbZp|2IB_yMzyA3&qFvUMqn+1y*=#9pbsRn6Z4sJHIdCMpY@8r7PFBV?%r(zK^c6x}F4 zA53Jkg@vYIh@+B0vs(g&%3$it0)TgOwWeXnuwi6l)V;|J#wjJ!h2TyJ{ss1>%XX{og+JgS6mUicRYmtEG2`NUmmRU&9dba_^Xs`7BdVSabgF;_HJnk$lY)W zz>Hq4S@Ck zy}h%m)*JCW#H_-k6 z?ye6}JvYFw&d-?ufz|WmL&MbUYGF*?p#*%@F6XG1maNfh>~3wTAN;0VX(HgGFCm+| zG3!}N{}GgL3Q*JX(-)4)&#__#U8AXq>PKE&)32JjTE^fDq}-@O%mp1rnyvh6xNc&; ziBR7qMvK^D-rJkHdSbqq(4;q{8Z2h(M7y^de9dqN@HBNM0de^oZ29<1!l*{TR$qbdtH{`oZ32e5%a29 zL~H$x+bw3>{v62wY}=5#lUrn&^sJyh zfcmF}bb5JlGMy~KNa`Mb!W{>wkJPm-?Cu5}8}RFn8yMBvO9m(kTsz4C^-oJuq_i|U zU9RUrr2rRBx6XiTrict0>XX`pR54)vWMH=&Yc2{(pv7hpZPy}Nf_7l!v8}(Nm60|S)hyN4k4kO7oxlaSw>@FGny+G2y^8M+tDx+TBK^Ow z>k-QLt)WZ$|E+Je=Wh6;^RuNOuo{zJiclj@~Cpfm6OiVTz z6TmFXkvGQe4OaqUYtSX$hwYdEIU*oUB5jd5*)bc13oPqQ=mb5$F_e}9*_g)h4NcZ# z>L-2bS8vpwlNZakLUx{4?ipR_F>q(e&LcqhZOHw%oM8A|QOF4E%+6yBq}g;lfJ-I> z-jsE4X&os*`ixG;V+K`A^X~u|i_tpycQ_%=@}X#?scgDNbQaSxMa#)Zv1^{t#^%-4 zwj+!BXYRgHhMix%0hS1UR?_lO$j~URN-fIJs1HLEbs!-lgy3%>FT@5qB=cx7sFWV) z!dyND$5!Qfjzb{i0rrIujA-MO7qbZn;QVUsIT@0V{Qvfz1C&yZ3$W zyYIexp$6l}!FQZ8T8^-=%3WD(4Ms&9FjS4rKdnrt8;OoDH1e~#$w8ER45jyi&(*$S zlz+KZ%oo$}dl(s*QbEiCv7z+fFnVu8t_XvLf;c&)szhksABel#gZzM#+1&h%QxNJ` zd!ioIZ0}PG6OBKkO&O1`(o0Z7SqjYj=zKWCW>P2wNtOU4SSuw0noTablCK_Mm)$XFbTg;5F2p{7)CL0|cnkkHr&b83^LKXpE zdYM9>vszAJ@OO-_tL7jo2QI!!OIBW5uHrl-XEZK;h?4;&SiJo%c2QfR#@7K@;_#o_ z8n=vfJ>v1-?7QDP>ptW>=?L^3?S9ODDsWd|wf{livp$FSQO{w|diUw>AJ~6ud&v4P z*7pTo3~chB@V)Niy?^g{#WUftcmJlFvAtk(TetW>?7P!D;@qHd>-kO3!R`m_&)bXkOKn-}Uv<6M^;FkyyG~no`!Duw_ik{HxG!*>a_x6*bpG1; z1?Lv0({ZBb$34033H!tL>+D^&tF52vdc?Y{>uu|L>z4vQ32gWO$oH;qllN86>z%Y_Q_x{53 zuIE9|mG193o^Tv=-0PU`S=)WH?QNR?{7m|ecn`Rrb`QASuJ5?!Tz$^ho%@{Y9KUp| z>3P2Uxcx2rK6}4yul1#_(_Q-me;)X3pwEBA_p)!b_ZiQ#o&opS?$^3k+8($5+#2`4 
z;q^KCd*0u5*8kt$iyhbZ++ghr_`QN-xF=~{rE)Z^uw76e?vh2qhNWF`04koKew#Z3GW3pD^fO6 z%kwtDavtU0-|}rWw#@hm8smR$yH(v5?#y!po8PkSwX9P08QCkA;{@Z++8$RE!&u)` zvm!J84cpsv#=ow{mKlGB#`s^_SE$>}v$EBlfpQVuuN0Up>dxXT`@)W)KQ9j^fzCyyda(l|&m~QOi!2^q0GD zrc3%!HMT72M`)7%)9yX$wy>nXNc87W_i?g64-@_Q0=+*6yI)hcffew1HHUJ4KG(CB z-k*om*m8e9L+j6jJ@;6yDNirlp9c`4F0#gQKU&0OAP#!TTR!cxDa9$FJ{uv1Kk0d% z6vF{R440(vVL{cOWG7ma*G;o@GUuHtNp2 zp3t0g9YJVTPiV$Dq$gC+6UsaH5rlGjLetKpdO}$}p^Wo&g3zR%P$QAGewbvVu|5R$GC-9DirL0ul@3rw=6gV7h%d4H#yp7Vl6;7MWwR7GLAkpcu z1kMId2fiCP7C02RFHi{V3S1kA2QCQ&{O|hD_)q$e`w#o?_ut_kbw20(jB~=d(SM_V zqrcC8zTf5hFW=96Cwa@RS|pv&d_ALl#HSDmjo1=WeV z^WTmHIuhtepd*2f1UeEhlK{=3ahBrHcuV8Z_)nd`2-$;{H?I zQ4s%`hWJk!?q1e>`jqC=U21DDs{R0`AK}IN0R{GH3hZvutc;N$KVn^HkXlaCfcMjY z;AbLFY7|)DTpMqt7>t(h8-4L@qc6U0^u?n_Up!=BtzK;a-)I2;fB`&Y0Ke1#exU(; zxdGg50Iy($lNUIYVf#Z0>;($!n-tiS6xi1&u&+>HpQpe+L4n-i%HNqT2Y{5qva(E>^TbTX$tI%6xah4*dn~Y z+D;&juOUHKk)ZV?C_#d*AVI51&?*w-SZ2G-RUSZyFWO3diUxd<20Ts!9-{$|(tw9) zz=vtT12o`$x|1}T5O+{uvlLj40-L13#wf613hZ_Y>=p`a8wGYf1tyf@|Ib*wFMEcZ z>mmMsOV=4QL!tBi5|aQ#*zRm_ge`;!#uIf6cq1chaSP1QtPoC3PU1w+6{9bqo+)tD zvSQ+dq>#f{=n0dRz%p!Z5zCO6OfMMOnz)6 zKeTqVlkd4Mu~Vmh$YSO(SfT83Pfvw^bWqtCMOS7aXGq+ohvt%@x0#B?uWS z@=LM$giE97XhnDMylJ~xgl!Be93+^LaXy-!TVRtdjIae=0xQPPn{=FZB5WbJo*rR~ zU+Eb}*urBDcu5B3iIJ!myQnSE2wP=}fbn%}gN-lr=(DlL7jA(`PR%C80*BLuS2Vux zo6zPNU**@9p}n`58DF}tG|l)@$%JZr>G)C%hIW14B)|J@h4Ce~ZDf2QS*l(^E3#BQ z-O@a7YTTRSHZ{JK8Pj?H-~MNLFpx*9HjooBF3MyRk#S+HI4?HCKu(09$qGi5mLrq7 z80Gke9fX=_ARhrE8UKNSESeg~36>2r+*BqzHkOPQT4^Ao!hHBqIyyH{90XcUL6@B; zXjB6^Dww_h=VMT-_5ZVhtV|IwkdqBIkYjN^%++x%q|6M|0uxPVrpLwT_z=vDiUu+d zg<8W6tU`vY8~L#oEJcUqKk!Ztn;FPhSmmxX%|KSkgueI?B5rfMNu(Pc^{?<6>a`BK^4#V%?~ z)I0fAFbMD;I{$ZfE#Uiq*IL^O_;2T5QzQUs{L?k1@fYIpNIc4nk1hC@7 z@#n(PSOg7zDYQ`j=ISE6-Umk_9}P~XGDRrUh78m!R1Mlz$|286AFqO41k!16QR(xC zN>Atj6o#~X#Y}3tC~g>qS}Xv8-$*a)28zSOVm`kWXEzG7@WtTFD5iXanA?mS#OMIU z4XB~uJTS491*lLI2M2>v2t#_n^Sh_<*GsY&-Dw?@|Mu?Ry>|lg_}}%=mg@5O>&YpK z!N&wS$}=2;R5LmU@ZvSY`E=P1SZ+uW7l$(kM{Snx|eAWz{V8 
zqPE&6-l|^38DLsQtDC(t8dwATJ+ZyxKFjufu?se63cKh_?dVtc-H@C@p#H#TXn zUEEeIZ8KGFIk1I`IoPfTitt1}H!mm9f~NwIS-LQQs@2Faac(M`D{K}ExzsT1`f{#C zX%j}t6LrQ0m=FJ_%@ElR$!n4l%G*5XTWd~$do$H&_a@qC>*#H0`>kB(-c(%4;NBcF zb#H2|lIFQL%jON;Riot#R{1t}Z%X}XiF>muSt_@niY)2(|BbwJY}5OHWyZkfCiFJV ze!(#=O|oCec$$jlsVY3D&COHbw3^dL8LesR#TlBLq+T)`bVcBjXSmSRyzENgqLB2wKCXmL4Z!#|4$hS7^6jNu=E)5xy*bBn z5r&O19ErAg<SljdaKc?0jLxQTf8deq70oHfZ*U(vig_Ltku>B z*aJ2#Dt?`x8XQK>QT1^H1~MFd2O*RHU@28Sc)`I@YLJOvUEHt2j8>zyb{KSO075N- z-u{unbY86MFi@s>}cWW1fv+9gg+4`Uj%MkpDdz~BNO@*3yL2QZL&II)OIt%{0u2y)2Ya z2}AsP10$I0q}~#zrgK<>);#sH99N%;+e##W(d>=zs4b?jketlVPNgars6*0VLxA7* zv$w=)RqADBw_4NG%L-gH1`26>tPLB%Z7~b;^Tn~T;uI=hXS@T0&^RFkye$Rw0;koe zw~W?2^>VRz)6|RFVv@PJp`p3hMCH8~sh0}@{*q9ya^N+#qDvi;MnQwrYvS!@!{JCc zQXg&C=N$XDiR`Fe;-CthT@MF{kEpTaDHd<@1R+D4$1L=HgfK+LSLh z&G0kn(V{ppS?Op6f{J40^K6?dU*^3Rt)z@mz)o}lh?(j;B?`B-4w(Ku2pqi}L;qt8Yb&1N{Ry6>W(^;zq02}A|`k3i8 z&wkuAlT3(%P~mud(rf@gL?N)c6lOngT8;9{Xf0BHECErlty%g@O*6$}Y&MsU=g~B+ zIQt>o99Ufn$}b1T8yS6gfheQ3O8MY`VNr4H=I{IPSOl78a%qkYk8y=c2Re{dq(8!k z0Kff}k2y7;`D(PqD?c=vDnK2`Y)&j)eDDb&CO}rYS~Xjy1HYt%GstwA>B#6DpWxD! 
zY;{mu7+zI{fRFZ98|KtpZECbSs7=hwcTXcp=^Plo|&C6 z*Q#Y<1GxPWVY~GDy2bMjDw*70_crK&m8o9NHF+hoA*Cck-p@qVfZivB5m?x&&Zb zOU0w*(}*r4M_1bssMZjb1Iv`D`6=^Em+h5i;uM5%JPuqbi&a^iQ}czfY-aHyh1n~F zpp~`|DQ3oGxPnO4Xu9EQDpD@WMm3grBSgxHVk zPQ=htcL68#Y}`miQ4p~Zw9*bEwZx3+NY$X#D%Kcy3pE}iVx+2B1dy;3+uj&R-+t+r~pHgZf&6iKbe`}U>csW)Fi{wV~kiE*a9G&YF>uu z&}x<;8;)q6^OP4(ad<2nTq}Zw0Q=G#Ehw|HE6ZUnu`cA152!u ztntFJqxYs%4&}HBf;v*UYzBFig0f1IJ9?2;X2(*)V?k-7L|Q5xtbeC1Q;7Eqkq|K4 z7TUqgm}^IkR None: + """Start parsing PCAP file in background""" + if self.is_parsing: + self.logger.warning("Already parsing a file") + return + + self.is_parsing = True + self.stop_event.clear() + self.start_time = time.time() + self.processed_packets = 0 + + # Start reader thread + reader_thread = threading.Thread( + target=self._read_pcap_file, + args=(pcap_file,), + daemon=True + ) + reader_thread.start() + self.reader_thread = reader_thread + + # Start worker threads + futures = [] + for _ in range(self.num_threads): + future = self.executor.submit(self._process_packet_batches) + futures.append(future) + + # Monitor progress in separate thread + monitor_thread = threading.Thread( + target=self._monitor_progress, + args=(futures,), + daemon=True + ) + monitor_thread.start() + self.monitor_thread = monitor_thread + + def _read_pcap_file(self, pcap_file: str) -> None: + """Read PCAP file and queue packets for processing""" + try: + self.logger.info(f"Starting to read {pcap_file}") + + # First, get total packet count for progress tracking + with PcapReader(pcap_file) as reader: + # Quick pass to count packets + count = 0 + for _ in reader: + count += 1 + self.total_packets = count + + self.logger.info(f"Found {self.total_packets} packets to process") + + # Now read and queue packets + with PcapReader(pcap_file) as reader: + batch = [] + batch_num = 0 + + for i, packet in enumerate(reader): + if self.stop_event.is_set(): + break + + batch.append((i + 1, packet)) + + if len(batch) >= self.batch_size: + self.packet_queue.put(batch) + 
batch = [] + batch_num += 1 + + # Queue remaining packets + if batch: + self.packet_queue.put(batch) + + except Exception as e: + self.logger.error(f"Error reading PCAP: {e}") + self._report_progress(error=str(e)) + finally: + # Signal end of packets + for _ in range(self.num_threads): + self.packet_queue.put(None) + + def _process_packet_batches(self) -> None: + """Worker thread to process packet batches""" + while not self.stop_event.is_set(): + try: + batch = self.packet_queue.get(timeout=0.5) # Shorter timeout for faster exit + if batch is None: # End signal + break + + # Process batch of packets + for frame_num, packet in batch: + if self.stop_event.is_set(): + break + + try: + # Thread-safe packet processing + with self.flow_lock: + self.analyzer.flow_manager.process_packet(packet, frame_num) + + # Update progress + with self.parse_lock: + self.processed_packets += 1 + + # Check if we should trigger a flow update + should_update = False + with self.update_lock: + self.packets_since_update += 1 + if self.packets_since_update >= self.update_batch_size: + self.packets_since_update = 0 + should_update = True + + # Trigger flow update callback if needed + if should_update and self.flow_update_callback: + try: + self.flow_update_callback() + except Exception as e: + self.logger.error(f"Error in flow update callback: {e}") + + except Exception as e: + self.logger.error(f"Error processing packet {frame_num}: {e}") + continue + + except queue.Empty: + # Check stop event more frequently + if self.stop_event.is_set(): + break + continue + except KeyboardInterrupt: + self.logger.info("Packet processing interrupted") + break + except Exception as e: + self.logger.error(f"Error processing batch: {e}") + if self.stop_event.is_set(): + break + + def _monitor_progress(self, futures: List) -> None: + """Monitor parsing progress and send updates""" + last_update_time = time.time() + last_packet_count = 0 + + while self.is_parsing and not self.stop_event.is_set(): + try: + 
current_time = time.time() + + # Update every 0.5 seconds + if current_time - last_update_time >= 0.5: + with self.parse_lock: + current_packets = self.processed_packets + + # Calculate metrics + elapsed = current_time - self.start_time + packets_processed = current_packets - last_packet_count + time_delta = current_time - last_update_time + packets_per_second = packets_processed / time_delta if time_delta > 0 else 0 + + # Update for next iteration + last_update_time = current_time + last_packet_count = current_packets + + # Report progress + self._report_progress( + packets_per_second=packets_per_second, + elapsed_time=elapsed + ) + + # Check if all workers are done + if all(f.done() for f in futures): + break + + time.sleep(0.1) + except KeyboardInterrupt: + self.logger.info("Monitor thread interrupted") + break + except Exception as e: + self.logger.error(f"Error in monitor thread: {e}") + break + + # Final update + self.is_parsing = False + self._report_progress(is_complete=True) + + # Final flow update + if self.flow_update_callback: + try: + self.flow_update_callback() + except Exception as e: + self.logger.error(f"Error in final flow update callback: {e}") + + # Calculate final statistics + with self.flow_lock: + self.analyzer.statistics_engine.calculate_all_statistics() + + def _report_progress(self, packets_per_second: float = 0, + elapsed_time: float = 0, + is_complete: bool = False, + error: Optional[str] = None) -> None: + """Report parsing progress""" + with self.parse_lock: + processed = self.processed_packets + total = self.total_packets + + if total > 0: + percent = (processed / total) * 100 + + # Estimate time remaining + if packets_per_second > 0 and processed < total: + remaining_packets = total - processed + eta = remaining_packets / packets_per_second + else: + eta = 0 + else: + percent = 0 + eta = 0 + + progress = ParsingProgress( + total_packets=total, + processed_packets=processed, + percent_complete=percent, + 
packets_per_second=packets_per_second, + elapsed_time=elapsed_time, + estimated_time_remaining=eta, + is_complete=is_complete, + error=error + ) + + if self.progress_callback: + self.progress_callback(progress) + + def stop_parsing(self) -> None: + """Stop background parsing""" + self.logger.info("Stopping background parsing") + self.stop_event.set() + self.is_parsing = False + + def get_current_flows(self): + """Get current flows (thread-safe)""" + with self.flow_lock: + return dict(self.analyzer.flows) + + def get_summary(self): + """Get current summary statistics (thread-safe)""" + with self.flow_lock: + return self.analyzer.get_summary() + + def cleanup(self): + """Cleanup resources""" + self.logger.info("Starting cleanup...") + self.stop_parsing() + + try: + # Clear the queue to unblock waiting workers + while not self.packet_queue.empty(): + try: + self.packet_queue.get_nowait() + except queue.Empty: + break + + # Send stop signals to all workers + for _ in range(self.num_threads): + try: + self.packet_queue.put(None, timeout=0.1) + except queue.Full: + pass + + # Wait briefly for threads to see stop signal + time.sleep(0.1) + + # Force shutdown with no wait - this kills threads immediately + try: + self.executor.shutdown(wait=False) + except Exception: + pass + + # Join daemon threads if they exist + if hasattr(self, 'reader_thread') and self.reader_thread.is_alive(): + # Can't join daemon threads, they will be killed when main thread exits + pass + + if hasattr(self, 'monitor_thread') and self.monitor_thread.is_alive(): + # Can't join daemon threads, they will be killed when main thread exits + pass + + self.logger.info("Cleanup complete") + except Exception as e: + self.logger.error(f"Error during cleanup: {e}") \ No newline at end of file diff --git a/analyzer/analysis/flow_manager.py b/analyzer/analysis/flow_manager.py index 08cba9e..65e7770 100644 --- a/analyzer/analysis/flow_manager.py +++ b/analyzer/analysis/flow_manager.py @@ -86,6 +86,17 @@ class 
FlowManager: flow.total_bytes += packet_size flow.protocols.update(protocols) + # Update timeline statistics + if flow.frame_count == 1: + # First packet in flow + flow.first_seen = timestamp + flow.last_seen = timestamp + flow.duration = 0.0 + else: + # Update last seen and duration + flow.last_seen = timestamp + flow.duration = flow.last_seen - flow.first_seen + # Enhanced protocol detection dissection_results = self._dissect_packet(packet, frame_num) enhanced_protocols = self._extract_enhanced_protocols(dissection_results) diff --git a/analyzer/analysis/statistics.py b/analyzer/analysis/statistics.py index f4e952f..9eee319 100644 --- a/analyzer/analysis/statistics.py +++ b/analyzer/analysis/statistics.py @@ -29,6 +29,12 @@ class StatisticsEngine: def _calculate_single_flow_statistics(self, flow: FlowStats) -> None: """Calculate statistics for a single flow""" + # Ensure timeline statistics are calculated + if len(flow.timestamps) >= 2: + flow.duration = flow.timestamps[-1] - flow.timestamps[0] + flow.first_seen = flow.timestamps[0] + flow.last_seen = flow.timestamps[-1] + if len(flow.inter_arrival_times) < 2: return @@ -36,9 +42,19 @@ class StatisticsEngine: flow.avg_inter_arrival = statistics.mean(flow.inter_arrival_times) flow.std_inter_arrival = statistics.stdev(flow.inter_arrival_times) + # Calculate jitter as coefficient of variation (normalized standard deviation) + if flow.avg_inter_arrival > 0: + flow.jitter = flow.std_inter_arrival / flow.avg_inter_arrival + else: + flow.jitter = 0.0 + # Detect outliers (frames with inter-arrival time > threshold * std deviations from mean) threshold = flow.avg_inter_arrival + (self.outlier_threshold_sigma * flow.std_inter_arrival) + # Clear existing outliers to recalculate + flow.outlier_frames.clear() + flow.outlier_details.clear() + for i, inter_time in enumerate(flow.inter_arrival_times): if inter_time > threshold: # Frame number is i+2 because inter_arrival_times[i] is between frame i+1 and i+2 diff --git 
a/analyzer/main.py b/analyzer/main.py index b3bb68d..08f2e8a 100644 --- a/analyzer/main.py +++ b/analyzer/main.py @@ -72,8 +72,8 @@ def main(): print(f"Last packet: {last_time}") return - # Load PCAP file - if args.pcap: + # Load PCAP file (skip for textual mode which uses background parsing) + if args.pcap and not args.textual: try: loader = PCAPLoader(args.pcap) if not loader.validate_file(): @@ -93,6 +93,16 @@ def main(): except Exception as e: print(f"Error loading PCAP file: {e}") sys.exit(1) + elif args.pcap and args.textual: + # For textual mode, just validate the file exists + try: + loader = PCAPLoader(args.pcap) + if not loader.validate_file(): + print(f"Error: Invalid or inaccessible PCAP file: {args.pcap}") + sys.exit(1) + except Exception as e: + print(f"Error validating PCAP file: {e}") + sys.exit(1) # Handle console output mode if args.no_tui: @@ -106,9 +116,24 @@ def main(): # TUI mode - choose between classic, modern curses, and textual interface if args.textual: - # Use new Textual-based interface (TipTop-inspired) + # Use new Textual-based interface (TipTop-inspired) with background parsing app = StreamLensAppV2(analyzer) - app.run() + + try: + # Start background parsing if PCAP file provided + if args.pcap: + app.start_background_parsing(args.pcap) + + app.run() + except KeyboardInterrupt: + print("\nShutting down...") + finally: + # Cleanup background threads + try: + app.cleanup() + except Exception as e: + print(f"Cleanup error: {e}") + pass return elif args.classic: tui = TUIInterface(analyzer) diff --git a/analyzer/models/__init__.py b/analyzer/models/__init__.py index 5e47c94..f58d6ec 100644 --- a/analyzer/models/__init__.py +++ b/analyzer/models/__init__.py @@ -1,8 +1,54 @@ """ -Data models for the Ethernet Traffic Analyzer +StreamLens Data Models + +This module provides the core data structures used throughout StreamLens for +representing network flows, protocol information, and decoded packet data. 
+ +The models are organized into several categories: +- Core models: FlowStats, FrameTypeStats +- Protocol models: ProtocolInfo, DecodedField, ProtocolRegistry +- Analysis models: EnhancedAnalysisData, TimingAnalysis +- Result models: AnalysisResult, DissectionResult """ +# Core data models from .flow_stats import FlowStats, FrameTypeStats -from .analysis_results import AnalysisResult +from .analysis_results import AnalysisResult, DissectionResult -__all__ = ['FlowStats', 'FrameTypeStats', 'AnalysisResult'] \ No newline at end of file +# Protocol models (new) +from .protocols import ( + ProtocolInfo, + DecodedField, + ProtocolRegistry, + StandardProtocol, + EnhancedProtocol +) + +# Enhanced analysis models (refactored) +from .enhanced_analysis import ( + EnhancedAnalysisData, + TimingAnalysis, + QualityMetrics, + DecodedData +) + +__all__ = [ + # Core models + 'FlowStats', + 'FrameTypeStats', + 'AnalysisResult', + 'DissectionResult', + + # Protocol models + 'ProtocolInfo', + 'DecodedField', + 'ProtocolRegistry', + 'StandardProtocol', + 'EnhancedProtocol', + + # Enhanced analysis + 'EnhancedAnalysisData', + 'TimingAnalysis', + 'QualityMetrics', + 'DecodedData' +] \ No newline at end of file diff --git a/analyzer/models/enhanced_analysis.py b/analyzer/models/enhanced_analysis.py new file mode 100644 index 0000000..8f907dc --- /dev/null +++ b/analyzer/models/enhanced_analysis.py @@ -0,0 +1,289 @@ +""" +Enhanced Analysis Data Models + +This module defines data structures for enhanced protocol analysis including +timing analysis, quality metrics, and decoded data representation. 
+""" + +from dataclasses import dataclass, field +from typing import Dict, List, Set, Any, Optional +from enum import Enum + + +class TimingQuality(Enum): + """Timing quality classifications""" + EXCELLENT = "excellent" # < 1ppm drift, stable + GOOD = "good" # 1-10ppm drift, mostly stable + MODERATE = "moderate" # 10-100ppm drift, variable + POOR = "poor" # > 100ppm drift, unstable + UNKNOWN = "unknown" + + +class TimingStability(Enum): + """Timing stability classifications""" + STABLE = "stable" # Consistent timing behavior + VARIABLE = "variable" # Some timing variations + UNSTABLE = "unstable" # Highly variable timing + UNKNOWN = "unknown" + + +class DataType(Enum): + """Primary data types in enhanced protocols""" + ANALOG = "analog" + PCM = "pcm" + DISCRETE = "discrete" + TIME = "time" + VIDEO = "video" + TMATS = "tmats" + UNKNOWN = "unknown" + + +@dataclass +class TimingAnalysis: + """Timing analysis results for enhanced protocols""" + avg_clock_drift_ppm: float = 0.0 + max_clock_drift_ppm: float = 0.0 + min_clock_drift_ppm: float = 0.0 + drift_variance: float = 0.0 + + quality: TimingQuality = TimingQuality.UNKNOWN + stability: TimingStability = TimingStability.UNKNOWN + + # Timing accuracy metrics + timing_accuracy_percent: float = 0.0 + sync_errors: int = 0 + timing_anomalies: int = 0 + anomaly_rate_percent: float = 0.0 + + # Internal timing capabilities + has_internal_timing: bool = False + rtc_sync_available: bool = False + + def calculate_quality(self) -> TimingQuality: + """Calculate timing quality based on drift measurements""" + max_drift = abs(max(self.max_clock_drift_ppm, self.min_clock_drift_ppm, key=abs)) + + if max_drift < 1.0: + return TimingQuality.EXCELLENT + elif max_drift < 10.0: + return TimingQuality.GOOD + elif max_drift < 100.0: + return TimingQuality.MODERATE + else: + return TimingQuality.POOR + + def calculate_stability(self) -> TimingStability: + """Calculate timing stability based on variance""" + if self.drift_variance < 1.0: + 
return TimingStability.STABLE + elif self.drift_variance < 25.0: + return TimingStability.VARIABLE + else: + return TimingStability.UNSTABLE + + +@dataclass +class QualityMetrics: + """Quality metrics for enhanced protocol data""" + # Frame quality metrics + avg_frame_quality_percent: float = 0.0 + frame_quality_samples: List[float] = field(default_factory=list) + + # Signal quality metrics + avg_signal_quality_percent: float = 0.0 + signal_quality_samples: List[float] = field(default_factory=list) + + # Error counts + sequence_gaps: int = 0 + format_errors: int = 0 + overflow_errors: int = 0 + checksum_errors: int = 0 + + # Confidence metrics + avg_confidence_score: float = 0.0 + confidence_samples: List[float] = field(default_factory=list) + low_confidence_frames: int = 0 + + # Data integrity + corrupted_frames: int = 0 + missing_frames: int = 0 + duplicate_frames: int = 0 + + def calculate_overall_quality(self) -> float: + """Calculate overall quality score (0-100)""" + if not self.frame_quality_samples and not self.signal_quality_samples: + return 0.0 + + frame_score = self.avg_frame_quality_percent if self.frame_quality_samples else 100.0 + signal_score = self.avg_signal_quality_percent if self.signal_quality_samples else 100.0 + confidence_score = self.avg_confidence_score * 100 if self.confidence_samples else 100.0 + + # Weight the scores + weighted_score = (frame_score * 0.4 + signal_score * 0.4 + confidence_score * 0.2) + + # Apply error penalties + total_frames = len(self.frame_quality_samples) or 1 + error_rate = (self.format_errors + self.overflow_errors + self.corrupted_frames) / total_frames + penalty = min(error_rate * 50, 50) # Max 50% penalty + + return max(0.0, weighted_score - penalty) + + +@dataclass +class DecodedData: + """Container for decoded protocol data""" + # Channel information + channel_count: int = 0 + analog_channels: int = 0 + pcm_channels: int = 0 + discrete_channels: int = 0 + + # Data type classification + primary_data_type: 
DataType = DataType.UNKNOWN + secondary_data_types: Set[DataType] = field(default_factory=set) + + # Decoded field information + sample_decoded_fields: Dict[str, Any] = field(default_factory=dict) + available_field_names: List[str] = field(default_factory=list) + field_count: int = 0 + critical_fields: List[str] = field(default_factory=list) + + # Frame type analysis + frame_types: Set[str] = field(default_factory=set) + frame_type_distribution: Dict[str, int] = field(default_factory=dict) + + # Special frame counts + tmats_frames: int = 0 + setup_frames: int = 0 + data_frames: int = 0 + + # Decoder metadata + decoder_type: str = "Standard" + decoder_version: Optional[str] = None + decode_success_rate: float = 1.0 + + def add_frame_type(self, frame_type: str): + """Add a frame type to the analysis""" + self.frame_types.add(frame_type) + self.frame_type_distribution[frame_type] = self.frame_type_distribution.get(frame_type, 0) + 1 + + def get_dominant_frame_type(self) -> Optional[str]: + """Get the most common frame type""" + if not self.frame_type_distribution: + return None + return max(self.frame_type_distribution.items(), key=lambda x: x[1])[0] + + def update_data_type_classification(self): + """Update primary data type based on channel counts""" + if self.analog_channels > 0 and self.analog_channels >= self.pcm_channels: + self.primary_data_type = DataType.ANALOG + elif self.pcm_channels > 0: + self.primary_data_type = DataType.PCM + elif self.discrete_channels > 0: + self.primary_data_type = DataType.DISCRETE + elif self.tmats_frames > 0: + self.primary_data_type = DataType.TMATS + + # Add secondary types + if self.analog_channels > 0: + self.secondary_data_types.add(DataType.ANALOG) + if self.pcm_channels > 0: + self.secondary_data_types.add(DataType.PCM) + if self.discrete_channels > 0: + self.secondary_data_types.add(DataType.DISCRETE) + if self.tmats_frames > 0: + self.secondary_data_types.add(DataType.TMATS) + + +@dataclass +class EnhancedAnalysisData: + 
"""Complete enhanced analysis data combining all analysis types""" + timing: TimingAnalysis = field(default_factory=TimingAnalysis) + quality: QualityMetrics = field(default_factory=QualityMetrics) + decoded: DecodedData = field(default_factory=DecodedData) + + # Legacy compatibility fields (will be deprecated) + avg_clock_drift_ppm: float = field(init=False) + max_clock_drift_ppm: float = field(init=False) + timing_quality: str = field(init=False) + timing_stability: str = field(init=False) + anomaly_rate: float = field(init=False) + avg_confidence_score: float = field(init=False) + avg_frame_quality: float = field(init=False) + sequence_gaps: int = field(init=False) + rtc_sync_errors: int = field(init=False) + format_errors: int = field(init=False) + overflow_errors: int = field(init=False) + channel_count: int = field(init=False) + analog_channels: int = field(init=False) + pcm_channels: int = field(init=False) + tmats_frames: int = field(init=False) + has_internal_timing: bool = field(init=False) + primary_data_type: str = field(init=False) + decoder_type: str = field(init=False) + sample_decoded_fields: Dict[str, Any] = field(init=False) + available_field_names: List[str] = field(init=False) + field_count: int = field(init=False) + frame_types: Set[str] = field(init=False) + timing_accuracy: float = field(init=False) + signal_quality: float = field(init=False) + + def __post_init__(self): + """Initialize legacy compatibility properties""" + self._update_legacy_fields() + + def _update_legacy_fields(self): + """Update legacy fields from new structured data""" + # Timing fields + self.avg_clock_drift_ppm = self.timing.avg_clock_drift_ppm + self.max_clock_drift_ppm = self.timing.max_clock_drift_ppm + self.timing_quality = self.timing.quality.value + self.timing_stability = self.timing.stability.value + self.anomaly_rate = self.timing.anomaly_rate_percent + self.has_internal_timing = self.timing.has_internal_timing + self.timing_accuracy = 
self.timing.timing_accuracy_percent + + # Quality fields + self.avg_confidence_score = self.quality.avg_confidence_score + self.avg_frame_quality = self.quality.avg_frame_quality_percent + self.sequence_gaps = self.quality.sequence_gaps + self.rtc_sync_errors = self.timing.sync_errors + self.format_errors = self.quality.format_errors + self.overflow_errors = self.quality.overflow_errors + self.signal_quality = self.quality.avg_signal_quality_percent + + # Decoded data fields + self.channel_count = self.decoded.channel_count + self.analog_channels = self.decoded.analog_channels + self.pcm_channels = self.decoded.pcm_channels + self.tmats_frames = self.decoded.tmats_frames + self.primary_data_type = self.decoded.primary_data_type.value + self.decoder_type = self.decoded.decoder_type + self.sample_decoded_fields = self.decoded.sample_decoded_fields + self.available_field_names = self.decoded.available_field_names + self.field_count = self.decoded.field_count + self.frame_types = self.decoded.frame_types + + def update_from_components(self): + """Update legacy fields when component objects change""" + self._update_legacy_fields() + + def get_overall_health_score(self) -> float: + """Calculate overall health score for the enhanced analysis""" + quality_score = self.quality.calculate_overall_quality() + + # Factor in timing quality + timing_score = 100.0 + if self.timing.quality == TimingQuality.EXCELLENT: + timing_score = 100.0 + elif self.timing.quality == TimingQuality.GOOD: + timing_score = 80.0 + elif self.timing.quality == TimingQuality.MODERATE: + timing_score = 60.0 + elif self.timing.quality == TimingQuality.POOR: + timing_score = 30.0 + else: + timing_score = 50.0 # Unknown + + # Weight quality more heavily than timing + return (quality_score * 0.7 + timing_score * 0.3) \ No newline at end of file diff --git a/analyzer/models/protocols.py b/analyzer/models/protocols.py new file mode 100644 index 0000000..3122faa --- /dev/null +++ b/analyzer/models/protocols.py 
@@ -0,0 +1,258 @@ +""" +Protocol Information Data Models + +This module defines data structures for representing protocol information, +decoded fields, and protocol registry management. +""" + +from dataclasses import dataclass, field +from typing import Dict, List, Set, Optional, Any, Union +from enum import Enum, IntEnum +from abc import ABC, abstractmethod + + +class ProtocolType(IntEnum): + """Protocol type identifiers""" + UNKNOWN = 0 + + # Standard protocols + UDP = 10 + TCP = 11 + ICMP = 12 + IGMP = 13 + + # Enhanced protocols + CHAPTER10 = 100 + CH10 = 100 # Alias for CHAPTER10 + PTP = 101 + IENA = 102 + NTP = 103 + + +class ProtocolCategory(Enum): + """Protocol categories for organization""" + TRANSPORT = "transport" # UDP, TCP, ICMP + NETWORK = "network" # IP, IGMP + ENHANCED = "enhanced" # CH10, PTP, IENA + TIMING = "timing" # PTP, NTP + TELEMETRY = "telemetry" # CH10, IENA + + +class FieldType(Enum): + """Types of decoded fields""" + INTEGER = "integer" + FLOAT = "float" + STRING = "string" + BOOLEAN = "boolean" + TIMESTAMP = "timestamp" + IP_ADDRESS = "ip_address" + MAC_ADDRESS = "mac_address" + BINARY = "binary" + ENUM = "enum" + + +@dataclass +class DecodedField: + """Represents a single decoded field from a protocol""" + name: str + value: Any + field_type: FieldType + description: Optional[str] = None + unit: Optional[str] = None # e.g., "ms", "bytes", "ppm" + confidence: float = 1.0 # 0.0 to 1.0 + is_critical: bool = False # Critical field for protocol operation + + def __str__(self) -> str: + unit_str = f" {self.unit}" if self.unit else "" + return f"{self.name}: {self.value}{unit_str}" + + def format_value(self) -> str: + """Format the value for display""" + if self.field_type == FieldType.TIMESTAMP: + import datetime + if isinstance(self.value, (int, float)): + dt = datetime.datetime.fromtimestamp(self.value) + return dt.strftime("%H:%M:%S.%f")[:-3] + elif self.field_type == FieldType.FLOAT: + return f"{self.value:.3f}" + elif self.field_type 
== FieldType.IP_ADDRESS: + return str(self.value) + elif self.field_type == FieldType.BINARY: + if isinstance(self.value, bytes): + return self.value.hex()[:16] + "..." if len(self.value) > 8 else self.value.hex() + + return str(self.value) + + +@dataclass +class ProtocolInfo: + """Information about a detected protocol""" + protocol_type: ProtocolType + name: str + category: ProtocolCategory + version: Optional[str] = None + confidence: float = 1.0 # Detection confidence 0.0 to 1.0 + + # Protocol-specific metadata + port: Optional[int] = None + subtype: Optional[str] = None # e.g., "CH10-Data", "PTP-Sync" + vendor: Optional[str] = None + + def __str__(self) -> str: + version_str = f" v{self.version}" if self.version else "" + subtype_str = f"-{self.subtype}" if self.subtype else "" + return f"{self.name}{subtype_str}{version_str}" + + @property + def is_enhanced(self) -> bool: + """Check if this is an enhanced protocol requiring special handling""" + return self.category in [ProtocolCategory.ENHANCED, ProtocolCategory.TIMING, ProtocolCategory.TELEMETRY] + + +class StandardProtocol: + """Standard protocol definitions""" + + UDP = ProtocolInfo( + protocol_type=ProtocolType.UDP, + name="UDP", + category=ProtocolCategory.TRANSPORT + ) + + TCP = ProtocolInfo( + protocol_type=ProtocolType.TCP, + name="TCP", + category=ProtocolCategory.TRANSPORT + ) + + ICMP = ProtocolInfo( + protocol_type=ProtocolType.ICMP, + name="ICMP", + category=ProtocolCategory.NETWORK + ) + + IGMP = ProtocolInfo( + protocol_type=ProtocolType.IGMP, + name="IGMP", + category=ProtocolCategory.NETWORK + ) + + +class EnhancedProtocol: + """Enhanced protocol definitions""" + + CHAPTER10 = ProtocolInfo( + protocol_type=ProtocolType.CHAPTER10, + name="Chapter 10", + category=ProtocolCategory.TELEMETRY + ) + + PTP = ProtocolInfo( + protocol_type=ProtocolType.PTP, + name="PTP", + category=ProtocolCategory.TIMING + ) + + IENA = ProtocolInfo( + protocol_type=ProtocolType.IENA, + name="IENA", + 
category=ProtocolCategory.TELEMETRY + ) + + NTP = ProtocolInfo( + protocol_type=ProtocolType.NTP, + name="NTP", + category=ProtocolCategory.TIMING + ) + + +@dataclass +class ProtocolDecodeResult: + """Result of protocol decoding""" + protocol_info: ProtocolInfo + fields: List[DecodedField] = field(default_factory=list) + frame_type: Optional[str] = None # e.g., "CH10-Data", "PTP-Sync" + payload_size: int = 0 + errors: List[str] = field(default_factory=list) + warnings: List[str] = field(default_factory=list) + + def get_field(self, name: str) -> Optional[DecodedField]: + """Get a specific field by name""" + for field in self.fields: + if field.name == name: + return field + return None + + def get_critical_fields(self) -> List[DecodedField]: + """Get all critical fields""" + return [f for f in self.fields if f.is_critical] + + def has_errors(self) -> bool: + """Check if decode result has any errors""" + return len(self.errors) > 0 + + +class ProtocolRegistry: + """Registry for managing protocol information and detection""" + + def __init__(self): + self._protocols: Dict[ProtocolType, ProtocolInfo] = {} + self._register_standard_protocols() + self._register_enhanced_protocols() + + def _register_standard_protocols(self): + """Register standard protocols""" + for attr_name in dir(StandardProtocol): + if not attr_name.startswith('_'): + protocol = getattr(StandardProtocol, attr_name) + if isinstance(protocol, ProtocolInfo): + self._protocols[protocol.protocol_type] = protocol + + def _register_enhanced_protocols(self): + """Register enhanced protocols""" + for attr_name in dir(EnhancedProtocol): + if not attr_name.startswith('_'): + protocol = getattr(EnhancedProtocol, attr_name) + if isinstance(protocol, ProtocolInfo): + self._protocols[protocol.protocol_type] = protocol + + def get_protocol(self, protocol_type: ProtocolType) -> Optional[ProtocolInfo]: + """Get protocol info by type""" + return self._protocols.get(protocol_type) + + def get_protocol_by_name(self, 
name: str) -> Optional[ProtocolInfo]: + """Get protocol info by name""" + for protocol in self._protocols.values(): + if protocol.name.lower() == name.lower(): + return protocol + return None + + def get_enhanced_protocols(self) -> List[ProtocolInfo]: + """Get all enhanced protocols""" + return [p for p in self._protocols.values() if p.is_enhanced] + + def get_protocols_by_category(self, category: ProtocolCategory) -> List[ProtocolInfo]: + """Get all protocols in a category""" + return [p for p in self._protocols.values() if p.category == category] + + def register_protocol(self, protocol_info: ProtocolInfo): + """Register a new protocol""" + self._protocols[protocol_info.protocol_type] = protocol_info + + def is_enhanced_protocol(self, protocol_type: ProtocolType) -> bool: + """Check if protocol type is enhanced""" + protocol = self.get_protocol(protocol_type) + return protocol.is_enhanced if protocol else False + + +# Global protocol registry instance +PROTOCOL_REGISTRY = ProtocolRegistry() + + +def get_protocol_info(protocol_type: ProtocolType) -> Optional[ProtocolInfo]: + """Convenience function to get protocol info""" + return PROTOCOL_REGISTRY.get_protocol(protocol_type) + + +def is_enhanced_protocol(protocol_type: ProtocolType) -> bool: + """Convenience function to check if protocol is enhanced""" + return PROTOCOL_REGISTRY.is_enhanced_protocol(protocol_type) \ No newline at end of file diff --git a/analyzer/tui/textual/app_v2.py b/analyzer/tui/textual/app_v2.py index 46c52eb..83b4f4b 100644 --- a/analyzer/tui/textual/app_v2.py +++ b/analyzer/tui/textual/app_v2.py @@ -15,12 +15,15 @@ from rich.console import Group from rich.panel import Panel from rich.table import Table import time +import signal +import sys from .widgets.sparkline import SparklineWidget from .widgets.metric_card import MetricCard from .widgets.flow_table_v2 import EnhancedFlowTable from .widgets.split_flow_details import FlowMainDetailsPanel, SubFlowDetailsPanel from .widgets.debug_panel 
import DebugPanel +from ...analysis.background_analyzer import BackgroundAnalyzer if TYPE_CHECKING: from ...analysis.core import EthernetAnalyzer @@ -50,6 +53,7 @@ class StreamLensAppV2(App): ("4", "sort('quality')", "Sort Quality"), ("p", "toggle_pause", "Pause"), ("d", "show_details", "Details"), + ("v", "toggle_view_mode", "Toggle View"), ("?", "toggle_help", "Help"), ] @@ -73,6 +77,17 @@ class StreamLensAppV2(App): self.sub_title = "Network Flow Analysis" self.paused = False + # Background parsing support + self.background_analyzer = BackgroundAnalyzer( + analyzer=analyzer, + num_threads=4, + batch_size=1000, + progress_callback=None, + flow_update_callback=self._on_flow_update + ) + self.pcap_file = None + + # Metrics history for sparklines self.packets_history = [] self.bytes_history = [] @@ -127,11 +142,20 @@ class StreamLensAppV2(App): except: pass # Debug panel not visible + # Set initial subtitle with view mode + try: + flow_table = self.query_one("#flow-table", EnhancedFlowTable) + view_mode = flow_table.get_current_view_mode() + status = "PAUSED" if self.paused else "LIVE" + self.sub_title = f"Network Flow Analysis - {status} - {view_mode} VIEW" + except: + pass + self.update_metrics() - # Set up update intervals like TipTop - self.metric_timer = self.set_interval(0.5, self.update_metrics) # 2Hz for smooth graphs - self.flow_timer = self.set_interval(1.0, self.update_flows) # 1Hz for flow data + # Set up update intervals like TipTop (reduced frequency since we have real-time updates) + self.metric_timer = self.set_interval(2.0, self.update_metrics) # 0.5Hz for background updates + self.flow_timer = self.set_interval(5.0, self.update_flows) # 0.2Hz for fallback flow updates # Initialize sparkline history self._initialize_history() @@ -177,13 +201,23 @@ class StreamLensAppV2(App): self.packets_per_sec = self.total_packets / elapsed self.bytes_per_sec = summary.get('total_bytes', 0) / elapsed - # Count enhanced and outliers + # Count enhanced and outliers 
(thread-safe access) enhanced = 0 outliers = 0 - for flow in self.analyzer.flows.values(): - if flow.enhanced_analysis.decoder_type != "Standard": - enhanced += 1 - outliers += len(flow.outlier_frames) + try: + # Use background analyzer's thread-safe flow access + flows = self.background_analyzer.get_current_flows() + for flow in flows.values(): + if flow.enhanced_analysis.decoder_type != "Standard": + enhanced += 1 + outliers += len(flow.outlier_frames) + except Exception: + # Fallback to direct access if background analyzer not available + for flow in self.analyzer.flows.values(): + if flow.enhanced_analysis.decoder_type != "Standard": + enhanced += 1 + outliers += len(flow.outlier_frames) + self.enhanced_flows = enhanced self.outlier_count = outliers @@ -256,6 +290,53 @@ class StreamLensAppV2(App): flow_table = self.query_one("#flow-table", EnhancedFlowTable) flow_table.refresh_data() + + def _on_flow_update(self): + """Handle flow data updates from background parser""" + try: + # Use call_from_thread to safely update UI from background thread + self.call_from_thread(self._update_flow_ui) + except Exception: + # Ignore errors during shutdown + pass + + def _update_flow_ui(self): + """Update flow UI (called from main thread)""" + try: + # Update flow table + flow_table = self.query_one("#flow-table", EnhancedFlowTable) + flow_table.refresh_data() + + # Also update metrics in real-time + self.update_metrics() + except Exception: + # Flow table widget may not be available yet + pass + + def start_background_parsing(self, pcap_file: str): + """Start parsing PCAP file in background""" + self.pcap_file = pcap_file + + # Start background parsing + self.background_analyzer.start_parsing(pcap_file) + + def stop_background_parsing(self): + """Stop background parsing""" + self.background_analyzer.stop_parsing() + + def cleanup(self): + """Cleanup resources when app shuts down""" + try: + self.background_analyzer.cleanup() + # Cancel any pending timers + if 
self.metric_timer: + self.metric_timer.stop() + if self.flow_timer: + self.flow_timer.stop() + except Exception as e: + # Don't let cleanup errors prevent shutdown + pass + def on_enhanced_flow_table_flow_selected(self, event: EnhancedFlowTable.FlowSelected) -> None: """Handle flow selection events""" try: @@ -290,7 +371,14 @@ class StreamLensAppV2(App): """Toggle pause state""" self.paused = not self.paused status = "PAUSED" if self.paused else "LIVE" - self.sub_title = f"Network Flow Analysis - {status}" + + # Get current view mode to maintain it in subtitle + try: + flow_table = self.query_one("#flow-table", EnhancedFlowTable) + view_mode = flow_table.get_current_view_mode() + self.sub_title = f"Network Flow Analysis - {status} - {view_mode} VIEW" + except: + self.sub_title = f"Network Flow Analysis - {status}" def action_sort(self, key: str) -> None: """Sort flow table by specified key""" @@ -302,10 +390,29 @@ class StreamLensAppV2(App): # TODO: Implement detailed flow modal pass + def action_toggle_view_mode(self) -> None: + """Toggle between simplified and detailed view modes""" + flow_table = self.query_one("#flow-table", EnhancedFlowTable) + flow_table.toggle_view_mode() + + # Update subtitle to show current view mode + view_mode = flow_table.get_current_view_mode() + status = "PAUSED" if self.paused else "LIVE" + self.sub_title = f"Network Flow Analysis - {status} - {view_mode} VIEW" + def on_mouse_down(self, event: MouseDown) -> None: """Prevent default mouse down behavior to disable mouse interaction.""" event.prevent_default() def on_mouse_move(self, event: MouseMove) -> None: """Prevent default mouse move behavior to disable mouse interaction.""" - event.prevent_default() \ No newline at end of file + event.prevent_default() + + def action_quit(self) -> None: + """Quit the application with proper cleanup""" + self.cleanup() + self.exit() + + def on_unmount(self) -> None: + """Called when app is being unmounted - ensure cleanup""" + self.cleanup() \ No 
newline at end of file diff --git a/analyzer/tui/textual/widgets/flow_table_v2.py b/analyzer/tui/textual/widgets/flow_table_v2.py index d76f1f3..d62ded8 100644 --- a/analyzer/tui/textual/widgets/flow_table_v2.py +++ b/analyzer/tui/textual/widgets/flow_table_v2.py @@ -6,7 +6,7 @@ from textual.widgets import DataTable from textual.containers import Vertical from textual.reactive import reactive from textual.message import Message -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, List, Optional, Dict, Tuple from rich.text import Text from rich.box import ROUNDED @@ -43,6 +43,7 @@ class EnhancedFlowTable(Vertical): selected_flow_index = reactive(0) sort_key = reactive("flows") + simplified_view = reactive(False) # Toggle between detailed and simplified view def __init__(self, analyzer: 'EthernetAnalyzer', **kwargs): super().__init__(**kwargs) @@ -50,6 +51,7 @@ class EnhancedFlowTable(Vertical): self.flows_list = [] self.row_to_flow_map = {} # Map row keys to flow indices self.flow_metrics = {} # Store per-flow metrics history + self.view_mode_changed = False # Track when view mode changes def compose(self): """Create the enhanced flow table""" @@ -64,25 +66,49 @@ class EnhancedFlowTable(Vertical): def on_mount(self): """Initialize the table""" - table = self.query_one("#flows-data-table", DataTable) - - # Compact columns optimized for data density - table.add_column("#", width=2, key="num") - table.add_column("Source", width=18, key="source") - table.add_column("Proto", width=4, key="proto") - table.add_column("Destination", width=18, key="dest") - table.add_column("Extended", width=8, key="extended") - table.add_column("Frame Type", width=10, key="frame_type") - table.add_column("Pkts", width=6, key="rate") - table.add_column("Size", width=8, key="volume") - table.add_column("ΔT(ms)", width=8, key="delta_t") - table.add_column("σ(ms)", width=8, key="sigma") - table.add_column("Out", width=5, key="outliers") - + 
self._setup_table_columns() self.refresh_data() + def _setup_table_columns(self): + """Setup table columns based on current view mode""" + table = self.query_one("#flows-data-table", DataTable) + + # Clear existing columns if any + if table.columns: + table.clear(columns=True) + + if self.simplified_view: + # Simplified view - only main flows with summary data + table.add_column("#", width=3, key="num") + table.add_column("Source", width=18, key="source") + table.add_column("Destination", width=18, key="dest") + table.add_column("Protocol", width=8, key="protocol") + table.add_column("Packets", width=8, key="packets") + table.add_column("Volume", width=10, key="volume") + table.add_column("Avg ΔT", width=8, key="avg_delta") + table.add_column("Quality", width=8, key="quality") + table.add_column("Status", width=10, key="status") + else: + # Detailed view - original layout with subflows + table.add_column("#", width=2, key="num") + table.add_column("Source", width=18, key="source") + table.add_column("Proto", width=4, key="proto") + table.add_column("Destination", width=18, key="dest") + table.add_column("Extended", width=8, key="extended") + table.add_column("Frame Type", width=10, key="frame_type") + table.add_column("Pkts", width=6, key="rate") + table.add_column("Size", width=8, key="volume") + table.add_column("ΔT(ms)", width=8, key="delta_t") + table.add_column("σ(ms)", width=8, key="sigma") + table.add_column("Out", width=5, key="outliers") + def refresh_data(self): - """Refresh flow table with enhanced visualizations""" + """Refresh flow table with current view mode""" + # Check if view mode changed and rebuild table structure if needed + if self.view_mode_changed: + self._setup_table_columns() + self.view_mode_changed = False + table = self.query_one("#flows-data-table", DataTable) # Preserve cursor and scroll positions @@ -103,7 +129,39 @@ class EnhancedFlowTable(Vertical): # Get and sort flows self.flows_list = self._get_sorted_flows() - # Add flows with 
enhanced display + if self.simplified_view: + self._populate_simplified_view() + else: + self._populate_detailed_view() + + # Restore cursor position + if selected_row_key and selected_row_key in table.rows: + row_index = list(table.rows.keys()).index(selected_row_key) + table.move_cursor(row=row_index, column=cursor_column, animate=False) + elif table.row_count > 0: + # If original selection not found, try to maintain row position + new_row = min(cursor_row, table.row_count - 1) + table.move_cursor(row=new_row, column=cursor_column, animate=False) + + # Restore scroll position + table.scroll_to(x=scroll_x, y=scroll_y, animate=False) + + def _populate_simplified_view(self): + """Populate table with simplified flow summary data""" + table = self.query_one("#flows-data-table", DataTable) + + for i, flow in enumerate(self.flows_list): + # Create simplified row data - no subflows shown + row_data = self._create_simplified_row(i + 1, flow) + row_key = table.add_row(*row_data, key=f"flow_{i}") + + # Map row key to flow index + self.row_to_flow_map[row_key] = i + + def _populate_detailed_view(self): + """Populate table with detailed flow data including subflows""" + table = self.query_one("#flows-data-table", DataTable) + for i, flow in enumerate(self.flows_list): # Track metrics for this flow flow_key = f"{flow.src_ip}:{flow.src_port}-{flow.dst_ip}:{flow.dst_port}" @@ -127,20 +185,14 @@ class EnhancedFlowTable(Vertical): metrics['last_packet_count'] = flow.frame_count metrics['last_update'] = flow.last_seen - # Create row with visualizations + # Create row with detailed visualizations row_data = self._create_enhanced_row(i + 1, flow, metrics) row_key = table.add_row(*row_data, key=f"flow_{i}") # Map row key to flow index self.row_to_flow_map[row_key] = i - # Apply row styling based on status - style = self._get_flow_style(flow) - if style: - # Note: DataTable doesn't have set_row_style, using CSS classes instead - pass - - # Add sub-rows for protocol breakdown + # Add 
sub-rows for protocol breakdown (only in detailed view) if self._should_show_subrows(flow): sub_rows = self._create_protocol_subrows(flow) combinations = self._get_protocol_frame_combinations(flow) @@ -151,18 +203,6 @@ class EnhancedFlowTable(Vertical): if j < len(combinations): _, frame_type, _, _ = combinations[j] self.row_to_subflow_map[sub_key] = (i, frame_type) - - # Restore cursor position - if selected_row_key and selected_row_key in table.rows: - row_index = list(table.rows.keys()).index(selected_row_key) - table.move_cursor(row=row_index, column=cursor_column, animate=False) - elif table.row_count > 0: - # If original selection not found, try to maintain row position - new_row = min(cursor_row, table.row_count - 1) - table.move_cursor(row=new_row, column=cursor_column, animate=False) - - # Restore scroll position - table.scroll_to(x=scroll_x, y=scroll_y, animate=False) def _create_enhanced_row(self, num: int, flow: 'FlowStats', metrics: dict) -> List[Text]: """Create enhanced row with inline visualizations""" @@ -229,6 +269,64 @@ class EnhancedFlowTable(Vertical): delta_t_text, sigma_text, outlier_text ] + def _create_simplified_row(self, num: int, flow: 'FlowStats') -> List[Text]: + """Create simplified row with summary data only""" + # Flow number + num_text = Text(str(num), justify="right") + + # Source (IP only for simplified view) + source_text = Text(flow.src_ip) + + # Destination (IP only for simplified view) + dest_text = Text(flow.dst_ip) + + # Main protocol (transport + extended if available) + extended = self._get_extended_protocol(flow) + if extended != "-": + protocol_str = f"{flow.transport_protocol}/{extended}" + else: + protocol_str = flow.transport_protocol + protocol_text = Text(protocol_str, style="bold cyan") + + # Total packet count + packets_text = Text(str(flow.frame_count), justify="right") + + # Total volume + volume_text = Text(self._format_bytes(flow.total_bytes), justify="right") + + # Average delta T + if flow.avg_inter_arrival 
> 0: + delta_t_ms = flow.avg_inter_arrival * 1000 + if delta_t_ms >= 1000: + avg_delta_str = f"{delta_t_ms/1000:.1f}s" + else: + avg_delta_str = f"{delta_t_ms:.1f}ms" + else: + avg_delta_str = "N/A" + avg_delta_text = Text(avg_delta_str, justify="right") + + # Quality score as percentage + quality_score = self._get_quality_score(flow) + quality_text = Text(f"{quality_score}%", justify="right", + style="green" if quality_score >= 90 else + "yellow" if quality_score >= 70 else "red") + + # Flow status + status = self._get_flow_status(flow) + status_color = { + "Enhanced": "bold blue", + "Alert": "bold red", + "Warning": "yellow", + "Normal": "green" + }.get(status, "white") + status_text = Text(status, style=status_color) + + return [ + num_text, source_text, dest_text, protocol_text, + packets_text, volume_text, avg_delta_text, + quality_text, status_text + ] + def _create_rate_sparkline(self, history: List[float]) -> str: """Create mini sparkline for rate""" if not history: @@ -319,16 +417,60 @@ class EnhancedFlowTable(Vertical): def _should_show_subrows(self, flow: 'FlowStats') -> bool: """Determine if flow should show protocol breakdown""" - # Show subrows for flows with multiple frame types or enhanced analysis - return (len(flow.frame_types) > 1 or - flow.enhanced_analysis.decoder_type != "Standard") + # Only show subrows if there are enhanced frame types + enhanced_frame_types = self._get_enhanced_frame_types(flow) + return len(enhanced_frame_types) > 0 + + def _get_enhanced_frame_types(self, flow: 'FlowStats') -> Dict[str, 'FrameTypeStats']: + """Get only frame types that belong to enhanced protocols""" + enhanced_protocols = {'CHAPTER10', 'CH10', 'PTP', 'IENA'} + enhanced_frame_types = {} + + for frame_type, stats in flow.frame_types.items(): + # Check if this frame type belongs to an enhanced protocol + if any(enhanced_proto in frame_type for enhanced_proto in enhanced_protocols): + enhanced_frame_types[frame_type] = stats + elif 
frame_type.startswith(('CH10-', 'PTP-', 'IENA-')): + enhanced_frame_types[frame_type] = stats + elif frame_type in ('TMATS', 'TMATS-Data'): # TMATS is part of Chapter 10 + enhanced_frame_types[frame_type] = stats + + return enhanced_frame_types + + def _get_enhanced_protocol_frame_combinations(self, flow: 'FlowStats', enhanced_frame_types: Dict[str, 'FrameTypeStats']) -> List[Tuple[str, str, int, float]]: + """Get protocol/frame combinations for enhanced protocols only""" + combinations = [] + total_packets = flow.frame_count + + # Group enhanced frame types by extended protocol + protocol_frames = {} + + for frame_type, ft_stats in enhanced_frame_types.items(): + # Determine extended protocol for this frame type + extended_proto = self._get_extended_protocol_for_frame(flow, frame_type) + + if extended_proto not in protocol_frames: + protocol_frames[extended_proto] = [] + + protocol_frames[extended_proto].append((frame_type, ft_stats.count)) + + # Convert to list of tuples with percentages + for extended_proto, frame_list in protocol_frames.items(): + for frame_type, count in frame_list: + percentage = (count / total_packets * 100) if total_packets > 0 else 0 + combinations.append((extended_proto, frame_type, count, percentage)) + + # Sort by count (descending) + combinations.sort(key=lambda x: x[2], reverse=True) + return combinations def _create_protocol_subrows(self, flow: 'FlowStats') -> List[List[Text]]: - """Create sub-rows for protocol/frame type breakdown""" + """Create sub-rows for enhanced protocol/frame type breakdown only""" subrows = [] - combinations = self._get_protocol_frame_combinations(flow) + enhanced_frame_types = self._get_enhanced_frame_types(flow) + combinations = self._get_enhanced_protocol_frame_combinations(flow, enhanced_frame_types) - for extended_proto, frame_type, count, percentage in combinations: # Show all subrows + for extended_proto, frame_type, count, percentage in combinations: # Show all enhanced subrows # Calculate timing for 
this frame type if available frame_delta_t = "" frame_sigma = "" @@ -385,6 +527,16 @@ class EnhancedFlowTable(Vertical): self.sort_key = key self.refresh_data() + def toggle_view_mode(self): + """Toggle between simplified and detailed view modes""" + self.simplified_view = not self.simplified_view + self.view_mode_changed = True + self.refresh_data() + + def get_current_view_mode(self) -> str: + """Get current view mode as string""" + return "SIMPLIFIED" if self.simplified_view else "DETAILED" + class FlowSelected(Message): """Message sent when a flow is selected""" def __init__(self, flow: Optional['FlowStats'], subflow_type: Optional[str] = None) -> None: @@ -451,6 +603,19 @@ class EnhancedFlowTable(Vertical): self.post_message(self.FlowSelected(selected_flow, subflow_type)) # Helper methods from original implementation + def _get_extended_protocol_for_frame(self, flow: 'FlowStats', frame_type: str) -> str: + """Get extended protocol for a specific frame type""" + if frame_type.startswith('CH10') or frame_type == 'TMATS': + return 'CH10' + elif frame_type.startswith('PTP'): + return 'PTP' + elif frame_type == 'IENA': + return 'IENA' + elif frame_type == 'NTP': + return 'NTP' + else: + return self._get_extended_protocol(flow) + def _get_extended_protocol(self, flow: 'FlowStats') -> str: """Get extended protocol""" if flow.detected_protocol_types: diff --git a/analyzer/tui/textual/widgets/progress_bar.py b/analyzer/tui/textual/widgets/progress_bar.py new file mode 100644 index 0000000..d98be2f --- /dev/null +++ b/analyzer/tui/textual/widgets/progress_bar.py @@ -0,0 +1,121 @@ +""" +Progress Bar Widget for PCAP parsing progress +""" + +from textual.widget import Widget +from textual.reactive import reactive +from rich.console import RenderableType +from rich.progress import Progress, BarColumn, TextColumn, TaskProgressColumn, MofNCompleteColumn, TimeRemainingColumn +from rich.text import Text +import time + + +class ParsingProgressBar(Widget): + """Progress bar 
widget for PCAP parsing with rich formatting""" + + DEFAULT_CSS = """ + ParsingProgressBar { + height: 3; + margin: 1; + padding: 1; + background: $surface; + border: solid $accent; + } + """ + + # Reactive attributes + progress = reactive(0.0) + total_packets = reactive(0) + processed_packets = reactive(0) + packets_per_second = reactive(0.0) + estimated_time_remaining = reactive(0.0) + is_complete = reactive(False) + is_visible = reactive(False) + error_message = reactive("") + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.start_time = None + + def render(self) -> RenderableType: + """Render the progress bar""" + if not self.is_visible: + return Text("") + + if self.error_message: + return Text(f"❌ Error: {self.error_message}", style="red") + + if self.is_complete: + elapsed = time.time() - self.start_time if self.start_time else 0 + return Text( + f"✅ Parsing complete! {self.processed_packets:,} packets processed in {elapsed:.1f}s", + style="green" + ) + + # Create rich progress bar + progress = Progress( + TextColumn("[bold blue]Parsing PCAP..."), + BarColumn(bar_width=40), + TaskProgressColumn(), + MofNCompleteColumn(), + TextColumn("•"), + TextColumn("{task.fields[rate]}"), + TextColumn("•"), + TimeRemainingColumn(), + expand=False + ) + + # Format rate display + if self.packets_per_second >= 1000: + rate_str = f"{self.packets_per_second/1000:.1f}K pkt/s" + else: + rate_str = f"{self.packets_per_second:.0f} pkt/s" + + task = progress.add_task( + "parsing", + total=self.total_packets, + completed=self.processed_packets, + rate=rate_str + ) + + return progress + + def start_parsing(self, total_packets: int): + """Start showing progress for parsing""" + self.total_packets = total_packets + self.processed_packets = 0 + self.progress = 0.0 + self.is_complete = False + self.is_visible = True + self.error_message = "" + self.start_time = time.time() + self.refresh() + + def update_progress(self, processed: int, total: int, pps: float, eta: 
float): + """Update progress values""" + self.processed_packets = processed + self.total_packets = total + self.packets_per_second = pps + self.estimated_time_remaining = eta + self.progress = (processed / total * 100) if total > 0 else 0 + self.refresh() + + def complete_parsing(self): + """Mark parsing as complete""" + self.is_complete = True + self.refresh() + # Hide after 3 seconds + self.set_timer(3.0, self.hide_progress) + + def show_error(self, error: str): + """Show error message""" + self.error_message = error + self.is_visible = True + self.refresh() + # Hide after 5 seconds + self.set_timer(5.0, self.hide_progress) + + def hide_progress(self): + """Hide the progress bar""" + self.is_visible = False + self.refresh() \ No newline at end of file diff --git a/analyzer/tui/textual/widgets/split_flow_details.py b/analyzer/tui/textual/widgets/split_flow_details.py index 22eabf9..000cba8 100644 --- a/analyzer/tui/textual/widgets/split_flow_details.py +++ b/analyzer/tui/textual/widgets/split_flow_details.py @@ -10,7 +10,7 @@ from rich.text import Text from rich.panel import Panel from rich.console import RenderableType, Group from rich.table import Table -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING, Optional, Dict if TYPE_CHECKING: from ....models import FlowStats, FrameTypeStats @@ -106,19 +106,18 @@ class FlowMainDetailsPanel(Vertical): sections.append(Text("Enhanced Analysis", style="bold green")) sections.append(enhanced_table) - # Timing analysis - only show if no sub-flows exist + # Timing analysis - only show if no enhanced sub-flows exist # Match the same logic as _should_show_subrows in flow_table_v2.py - has_subflows = (len(flow.frame_types) > 1 or - flow.enhanced_analysis.decoder_type != "Standard") + has_enhanced_subflows = self._has_enhanced_subflows(flow) # Debug output try: debug_panel = self.app.query_one("#debug-panel") - debug_panel.add_debug_message(f"TIMING_LOGIC: {flow.src_ip}:{flow.src_port} - 
types={len(flow.frame_types)}, decoder={flow.enhanced_analysis.decoder_type}, has_subflows={has_subflows}") + debug_panel.add_debug_message(f"TIMING_LOGIC: {flow.src_ip}:{flow.src_port} - types={len(flow.frame_types)}, decoder={flow.enhanced_analysis.decoder_type}, has_enhanced_subflows={has_enhanced_subflows}") except: pass - if not has_subflows: + if not has_enhanced_subflows: try: debug_panel = self.app.query_one("#debug-panel") debug_panel.add_debug_message(f"BRANCH: Taking FULL timing branch for {flow.src_ip}:{flow.src_port}") @@ -160,6 +159,19 @@ class FlowMainDetailsPanel(Vertical): return Group(*sections) + def _has_enhanced_subflows(self, flow: 'FlowStats') -> bool: + """Check if flow has enhanced frame types that warrant sub-rows""" + enhanced_protocols = {'CHAPTER10', 'CH10', 'PTP', 'IENA'} + + for frame_type in flow.frame_types.keys(): + # Check if this frame type belongs to an enhanced protocol + if any(enhanced_proto in frame_type for enhanced_proto in enhanced_protocols): + return True + elif frame_type.startswith(('CH10-', 'PTP-', 'IENA-')): + return True + + return False + def _format_bytes(self, bytes_count: int) -> str: """Format byte count with units""" if bytes_count >= 1_000_000_000: @@ -276,14 +288,23 @@ class SubFlowDetailsPanel(Vertical): return Group(*sections) def _create_subflow_summary(self, flow: 'FlowStats') -> RenderableType: - """Create summary of all sub-flows""" - if not flow.frame_types or len(flow.frame_types) <= 1: + """Create summary of all sub-flows for enhanced flows""" + # For enhanced flows, show ALL frame types, not just enhanced ones + if flow.enhanced_analysis.decoder_type != "Standard": + frame_types_to_show = flow.frame_types + title = "Sub-Flow Summary (All Frame Types)" + else: + # For standard flows, only show enhanced frame types if any + frame_types_to_show = self._get_enhanced_frame_types(flow) + title = "Enhanced Sub-Flow Summary" + + if not frame_types_to_show: return Text("No sub-flows available", 
style="dim") sections = [] - sections.append(Text("Sub-Flow Summary", style="bold yellow")) + sections.append(Text(title, style="bold yellow")) - # Frame type breakdown table + # Frame type breakdown table for enhanced protocols only frame_table = Table(show_header=True, box=None) frame_table.add_column("Frame Type", style="blue") frame_table.add_column("Count", justify="right") @@ -294,7 +315,7 @@ class SubFlowDetailsPanel(Vertical): total = flow.frame_count for frame_type, stats in sorted( - flow.frame_types.items(), + frame_types_to_show.items(), key=lambda x: x[1].count, reverse=True ): @@ -315,6 +336,22 @@ class SubFlowDetailsPanel(Vertical): sections.append(frame_table) return Group(*sections) + def _get_enhanced_frame_types(self, flow: 'FlowStats') -> Dict[str, 'FrameTypeStats']: + """Get only frame types that belong to enhanced protocols""" + enhanced_protocols = {'CHAPTER10', 'CH10', 'PTP', 'IENA'} + enhanced_frame_types = {} + + for frame_type, stats in flow.frame_types.items(): + # Check if this frame type belongs to an enhanced protocol + if any(enhanced_proto in frame_type for enhanced_proto in enhanced_protocols): + enhanced_frame_types[frame_type] = stats + elif frame_type.startswith(('CH10-', 'PTP-', 'IENA-')): + enhanced_frame_types[frame_type] = stats + elif frame_type in ('TMATS', 'TMATS-Data'): # TMATS is part of Chapter 10 + enhanced_frame_types[frame_type] = stats + + return enhanced_frame_types + def _format_bytes(self, bytes_count: int) -> str: """Format byte count with units""" if bytes_count >= 1_000_000_000: diff --git a/data_summary.md b/data_summary.md new file mode 100644 index 0000000..0838ead --- /dev/null +++ b/data_summary.md @@ -0,0 +1,461 @@ +# StreamLens Data Flow Architecture + +## Overview + +StreamLens is a real-time network traffic analyzer that processes packets, groups them into flows, detects protocols, and provides enhanced analysis for specialized protocols like Chapter 10, PTP, and IENA. 
+ +## Complete Data Flow + +### 1. Packet Input Layer +```python +# analyzer/analysis/core.py:36-43 +def analyze_pcap(self, pcap_file: str): + packets = rdpcap(pcap_file) # Scapy reads PCAP + self._process_packets(packets) # Process each packet +``` + +**Sources**: +- PCAP/PCAPNG files +- Live network interfaces via Scapy + +### 2. Packet Processing Engine +```python +# analyzer/analysis/core.py:65-67 +def _process_packets(self, packets): + for i, packet in enumerate(packets): + self.dissector.dissect_packet(packet, i + 1) +``` + +**Per-packet operations**: +- Frame number assignment +- Timestamp extraction +- Initial protocol detection + +### 3. Flow Management & Classification +```python +# analyzer/analysis/flow_manager.py:38-58 +def process_packet(self, packet, frame_num): + # Extract network identifiers + src_ip, dst_ip = ip_layer.src, ip_layer.dst + transport_info = self._extract_transport_info(packet) # ports, protocol + + # Create unique flow key + flow_key = (src_ip, dst_ip) + + # Initialize new flow or use existing + if flow_key not in self.flows: + self.flows[flow_key] = FlowStats(...) +``` + +**Flow Bucketing**: +- Packets grouped by `(src_ip, dst_ip)` pairs +- Each flow tracked in a `FlowStats` object +- O(1) flow lookup via hash map + +### 4. Protocol Detection Pipeline +```python +# analyzer/analysis/flow_manager.py:89-96 +# Basic protocols (UDP, TCP, ICMP) +protocols = self._detect_basic_protocols(packet) + +# Enhanced protocol detection +dissection_results = self._dissect_packet(packet, frame_num) +enhanced_protocols = self._extract_enhanced_protocols(dissection_results) +flow.detected_protocol_types.update(enhanced_protocols) + +# Fallback detection +fallback_protocols = self._detect_fallback_protocols(packet, dissection_results) +``` + +**Protocol Hierarchy**: +1. **Basic**: Transport layer (UDP/TCP/ICMP) +2. **Enhanced**: Application layer (CH10/PTP/IENA) via specialized dissectors +3. 
**Fallback**: Port-based heuristics for unidentified protocols + +### 5. Frame Type Classification +```python +# analyzer/analysis/flow_manager.py:98-100 +frame_type = self._classify_frame_type(packet, dissection_results) +self._update_frame_type_stats(flow, frame_type, frame_num, timestamp, packet_size) +``` + +**Sub-Flow Binning**: +- Each packet classified into specific frame types +- Enhanced protocols: `CH10-Data`, `CH10-ACTTS`, `PTP-Sync`, `IENA-Control` +- Standard protocols: `UDP-Data`, `TCP-Stream` + +### 6. Statistical Analysis Engine +```python +# analyzer/analysis/flow_manager.py:105-112 +if len(flow.timestamps) > 1: + inter_arrival = timestamp - flow.timestamps[-2] + flow.inter_arrival_times.append(inter_arrival) + + # Real-time statistics if enabled + if self.statistics_engine and self.statistics_engine.enable_realtime: + self.statistics_engine.update_realtime_statistics(flow_key, flow) +``` + +**Timing Calculations**: +- **Flow-level**: Aggregate timing across all packets +- **Frame-type level**: Per-subtype timing in `FrameTypeStats` +- **Outlier detection**: Based on configurable sigma thresholds +- **Real-time updates**: Live statistical calculations during capture + +### 7. Enhanced Protocol Analysis +```python +# analyzer/analysis/flow_manager.py:102-103 +self._perform_enhanced_analysis(packet, flow, frame_num, transport_info) +``` + +**Specialized Decoders**: +- **Chapter 10**: Timing analysis, sequence validation, TMATS parsing +- **PTP**: Clock synchronization analysis, message type classification +- **IENA**: Packet structure validation, timing quality assessment + +### 8. 
TUI Presentation Layer +```python +# analyzer/tui/textual/widgets/flow_table_v2.py:143-153 +if self._should_show_subrows(flow): + enhanced_frame_types = self._get_enhanced_frame_types(flow) + combinations = self._get_enhanced_protocol_frame_combinations(flow, enhanced_frame_types) + + for extended_proto, frame_type, count, percentage in combinations: + sub_key = table.add_row(*sub_row, key=f"flow_{i}_sub_{j}") + self.row_to_subflow_map[sub_key] = (i, frame_type) +``` + +**Presentation Logic**: +- **Main Rows**: Flow-level aggregates (all packets in flow) +- **Sub-Rows**: Only enhanced protocols (`CH10-*`, `PTP-*`, `IENA-*`) +- **Standard protocols**: Aggregated into main flow timing +- **Timing display**: ΔT (avg inter-arrival), σ (std deviation), outlier count + +## Core Data Types + +### FlowStats +Main container for flow-level statistics: + +```python +@dataclass +class FlowStats: + # Network identifiers + src_ip: str + dst_ip: str + src_port: int = 0 # 0 if not applicable + dst_port: int = 0 + transport_protocol: str = "Unknown" # TCP, UDP, ICMP, IGMP, etc. 
+ traffic_classification: str = "Unknown" # Unicast, Multicast, Broadcast + + # Aggregate statistics + frame_count: int = 0 + total_bytes: int = 0 + first_seen: float = 0.0 # Timestamp of first frame + last_seen: float = 0.0 # Timestamp of last frame + duration: float = 0.0 # Flow duration in seconds + + # Timing data + timestamps: List[float] = field(default_factory=list) + frame_numbers: List[int] = field(default_factory=list) + inter_arrival_times: List[float] = field(default_factory=list) + avg_inter_arrival: float = 0.0 + std_inter_arrival: float = 0.0 + jitter: float = 0.0 + + # Outlier tracking + outlier_frames: List[int] = field(default_factory=list) + outlier_details: List[Tuple[int, float]] = field(default_factory=list) # (frame_number, time_delta) + + # Protocol classification + protocols: Set[str] = field(default_factory=set) # Basic protocols seen + detected_protocol_types: Set[str] = field(default_factory=set) # Enhanced protocols (CH10, PTP, IENA) + + # Sub-flow bins + frame_types: Dict[str, FrameTypeStats] = field(default_factory=dict) + + # Enhanced analysis results + enhanced_analysis: EnhancedAnalysisData = field(default_factory=EnhancedAnalysisData) +``` + +### FrameTypeStats +Statistics for specific frame types within a flow: + +```python +@dataclass +class FrameTypeStats: + # Frame type identifier + frame_type: str # e.g., "CH10-Data", "PTP-Sync", "UDP-Data" + + # Basic statistics + count: int = 0 + total_bytes: int = 0 + + # Timing data + timestamps: List[float] = field(default_factory=list) + frame_numbers: List[int] = field(default_factory=list) + inter_arrival_times: List[float] = field(default_factory=list) + avg_inter_arrival: float = 0.0 + std_inter_arrival: float = 0.0 + + # Outlier tracking + outlier_frames: List[int] = field(default_factory=list) + outlier_details: List[Tuple[int, float]] = field(default_factory=list) +``` + +### Enhanced Analysis Data Models + +#### New Modular Structure +The enhanced analysis data has been 
restructured into focused components for better organization and extensibility: + +```python +@dataclass +class TimingAnalysis: + """Timing analysis results for enhanced protocols""" + avg_clock_drift_ppm: float = 0.0 + max_clock_drift_ppm: float = 0.0 + min_clock_drift_ppm: float = 0.0 + drift_variance: float = 0.0 + + quality: TimingQuality = TimingQuality.UNKNOWN # EXCELLENT, GOOD, MODERATE, POOR + stability: TimingStability = TimingStability.UNKNOWN # STABLE, VARIABLE, UNSTABLE + + timing_accuracy_percent: float = 0.0 + sync_errors: int = 0 + timing_anomalies: int = 0 + anomaly_rate_percent: float = 0.0 + + has_internal_timing: bool = False + rtc_sync_available: bool = False + +@dataclass +class QualityMetrics: + """Quality metrics for enhanced protocol data""" + avg_frame_quality_percent: float = 0.0 + frame_quality_samples: List[float] = field(default_factory=list) + + avg_signal_quality_percent: float = 0.0 + signal_quality_samples: List[float] = field(default_factory=list) + + sequence_gaps: int = 0 + format_errors: int = 0 + overflow_errors: int = 0 + checksum_errors: int = 0 + + avg_confidence_score: float = 0.0 + confidence_samples: List[float] = field(default_factory=list) + low_confidence_frames: int = 0 + + corrupted_frames: int = 0 + missing_frames: int = 0 + duplicate_frames: int = 0 + +@dataclass +class DecodedData: + """Container for decoded protocol data""" + channel_count: int = 0 + analog_channels: int = 0 + pcm_channels: int = 0 + discrete_channels: int = 0 + + primary_data_type: DataType = DataType.UNKNOWN # ANALOG, PCM, DISCRETE, TIME, VIDEO, TMATS + secondary_data_types: Set[DataType] = field(default_factory=set) + + sample_decoded_fields: Dict[str, Any] = field(default_factory=dict) + available_field_names: List[str] = field(default_factory=list) + field_count: int = 0 + critical_fields: List[str] = field(default_factory=list) + + frame_types: Set[str] = field(default_factory=set) + frame_type_distribution: Dict[str, int] = 
field(default_factory=dict) + + tmats_frames: int = 0 + setup_frames: int = 0 + data_frames: int = 0 + + decoder_type: str = "Standard" + decoder_version: Optional[str] = None + decode_success_rate: float = 1.0 + +@dataclass +class EnhancedAnalysisData: + """Complete enhanced analysis data combining all analysis types""" + timing: TimingAnalysis = field(default_factory=TimingAnalysis) + quality: QualityMetrics = field(default_factory=QualityMetrics) + decoded: DecodedData = field(default_factory=DecodedData) + + # Legacy compatibility fields maintained for backward compatibility + # (automatically synchronized with component data) +``` + +#### Protocol Information Models + +```python +@dataclass +class DecodedField: + """Represents a single decoded field from a protocol""" + name: str + value: Any + field_type: FieldType # INTEGER, FLOAT, STRING, BOOLEAN, TIMESTAMP, etc. + description: Optional[str] = None + unit: Optional[str] = None # e.g., "ms", "bytes", "ppm" + confidence: float = 1.0 # 0.0 to 1.0 + is_critical: bool = False + +@dataclass +class ProtocolInfo: + """Information about a detected protocol""" + protocol_type: ProtocolType + name: str + category: ProtocolCategory # TRANSPORT, NETWORK, ENHANCED, TIMING, TELEMETRY + version: Optional[str] = None + confidence: float = 1.0 + + port: Optional[int] = None + subtype: Optional[str] = None # e.g., "CH10-Data", "PTP-Sync" + vendor: Optional[str] = None + +@dataclass +class ProtocolDecodeResult: + """Result of protocol decoding""" + protocol_info: ProtocolInfo + fields: List[DecodedField] = field(default_factory=list) + frame_type: Optional[str] = None + payload_size: int = 0 + errors: List[str] = field(default_factory=list) + warnings: List[str] = field(default_factory=list) +``` + +## Key Processing Components + +### EthernetAnalyzer +Top-level orchestrator with background processing capabilities: +```python +class EthernetAnalyzer: + def __init__(self, enable_realtime: bool = False, outlier_threshold_sigma: 
float = 3.0):
        self.statistics_engine = StatisticsEngine(outlier_threshold_sigma, enable_realtime)
        self.flow_manager = FlowManager(self.statistics_engine)
        self.all_packets: List[Packet] = []
        self.flows = self.flow_manager.flows  # Exposed for UI access
        self.background_analyzer = None  # For large PCAP background processing
```

### BackgroundAnalyzer
Thread pool based PCAP processing for large files:
```python
class BackgroundAnalyzer:
    def __init__(self, analyzer: EthernetAnalyzer,
                 num_threads: int = 4,
                 batch_size: int = 1000,
                 progress_callback: Optional[Callable[[ParsingProgress], None]] = None,
                 flow_update_callback: Optional[Callable[[], None]] = None):
        self.analyzer = analyzer
        self.executor = ThreadPoolExecutor(max_workers=num_threads)
        self.batch_size = batch_size
        self.progress_callback = progress_callback
        self.flow_update_callback = flow_update_callback
        self.is_running = False
        self._shutdown_event = threading.Event()
```

### FlowManager
Packet processing and flow creation with enhanced protocol support:
```python
class FlowManager:
    def __init__(self, statistics_engine=None):
        self.flows: Dict[Tuple[str, str], FlowStats] = {}
        self.specialized_dissectors = {
            'chapter10': Chapter10Dissector(),
            'ptp': PTPDissector(),
            'iena': IENADissector()
        }
        self.enhanced_ch10_decoder = EnhancedChapter10Decoder()
        self.ch10_timing_plugin = Chapter10TimingAnalysisPlugin()
        self.protocol_registry = ProtocolRegistry()  # New protocol management
```

### ProtocolRegistry
Centralized protocol information management:
```python
class ProtocolRegistry:
    def __init__(self):
        self._protocols: Dict[ProtocolType, ProtocolInfo] = {}
        self._register_standard_protocols()  # UDP, TCP, ICMP, IGMP
        self._register_enhanced_protocols()  # CH10, PTP, IENA, NTP

    def get_enhanced_protocols(self) -> List[ProtocolInfo]: ...
    def is_enhanced_protocol(self, protocol_type: ProtocolType) -> bool: ...
    def get_protocols_by_category(self, category: ProtocolCategory) -> List[ProtocolInfo]: ...
```

## Enhanced Protocol Definitions

### Chapter 10 (CH10)
IRIG 106 Chapter 10 data recording format:
- Frame types: `CH10-Data`, `CH10-ACTTS`, `CH10-GPS`, `CH10-ACMI`, `CH10-Timing`
- Enhanced timing analysis with clock drift detection
- TMATS metadata parsing

### Precision Time Protocol (PTP)
IEEE 1588 precision clock synchronization:
- Frame types: `PTP-Sync`, `PTP-Delay_Req`, `PTP-Follow_Up`
- Clock synchronization quality analysis

### IENA
iNET-compatible Ethernet protocol:
- Frame types: `IENA-Control`, `IENA-Data`
- Packet structure validation

## TUI Display Rules

### Main Flow Row Shows:
- Source/Destination IP:Port
- Transport protocol (UDP/TCP)
- Total packets and volume
- **Full timing statistics** (ΔT, σ, outliers) if no enhanced sub-flows exist
- **Basic timeline only** (duration, first/last seen) if enhanced sub-flows exist

### Sub-Rows Show (Enhanced Protocols Only):
- Frame type name (e.g., `CH10-Data`)
- Packet count and percentage of flow
- Individual timing statistics (ΔT, σ, outliers)
- Protocol-specific enhanced analysis

### Standard Protocols (UDP/TCP):
- Aggregated into main flow statistics
- No sub-rows created
- Timing calculated across all standard packets in flow

## Processing Efficiency

- **Streaming**: Packets processed one-by-one (memory efficient)
- **Bucketing**: O(1) flow lookup via hash map
- **Hierarchical**: Statistics calculated at multiple levels simultaneously
- **Real-time**: Live updates during capture without full recalculation
- **Selective display**: Only enhanced protocols get detailed sub-flow analysis
- **Background processing**: Large PCAP files processed in thread pools with progressive UI updates
- **Thread-safe updates**: Concurrent data access protected with locks and atomic operations
- **Modular data structures**: Separated timing, quality, and decoded data for focused analysis

## Recent Architecture
Enhancements + +### Background Processing System +- **ThreadPoolExecutor**: Multi-threaded PCAP processing for large files +- **Progress callbacks**: Real-time parsing progress updates to TUI +- **Flow update callbacks**: Progressive flow data updates every 50 packets +- **Thread-safe shutdown**: Proper cleanup and signal handling + +### Modular Data Models +- **TimingAnalysis**: Dedicated timing metrics with quality classifications +- **QualityMetrics**: Comprehensive error tracking and confidence scoring +- **DecodedData**: Structured protocol field and channel information +- **ProtocolRegistry**: Centralized protocol information management +- **Legacy compatibility**: Maintains existing API while enabling new features + +### Enhanced TUI Features +- **Progressive loading**: TUI appears immediately while PCAP loads in background +- **Real-time updates**: Flow table refreshes as new packets are processed +- **Split panel layout**: Separate main flow and sub-flow detail views +- **Enhanced-only sub-rows**: Standard protocols aggregate into main flow +- **Timing column visibility**: Context-aware display based on sub-flow presence + +This architecture enables **real-time analysis of multi-protocol network traffic** with **enhanced timing analysis** for specialized protocols while maintaining **efficient memory usage**, **responsive UI updates**, and **scalable background processing** for large PCAP files. 
\ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..07ddb8c --- /dev/null +++ b/requirements.txt @@ -0,0 +1,25 @@ +contourpy==1.3.2 +cycler==0.12.1 +fonttools==4.59.0 +kiwisolver==1.4.8 +linkify-it-py==2.0.3 +markdown-it-py==3.0.0 +matplotlib==3.10.3 +mdit-py-plugins==0.4.2 +mdurl==0.1.2 +numpy==2.3.2 +packaging==25.0 +pillow==11.3.2 +Pygments==2.19.2 +pyparsing==3.2.3 +PySide6==6.9.1 +PySide6_Addons==6.9.1 +PySide6_Essentials==6.9.1 +python-dateutil==2.9.0.post0 +rich==14.1.0 +scapy==2.6.1 +shiboken6==6.9.1 +six==1.17.0 +textual==5.0.1 +typing_extensions==4.14.1 +uc-micro-py==1.0.3 \ No newline at end of file