tabbed frametype filtering
This commit is contained in:
@@ -37,7 +37,7 @@ class BackgroundAnalyzer:
|
||||
"""Analyzer that processes PCAP files in background threads"""
|
||||
|
||||
def __init__(self, analyzer: EthernetAnalyzer,
|
||||
num_threads: int = 4,
|
||||
num_threads: int = 1, # Force single-threaded to avoid race conditions
|
||||
batch_size: int = 1000,
|
||||
progress_callback: Optional[Callable[[ParsingProgress], None]] = None,
|
||||
flow_update_callback: Optional[Callable[[], None]] = None):
|
||||
@@ -74,7 +74,7 @@ class BackgroundAnalyzer:
|
||||
|
||||
# Flow update batching
|
||||
self.packets_since_update = 0
|
||||
self.update_batch_size = 50 # Update UI every 50 packets (more frequent)
|
||||
self.update_batch_size = 100 # Update UI every 100 packets (slower for less frequent updates)
|
||||
self.update_lock = threading.Lock()
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
@@ -87,6 +87,7 @@ class BackgroundAnalyzer:
|
||||
return
|
||||
|
||||
self.is_parsing = True
|
||||
self.analyzer.is_parsing = True # Set parsing flag on analyzer
|
||||
self.stop_event.clear()
|
||||
self.start_time = time.time()
|
||||
self.processed_packets = 0
|
||||
@@ -221,8 +222,8 @@ class BackgroundAnalyzer:
|
||||
try:
|
||||
current_time = time.time()
|
||||
|
||||
# Update every 0.5 seconds
|
||||
if current_time - last_update_time >= 0.5:
|
||||
# Update every 2.0 seconds (slower progress updates)
|
||||
if current_time - last_update_time >= 2.0:
|
||||
with self.parse_lock:
|
||||
current_packets = self.processed_packets
|
||||
|
||||
@@ -246,7 +247,7 @@ class BackgroundAnalyzer:
|
||||
if all(f.done() for f in futures):
|
||||
break
|
||||
|
||||
time.sleep(0.1)
|
||||
time.sleep(0.5) # Slower monitoring loop
|
||||
except KeyboardInterrupt:
|
||||
self.logger.info("Monitor thread interrupted")
|
||||
break
|
||||
@@ -256,6 +257,7 @@ class BackgroundAnalyzer:
|
||||
|
||||
# Final update
|
||||
self.is_parsing = False
|
||||
self.analyzer.is_parsing = False # Clear parsing flag on analyzer
|
||||
self._report_progress(is_complete=True)
|
||||
|
||||
# Final flow update
|
||||
@@ -267,7 +269,7 @@ class BackgroundAnalyzer:
|
||||
|
||||
# Calculate final statistics
|
||||
with self.flow_lock:
|
||||
self.analyzer.statistics_engine.calculate_all_statistics()
|
||||
self.analyzer.statistics_engine.calculate_flow_statistics(self.analyzer.flows)
|
||||
|
||||
def _report_progress(self, packets_per_second: float = 0,
|
||||
elapsed_time: float = 0,
|
||||
|
||||
@@ -26,6 +26,7 @@ class EthernetAnalyzer:
|
||||
self.all_packets: List[Packet] = []
|
||||
self.is_live = False
|
||||
self.stop_capture = False
|
||||
self.is_parsing = False # Flag to track parsing state
|
||||
|
||||
# Expose flows for backward compatibility
|
||||
self.flows = self.flow_manager.flows
|
||||
|
||||
@@ -256,21 +256,31 @@ class FlowManager:
|
||||
decoded = ch10_info['decoded_payload']
|
||||
data_type_name = decoded.get('data_type_name', 'CH10-Data')
|
||||
|
||||
# Simplify timing frame names for display
|
||||
# For timing analysis purposes, group frames by their actual timing behavior
|
||||
# rather than their semantic meaning. Based on debug analysis:
|
||||
# - Some timing frames have ~26s intervals (high-level timing)
|
||||
# - Other frames (including some timing) have ~100ms intervals (data stream)
|
||||
|
||||
# Keep high-level timing frames separate (they have very different timing)
|
||||
if 'ACTTS' in data_type_name:
|
||||
return 'CH10-ACTTS'
|
||||
# Note: Extended Timing frames often have the same ~100ms timing as data frames
|
||||
# so they should be grouped with CH10-Data for accurate timing analysis
|
||||
elif 'Sync' in data_type_name and 'Custom' in data_type_name:
|
||||
return 'CH10-Sync'
|
||||
elif 'Clock' in data_type_name and 'Custom' in data_type_name:
|
||||
return 'CH10-Clock'
|
||||
elif ('Time' in data_type_name or 'Timing' in data_type_name) and 'Custom' in data_type_name:
|
||||
# Custom timing frames often have the 26s interval pattern
|
||||
if 'Time' in data_type_name:
|
||||
return 'CH10-Time'
|
||||
else:
|
||||
return 'CH10-Timing'
|
||||
# Special data types that should remain separate
|
||||
elif 'GPS NMEA' in data_type_name:
|
||||
return 'CH10-GPS'
|
||||
elif 'EAG ACMI' in data_type_name:
|
||||
return 'CH10-ACMI'
|
||||
elif 'Custom' in data_type_name and 'Timing' in data_type_name:
|
||||
# Extract variant for custom timing
|
||||
if 'Variant 0x04' in data_type_name:
|
||||
return 'CH10-ACTTS'
|
||||
elif 'Extended Timing' in data_type_name:
|
||||
return 'CH10-ExtTiming'
|
||||
else:
|
||||
return 'CH10-Timing'
|
||||
elif 'Ethernet' in data_type_name:
|
||||
return 'CH10-Ethernet'
|
||||
elif 'Image' in data_type_name:
|
||||
@@ -279,10 +289,10 @@ class FlowManager:
|
||||
return 'CH10-UART'
|
||||
elif 'CAN' in data_type_name:
|
||||
return 'CH10-CAN'
|
||||
elif 'Unknown' not in data_type_name:
|
||||
# Extract first word for other known types
|
||||
first_word = data_type_name.split()[0]
|
||||
return f'CH10-{first_word}'
|
||||
# Everything else gets grouped as CH10-Data for consistent timing analysis
|
||||
# This includes: Multi-Source, regular timing frames, custom data types, etc.
|
||||
else:
|
||||
return 'CH10-Data'
|
||||
|
||||
return 'CH10-Data'
|
||||
|
||||
|
||||
@@ -27,6 +27,13 @@ class StatisticsEngine:
|
||||
for flow in flows.values():
|
||||
self._calculate_single_flow_statistics(flow)
|
||||
|
||||
def calculate_all_statistics(self, analyzer=None) -> None:
|
||||
"""Calculate statistics for all flows (called by background analyzer)"""
|
||||
# This is called by the background analyzer
|
||||
# The analyzer parameter should be passed in
|
||||
if analyzer and hasattr(analyzer, 'flows'):
|
||||
self.calculate_flow_statistics(analyzer.flows)
|
||||
|
||||
def _calculate_single_flow_statistics(self, flow: FlowStats) -> None:
|
||||
"""Calculate statistics for a single flow"""
|
||||
# Ensure timeline statistics are calculated
|
||||
@@ -77,11 +84,18 @@ class StatisticsEngine:
|
||||
# Detect outliers for this frame type
|
||||
ft_threshold = ft_stats.avg_inter_arrival + (self.outlier_threshold_sigma * ft_stats.std_inter_arrival)
|
||||
|
||||
# Clear existing outliers to recalculate
|
||||
ft_stats.outlier_frames.clear()
|
||||
ft_stats.outlier_details.clear()
|
||||
ft_stats.enhanced_outlier_details.clear()
|
||||
|
||||
for i, inter_time in enumerate(ft_stats.inter_arrival_times):
|
||||
if inter_time > ft_threshold:
|
||||
frame_number = ft_stats.frame_numbers[i + 1]
|
||||
frame_number = ft_stats.frame_numbers[i + 1] # Current frame
|
||||
prev_frame_number = ft_stats.frame_numbers[i] # Previous frame
|
||||
ft_stats.outlier_frames.append(frame_number)
|
||||
ft_stats.outlier_details.append((frame_number, inter_time))
|
||||
ft_stats.outlier_details.append((frame_number, inter_time)) # Legacy format
|
||||
ft_stats.enhanced_outlier_details.append((frame_number, prev_frame_number, inter_time)) # Enhanced format
|
||||
|
||||
def get_flow_summary_statistics(self, flows: Dict[tuple, FlowStats]) -> Dict[str, float]:
|
||||
"""Get summary statistics across all flows"""
|
||||
@@ -232,9 +246,11 @@ class StatisticsEngine:
|
||||
threshold = avg + (self.outlier_threshold_sigma * std)
|
||||
if new_time > threshold:
|
||||
frame_number = ft_stats.frame_numbers[-1]
|
||||
prev_frame_number = ft_stats.frame_numbers[-2] if len(ft_stats.frame_numbers) > 1 else 0
|
||||
if frame_number not in ft_stats.outlier_frames:
|
||||
ft_stats.outlier_frames.append(frame_number)
|
||||
ft_stats.outlier_details.append((frame_number, new_time))
|
||||
ft_stats.outlier_details.append((frame_number, new_time)) # Legacy format
|
||||
ft_stats.enhanced_outlier_details.append((frame_number, prev_frame_number, new_time)) # Enhanced format
|
||||
stats['outlier_count'] += 1
|
||||
|
||||
stats['last_avg'] = avg
|
||||
|
||||
@@ -13,6 +13,7 @@ from .tui import TUIInterface
|
||||
from .tui.modern_interface import ModernTUIInterface
|
||||
from .tui.textual.app_v2 import StreamLensAppV2
|
||||
from .utils import PCAPLoader, LiveCapture
|
||||
from .reporting import FlowReportGenerator
|
||||
|
||||
|
||||
def main():
|
||||
@@ -28,6 +29,10 @@ def main():
|
||||
help='Outlier detection threshold in standard deviations (default: 3.0)')
|
||||
parser.add_argument('--report', action='store_true',
|
||||
help='Generate comprehensive outlier report and exit (no TUI)')
|
||||
parser.add_argument('--flow-report', metavar='OUTPUT_FILE',
|
||||
help='Generate comprehensive flow analysis report to specified file and exit')
|
||||
parser.add_argument('--report-format', choices=['markdown', 'html', 'text'], default='markdown',
|
||||
help='Report output format (default: markdown)')
|
||||
parser.add_argument('--gui', action='store_true',
|
||||
help='Launch GUI mode (requires PySide6)')
|
||||
parser.add_argument('--classic', action='store_true',
|
||||
@@ -114,6 +119,11 @@ def main():
|
||||
generate_outlier_report(analyzer, args.outlier_threshold)
|
||||
return
|
||||
|
||||
# Handle flow report mode
|
||||
if args.flow_report:
|
||||
generate_flow_report(analyzer, args.flow_report, args.report_format)
|
||||
return
|
||||
|
||||
# TUI mode - choose between classic, modern curses, and textual interface
|
||||
if args.textual:
|
||||
# Use new Textual-based interface (TipTop-inspired) with background parsing
|
||||
@@ -251,6 +261,31 @@ def print_console_results(analyzer: EthernetAnalyzer):
|
||||
print(f"{flow.src_ip} -> {flow.dst_ip}: CV = {cv:.3f}")
|
||||
|
||||
|
||||
def generate_flow_report(analyzer: EthernetAnalyzer, output_file: str, format_type: str):
|
||||
"""Generate comprehensive flow analysis report"""
|
||||
print(f"Generating {format_type} flow analysis report...")
|
||||
|
||||
try:
|
||||
# Create report generator
|
||||
report_generator = FlowReportGenerator(analyzer)
|
||||
|
||||
# Generate report
|
||||
report_content = report_generator.generate_report(output_file, format_type)
|
||||
|
||||
print(f"✅ Flow analysis report generated successfully!")
|
||||
print(f"📄 Output file: {output_file}")
|
||||
print(f"📊 Format: {format_type}")
|
||||
print(f"📈 Flows analyzed: {len(analyzer.flows)}")
|
||||
|
||||
# Show preview of report length
|
||||
lines = report_content.count('\n')
|
||||
print(f"📝 Report length: {lines} lines")
|
||||
|
||||
except Exception as e:
|
||||
print(f"❌ Error generating flow report: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def generate_outlier_report(analyzer: EthernetAnalyzer, threshold_sigma: float):
|
||||
"""Generate comprehensive outlier report without TUI"""
|
||||
summary = analyzer.get_summary()
|
||||
@@ -334,18 +369,34 @@ def generate_outlier_report(analyzer: EthernetAnalyzer, threshold_sigma: float):
|
||||
threshold = ft_stats.avg_inter_arrival + (threshold_sigma * ft_stats.std_inter_arrival)
|
||||
print(f" Threshold: {threshold:.6f}s (>{threshold_sigma}σ from mean {ft_stats.avg_inter_arrival:.6f}s)")
|
||||
|
||||
print(f" {'Frame#':<10} {'Inter-arrival':<15} {'Deviation':<12}")
|
||||
print(f" {'-' * 10} {'-' * 15} {'-' * 12}")
|
||||
|
||||
for frame_num, inter_arrival_time in ft_stats.outlier_details:
|
||||
if ft_stats.avg_inter_arrival > 0:
|
||||
deviation = inter_arrival_time - ft_stats.avg_inter_arrival
|
||||
sigma_dev = deviation / ft_stats.std_inter_arrival if ft_stats.std_inter_arrival > 0 else 0
|
||||
dev_str = f"+{sigma_dev:.1f}σ"
|
||||
else:
|
||||
dev_str = "N/A"
|
||||
# Use enhanced outlier details if available
|
||||
if hasattr(ft_stats, 'enhanced_outlier_details') and ft_stats.enhanced_outlier_details:
|
||||
print(f" {'Frame#':<10} {'From Frame':<10} {'Inter-arrival':<15} {'Deviation':<12}")
|
||||
print(f" {'-' * 10} {'-' * 10} {'-' * 15} {'-' * 12}")
|
||||
|
||||
print(f" {frame_num:<10} {inter_arrival_time:.6f}s{'':<3} {dev_str:<12}")
|
||||
for frame_num, prev_frame_num, inter_arrival_time in ft_stats.enhanced_outlier_details:
|
||||
if ft_stats.avg_inter_arrival > 0:
|
||||
deviation = inter_arrival_time - ft_stats.avg_inter_arrival
|
||||
sigma_dev = deviation / ft_stats.std_inter_arrival if ft_stats.std_inter_arrival > 0 else 0
|
||||
dev_str = f"+{sigma_dev:.1f}σ"
|
||||
else:
|
||||
dev_str = "N/A"
|
||||
|
||||
print(f" {frame_num:<10} {prev_frame_num:<10} {inter_arrival_time:.6f}s{'':<3} {dev_str:<12}")
|
||||
else:
|
||||
# Fallback to legacy outlier details
|
||||
print(f" {'Frame#':<10} {'Inter-arrival':<15} {'Deviation':<12}")
|
||||
print(f" {'-' * 10} {'-' * 15} {'-' * 12}")
|
||||
|
||||
for frame_num, inter_arrival_time in ft_stats.outlier_details:
|
||||
if ft_stats.avg_inter_arrival > 0:
|
||||
deviation = inter_arrival_time - ft_stats.avg_inter_arrival
|
||||
sigma_dev = deviation / ft_stats.std_inter_arrival if ft_stats.std_inter_arrival > 0 else 0
|
||||
dev_str = f"+{sigma_dev:.1f}σ"
|
||||
else:
|
||||
dev_str = "N/A"
|
||||
|
||||
print(f" {frame_num:<10} {inter_arrival_time:.6f}s{'':<3} {dev_str:<12}")
|
||||
|
||||
# High jitter flows summary
|
||||
high_jitter = analyzer.get_high_jitter_flows()
|
||||
|
||||
@@ -18,7 +18,8 @@ class FrameTypeStats:
|
||||
avg_inter_arrival: float = 0.0
|
||||
std_inter_arrival: float = 0.0
|
||||
outlier_frames: List[int] = field(default_factory=list)
|
||||
outlier_details: List[Tuple[int, float]] = field(default_factory=list)
|
||||
outlier_details: List[Tuple[int, float]] = field(default_factory=list) # (frame_num, delta_t) - legacy
|
||||
enhanced_outlier_details: List[Tuple[int, int, float]] = field(default_factory=list) # (frame_num, prev_frame_num, delta_t)
|
||||
|
||||
|
||||
@dataclass
|
||||
|
||||
7
analyzer/reporting/__init__.py
Normal file
7
analyzer/reporting/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
"""
|
||||
StreamLens Reporting Module
|
||||
"""
|
||||
|
||||
from .flow_report import FlowReportGenerator
|
||||
|
||||
__all__ = ['FlowReportGenerator']
|
||||
393
analyzer/reporting/flow_report.py
Normal file
393
analyzer/reporting/flow_report.py
Normal file
@@ -0,0 +1,393 @@
|
||||
"""
|
||||
Flow Analysis Report Generator
|
||||
Generates comprehensive flow analysis reports with markup formatting
|
||||
"""
|
||||
|
||||
import datetime
|
||||
from typing import Dict, List, Optional
|
||||
from pathlib import Path
|
||||
from ..models import FlowStats, FrameTypeStats
|
||||
|
||||
|
||||
class FlowReportGenerator:
|
||||
"""Generate comprehensive flow analysis reports"""
|
||||
|
||||
def __init__(self, analyzer):
|
||||
self.analyzer = analyzer
|
||||
|
||||
def generate_report(self, output_path: Optional[str] = None, format_type: str = "markdown") -> str:
|
||||
"""Generate comprehensive flow analysis report"""
|
||||
if format_type == "markdown":
|
||||
return self._generate_markdown_report(output_path)
|
||||
elif format_type == "html":
|
||||
return self._generate_html_report(output_path)
|
||||
else:
|
||||
return self._generate_text_report(output_path)
|
||||
|
||||
def _generate_markdown_report(self, output_path: Optional[str] = None) -> str:
|
||||
"""Generate markdown-formatted report"""
|
||||
flows = list(self.analyzer.flows.values())
|
||||
|
||||
# Sort flows by importance (enhanced first, then by packet count)
|
||||
flows.sort(key=lambda x: (
|
||||
x.enhanced_analysis.decoder_type != "Standard",
|
||||
len(x.outlier_frames),
|
||||
x.frame_count
|
||||
), reverse=True)
|
||||
|
||||
report_lines = []
|
||||
|
||||
# Header
|
||||
timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
report_lines.extend([
|
||||
"# StreamLens Flow Analysis Report",
|
||||
f"**Generated:** {timestamp}",
|
||||
f"**Total Flows:** {len(flows)}",
|
||||
f"**Analysis Engine:** {self.analyzer.__class__.__name__}",
|
||||
"",
|
||||
"---",
|
||||
""
|
||||
])
|
||||
|
||||
# Executive Summary
|
||||
report_lines.extend(self._generate_executive_summary(flows))
|
||||
|
||||
# Detailed Flow Analysis
|
||||
report_lines.extend([
|
||||
"## 📊 Detailed Flow Analysis",
|
||||
""
|
||||
])
|
||||
|
||||
for i, flow in enumerate(flows, 1):
|
||||
report_lines.extend(self._generate_flow_section(flow, i))
|
||||
|
||||
# Statistics Summary
|
||||
report_lines.extend(self._generate_statistics_summary(flows))
|
||||
|
||||
report_content = "\n".join(report_lines)
|
||||
|
||||
# Save to file if path provided
|
||||
if output_path:
|
||||
output_file = Path(output_path)
|
||||
output_file.write_text(report_content, encoding='utf-8')
|
||||
|
||||
return report_content
|
||||
|
||||
def _generate_executive_summary(self, flows: List[FlowStats]) -> List[str]:
|
||||
"""Generate executive summary section"""
|
||||
total_packets = sum(flow.frame_count for flow in flows)
|
||||
total_bytes = sum(flow.total_bytes for flow in flows)
|
||||
enhanced_flows = [f for f in flows if f.enhanced_analysis.decoder_type != "Standard"]
|
||||
high_outlier_flows = [f for f in flows if len(f.outlier_frames) > f.frame_count * 0.1]
|
||||
|
||||
return [
|
||||
"## 📋 Executive Summary",
|
||||
"",
|
||||
f"- **Total Network Flows:** {len(flows)}",
|
||||
f"- **Total Packets Analyzed:** {total_packets:,}",
|
||||
f"- **Total Data Volume:** {self._format_bytes(total_bytes)}",
|
||||
f"- **Enhanced Protocol Flows:** {len(enhanced_flows)} ({len(enhanced_flows)/len(flows)*100:.1f}%)",
|
||||
f"- **Flows with Timing Issues:** {len(high_outlier_flows)} ({len(high_outlier_flows)/len(flows)*100:.1f}%)",
|
||||
"",
|
||||
"### 🎯 Key Findings",
|
||||
""
|
||||
]
|
||||
|
||||
def _generate_flow_section(self, flow: FlowStats, flow_num: int) -> List[str]:
|
||||
"""Generate detailed section for a single flow"""
|
||||
lines = []
|
||||
|
||||
# Flow Header
|
||||
status_emoji = self._get_flow_status_emoji(flow)
|
||||
quality_score = self._get_quality_score(flow)
|
||||
|
||||
lines.extend([
|
||||
f"### {status_emoji} Flow #{flow_num}: {flow.src_ip}:{flow.src_port} → {flow.dst_ip}:{flow.dst_port}",
|
||||
""
|
||||
])
|
||||
|
||||
# Basic Information Table
|
||||
lines.extend([
|
||||
"| Attribute | Value |",
|
||||
"|-----------|-------|",
|
||||
f"| **Protocol** | {flow.transport_protocol} |",
|
||||
f"| **Classification** | {flow.traffic_classification} |",
|
||||
f"| **Packets** | {flow.frame_count:,} |",
|
||||
f"| **Volume** | {self._format_bytes(flow.total_bytes)} |",
|
||||
f"| **Quality Score** | {quality_score}% |",
|
||||
f"| **Duration** | {flow.duration:.2f}s |",
|
||||
f"| **First Seen** | {self._format_timestamp(flow.first_seen)} |",
|
||||
f"| **Last Seen** | {self._format_timestamp(flow.last_seen)} |",
|
||||
""
|
||||
])
|
||||
|
||||
# Enhanced Analysis (if available)
|
||||
if flow.enhanced_analysis.decoder_type != "Standard":
|
||||
lines.extend(self._generate_enhanced_analysis_section(flow))
|
||||
|
||||
# Frame Type Breakdown
|
||||
if flow.frame_types:
|
||||
lines.extend(self._generate_frame_types_section(flow))
|
||||
|
||||
# Timing Analysis
|
||||
lines.extend(self._generate_timing_analysis_section(flow))
|
||||
|
||||
lines.append("")
|
||||
return lines
|
||||
|
||||
def _generate_enhanced_analysis_section(self, flow: FlowStats) -> List[str]:
|
||||
"""Generate enhanced analysis section"""
|
||||
ea = flow.enhanced_analysis
|
||||
|
||||
lines = [
|
||||
"#### 🔬 Enhanced Protocol Analysis",
|
||||
"",
|
||||
"| Metric | Value |",
|
||||
"|--------|-------|",
|
||||
f"| **Decoder Type** | {ea.decoder_type} |",
|
||||
f"| **Frame Quality** | {ea.avg_frame_quality:.1f}% |",
|
||||
f"| **Field Count** | {ea.field_count} |",
|
||||
f"| **Timing Accuracy** | {ea.timing_accuracy:.1f}% |",
|
||||
f"| **Signal Quality** | {ea.signal_quality:.1f}% |"
|
||||
]
|
||||
|
||||
if ea.decoder_type.startswith("Chapter10"):
|
||||
lines.extend([
|
||||
f"| **Channel Count** | {ea.channel_count} |",
|
||||
f"| **Analog Channels** | {ea.analog_channels} |",
|
||||
f"| **PCM Channels** | {ea.pcm_channels} |",
|
||||
f"| **TMATS Frames** | {ea.tmats_frames} |",
|
||||
f"| **Clock Drift** | {ea.avg_clock_drift_ppm:.2f} ppm |",
|
||||
f"| **Timing Quality** | {ea.timing_quality} |"
|
||||
])
|
||||
|
||||
lines.extend(["", ""])
|
||||
return lines
|
||||
|
||||
def _generate_frame_types_section(self, flow: FlowStats) -> List[str]:
|
||||
"""Generate frame types breakdown section"""
|
||||
lines = [
|
||||
"#### 📦 Frame Type Analysis",
|
||||
"",
|
||||
"| Frame Type | Count | % | Avg ΔT | Std σ | Outliers | Outlier Frames |",
|
||||
"|------------|-------|---|---------|--------|----------|----------------|"
|
||||
]
|
||||
|
||||
# Sort frame types by count
|
||||
sorted_types = sorted(
|
||||
flow.frame_types.items(),
|
||||
key=lambda x: x[1].count,
|
||||
reverse=True
|
||||
)
|
||||
|
||||
total_count = flow.frame_count
|
||||
for frame_type, stats in sorted_types:
|
||||
percentage = (stats.count / total_count * 100) if total_count > 0 else 0
|
||||
|
||||
# Format timing values
|
||||
delta_t = ""
|
||||
if stats.avg_inter_arrival > 0:
|
||||
dt_ms = stats.avg_inter_arrival * 1000
|
||||
delta_t = f"{dt_ms:.1f}ms" if dt_ms < 1000 else f"{dt_ms/1000:.1f}s"
|
||||
|
||||
sigma = ""
|
||||
if stats.std_inter_arrival > 0:
|
||||
sig_ms = stats.std_inter_arrival * 1000
|
||||
sigma = f"{sig_ms:.1f}ms" if sig_ms < 1000 else f"{sig_ms/1000:.1f}s"
|
||||
|
||||
outliers = len(stats.outlier_frames)
|
||||
outlier_str = f"⚠️ {outliers}" if outliers > 0 else f"{outliers}"
|
||||
|
||||
# Format outlier frames (show first 5)
|
||||
outlier_frames = ""
|
||||
if stats.outlier_frames:
|
||||
frames = sorted(stats.outlier_frames[:5])
|
||||
outlier_frames = ", ".join(map(str, frames))
|
||||
if len(stats.outlier_frames) > 5:
|
||||
outlier_frames += f", +{len(stats.outlier_frames) - 5}"
|
||||
|
||||
lines.append(
|
||||
f"| `{frame_type}` | {stats.count:,} | {percentage:.1f}% | {delta_t} | {sigma} | {outlier_str} | {outlier_frames} |"
|
||||
)
|
||||
|
||||
lines.extend(["", ""])
|
||||
return lines
|
||||
|
||||
def _generate_timing_analysis_section(self, flow: FlowStats) -> List[str]:
|
||||
"""Generate timing analysis section"""
|
||||
lines = [
|
||||
"#### ⏱️ Timing Analysis",
|
||||
""
|
||||
]
|
||||
|
||||
if len(flow.inter_arrival_times) < 2:
|
||||
lines.extend([
|
||||
"*Insufficient timing data for analysis*",
|
||||
""
|
||||
])
|
||||
return lines
|
||||
|
||||
# Overall timing metrics
|
||||
avg_ms = flow.avg_inter_arrival * 1000
|
||||
std_ms = flow.std_inter_arrival * 1000
|
||||
jitter_ms = flow.jitter * 1000
|
||||
outlier_pct = len(flow.outlier_frames) / flow.frame_count * 100 if flow.frame_count > 0 else 0
|
||||
|
||||
lines.extend([
|
||||
"| Timing Metric | Value |",
|
||||
"|---------------|-------|",
|
||||
f"| **Average Inter-arrival** | {avg_ms:.2f}ms |",
|
||||
f"| **Standard Deviation** | {std_ms:.2f}ms |",
|
||||
f"| **Jitter** | {jitter_ms:.2f}ms |",
|
||||
f"| **Outlier Percentage** | {outlier_pct:.1f}% |",
|
||||
f"| **Total Outliers** | {len(flow.outlier_frames)} |",
|
||||
""
|
||||
])
|
||||
|
||||
# Outlier Frame Details
|
||||
if flow.outlier_frames:
|
||||
lines.extend([
|
||||
"##### 🚨 Outlier Frames",
|
||||
"",
|
||||
f"**Frame Numbers:** {', '.join(map(str, sorted(flow.outlier_frames)))}",
|
||||
""
|
||||
])
|
||||
|
||||
if flow.outlier_details:
|
||||
lines.extend([
|
||||
"| Frame # | Inter-arrival Time | Deviation |",
|
||||
"|---------|-------------------|-----------|"
|
||||
])
|
||||
|
||||
# Show up to 20 outliers in detail
|
||||
for frame_num, inter_time in sorted(flow.outlier_details[:20]):
|
||||
deviation = (inter_time - flow.avg_inter_arrival) / flow.std_inter_arrival if flow.std_inter_arrival > 0 else 0
|
||||
lines.append(
|
||||
f"| {frame_num} | {inter_time * 1000:.3f}ms | {deviation:.1f}σ |"
|
||||
)
|
||||
|
||||
if len(flow.outlier_details) > 20:
|
||||
lines.append(f"| ... | +{len(flow.outlier_details) - 20} more | ... |")
|
||||
|
||||
lines.append("")
|
||||
|
||||
# Timing Quality Assessment
|
||||
if outlier_pct < 1:
|
||||
timing_assessment = "🟢 **Excellent** - Very stable timing"
|
||||
elif outlier_pct < 5:
|
||||
timing_assessment = "🟡 **Good** - Minor timing variations"
|
||||
elif outlier_pct < 10:
|
||||
timing_assessment = "🟠 **Fair** - Noticeable timing issues"
|
||||
else:
|
||||
timing_assessment = "🔴 **Poor** - Significant timing problems"
|
||||
|
||||
lines.extend([
|
||||
f"**Timing Quality:** {timing_assessment}",
|
||||
""
|
||||
])
|
||||
|
||||
return lines
|
||||
|
||||
def _generate_statistics_summary(self, flows: List[FlowStats]) -> List[str]:
|
||||
"""Generate overall statistics summary"""
|
||||
if not flows:
|
||||
return []
|
||||
|
||||
# Calculate aggregate statistics
|
||||
total_packets = sum(flow.frame_count for flow in flows)
|
||||
total_bytes = sum(flow.total_bytes for flow in flows)
|
||||
total_outliers = sum(len(flow.outlier_frames) for flow in flows)
|
||||
|
||||
# Protocol distribution
|
||||
protocol_counts = {}
|
||||
for flow in flows:
|
||||
proto = flow.transport_protocol
|
||||
protocol_counts[proto] = protocol_counts.get(proto, 0) + 1
|
||||
|
||||
# Enhanced protocol distribution
|
||||
enhanced_types = {}
|
||||
for flow in flows:
|
||||
if flow.enhanced_analysis.decoder_type != "Standard":
|
||||
enhanced_types[flow.enhanced_analysis.decoder_type] = enhanced_types.get(flow.enhanced_analysis.decoder_type, 0) + 1
|
||||
|
||||
lines = [
|
||||
"---",
|
||||
"",
|
||||
"## 📈 Statistical Summary",
|
||||
"",
|
||||
"### Protocol Distribution",
|
||||
"",
|
||||
"| Protocol | Flows | Percentage |",
|
||||
"|----------|-------|------------|"
|
||||
]
|
||||
|
||||
for protocol, count in sorted(protocol_counts.items(), key=lambda x: x[1], reverse=True):
|
||||
percentage = count / len(flows) * 100
|
||||
lines.append(f"| {protocol} | {count} | {percentage:.1f}% |")
|
||||
|
||||
if enhanced_types:
|
||||
lines.extend([
|
||||
"",
|
||||
"### Enhanced Protocol Analysis",
|
||||
"",
|
||||
"| Enhanced Type | Flows | Percentage |",
|
||||
"|---------------|-------|------------|"
|
||||
])
|
||||
|
||||
for enhanced_type, count in sorted(enhanced_types.items(), key=lambda x: x[1], reverse=True):
|
||||
percentage = count / len(flows) * 100
|
||||
lines.append(f"| {enhanced_type} | {count} | {percentage:.1f}% |")
|
||||
|
||||
lines.extend([
|
||||
"",
|
||||
"### Overall Metrics",
|
||||
"",
|
||||
f"- **Total Analysis Duration:** {max(f.last_seen for f in flows if f.last_seen > 0) - min(f.first_seen for f in flows if f.first_seen > 0):.2f}s",
|
||||
f"- **Average Packets per Flow:** {total_packets / len(flows):.1f}",
|
||||
f"- **Average Bytes per Flow:** {self._format_bytes(total_bytes // len(flows))}",
|
||||
f"- **Overall Outlier Rate:** {total_outliers / total_packets * 100:.2f}%",
|
||||
"",
|
||||
"---",
|
||||
"",
|
||||
"*Report generated by StreamLens Network Analysis Tool*"
|
||||
])
|
||||
|
||||
return lines
|
||||
|
||||
def _get_flow_status_emoji(self, flow: FlowStats) -> str:
|
||||
"""Get emoji for flow status"""
|
||||
if flow.enhanced_analysis.decoder_type != "Standard":
|
||||
return "🔬" # Enhanced
|
||||
elif len(flow.outlier_frames) > flow.frame_count * 0.1:
|
||||
return "⚠️" # Alert
|
||||
elif len(flow.outlier_frames) > 0:
|
||||
return "⚡" # Warning
|
||||
else:
|
||||
return "✅" # Normal
|
||||
|
||||
def _get_quality_score(self, flow: FlowStats) -> int:
|
||||
"""Calculate quality score for flow"""
|
||||
if flow.enhanced_analysis.decoder_type != "Standard":
|
||||
return int(flow.enhanced_analysis.avg_frame_quality)
|
||||
else:
|
||||
# Base quality on outlier percentage
|
||||
outlier_pct = len(flow.outlier_frames) / flow.frame_count * 100 if flow.frame_count > 0 else 0
|
||||
return max(0, int(100 - outlier_pct * 10))
|
||||
|
||||
def _format_bytes(self, bytes_count: int) -> str:
|
||||
"""Format byte count with units"""
|
||||
if bytes_count >= 1_000_000_000:
|
||||
return f"{bytes_count / 1_000_000_000:.2f} GB"
|
||||
elif bytes_count >= 1_000_000:
|
||||
return f"{bytes_count / 1_000_000:.2f} MB"
|
||||
elif bytes_count >= 1_000:
|
||||
return f"{bytes_count / 1_000:.2f} KB"
|
||||
else:
|
||||
return f"{bytes_count} B"
|
||||
|
||||
def _format_timestamp(self, timestamp: float) -> str:
|
||||
"""Format timestamp for display"""
|
||||
if timestamp == 0:
|
||||
return "N/A"
|
||||
dt = datetime.datetime.fromtimestamp(timestamp)
|
||||
return dt.strftime("%H:%M:%S.%f")[:-3]
|
||||
@@ -5,10 +5,11 @@ Modern TUI with real-time metrics, sparklines, and professional monitoring aesth
|
||||
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.containers import Container, Horizontal, Vertical, ScrollableContainer
|
||||
from textual.widgets import Header, Footer, Static, DataTable, Label
|
||||
from textual.widgets import Header, Footer, Static, DataTable, Label, TabPane
|
||||
from textual.reactive import reactive
|
||||
from textual.timer import Timer
|
||||
from textual.events import MouseDown, MouseMove
|
||||
from textual.binding import Binding
|
||||
from typing import TYPE_CHECKING
|
||||
from rich.text import Text
|
||||
from rich.console import Group
|
||||
@@ -17,14 +18,30 @@ from rich.table import Table
|
||||
import time
|
||||
import signal
|
||||
import sys
|
||||
import datetime
|
||||
from pathlib import Path
|
||||
import subprocess
|
||||
import platform
|
||||
|
||||
from .widgets.sparkline import SparklineWidget
|
||||
from .widgets.metric_card import MetricCard
|
||||
from .widgets.flow_table_v2 import EnhancedFlowTable
|
||||
from .widgets.filtered_flow_view import FilteredFlowView
|
||||
from ...reporting import FlowReportGenerator
|
||||
from .widgets.split_flow_details import FlowMainDetailsPanel, SubFlowDetailsPanel
|
||||
from .widgets.debug_panel import DebugPanel
|
||||
from .widgets.progress_bar import ParsingProgressBar
|
||||
from ...analysis.background_analyzer import BackgroundAnalyzer
|
||||
|
||||
|
||||
# Debugging imports
|
||||
try:
|
||||
from textual_state_visualizer import TextualStateMonitor, TextualStateWebServer
|
||||
from textual_inspector import inspect_textual_app, print_widget_tree
|
||||
DEBUGGING_AVAILABLE = True
|
||||
except ImportError:
|
||||
DEBUGGING_AVAILABLE = False
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ...analysis.core import EthernetAnalyzer
|
||||
|
||||
@@ -47,14 +64,35 @@ class StreamLensAppV2(App):
|
||||
|
||||
BINDINGS = [
|
||||
("q", "quit", "Quit"),
|
||||
("1", "sort('flows')", "Sort Flows"),
|
||||
("2", "sort('packets')", "Sort Packets"),
|
||||
("3", "sort('volume')", "Sort Volume"),
|
||||
("4", "sort('quality')", "Sort Quality"),
|
||||
("1", "select_filter('1')", "Overview"),
|
||||
("2", "select_filter('2')", "Frame Type 2"),
|
||||
("3", "select_filter('3')", "Frame Type 3"),
|
||||
("4", "select_filter('4')", "Frame Type 4"),
|
||||
("5", "select_filter('5')", "Frame Type 5"),
|
||||
("6", "select_filter('6')", "Frame Type 6"),
|
||||
("7", "select_filter('7')", "Frame Type 7"),
|
||||
("8", "select_filter('8')", "Frame Type 8"),
|
||||
("9", "select_filter('9')", "Frame Type 9"),
|
||||
("0", "select_filter('0')", "Frame Type 10"),
|
||||
("alt+1", "sort_table_column(0)", "Sort by column 1"),
|
||||
("alt+2", "sort_table_column(1)", "Sort by column 2"),
|
||||
("alt+3", "sort_table_column(2)", "Sort by column 3"),
|
||||
("alt+4", "sort_table_column(3)", "Sort by column 4"),
|
||||
("alt+5", "sort_table_column(4)", "Sort by column 5"),
|
||||
("alt+6", "sort_table_column(5)", "Sort by column 6"),
|
||||
("alt+7", "sort_table_column(6)", "Sort by column 7"),
|
||||
("alt+8", "sort_table_column(7)", "Sort by column 8"),
|
||||
("alt+9", "sort_table_column(8)", "Sort by column 9"),
|
||||
("alt+0", "sort_table_column(9)", "Sort by column 10"),
|
||||
("p", "toggle_pause", "Pause"),
|
||||
("d", "show_details", "Details"),
|
||||
("v", "toggle_view_mode", "Toggle View"),
|
||||
("r", "generate_report", "Generate Report"),
|
||||
("o", "copy_outliers", "Copy Outliers"),
|
||||
("?", "toggle_help", "Help"),
|
||||
Binding("ctrl+d,t", "debug_tree", "Debug: Widget Tree", show=False),
|
||||
Binding("ctrl+d,f", "debug_focus", "Debug: Focused Widget", show=False),
|
||||
Binding("ctrl+d,w", "start_web_debug", "Debug: Web Interface", show=False),
|
||||
]
|
||||
|
||||
# Reactive attributes
|
||||
@@ -77,12 +115,12 @@ class StreamLensAppV2(App):
|
||||
self.sub_title = "Network Flow Analysis"
|
||||
self.paused = False
|
||||
|
||||
# Background parsing support
|
||||
# Background parsing support - Use single thread to avoid race conditions in frame reference tracking
|
||||
self.background_analyzer = BackgroundAnalyzer(
|
||||
analyzer=analyzer,
|
||||
num_threads=4,
|
||||
num_threads=1, # Single-threaded to prevent race conditions in outlier frame references
|
||||
batch_size=1000,
|
||||
progress_callback=None,
|
||||
progress_callback=self._on_progress_update,
|
||||
flow_update_callback=self._on_flow_update
|
||||
)
|
||||
self.pcap_file = None
|
||||
@@ -99,6 +137,9 @@ class StreamLensAppV2(App):
|
||||
yield Header()
|
||||
|
||||
with Container(id="main-container"):
|
||||
# Progress bar for PCAP loading (initially hidden)
|
||||
yield ParsingProgressBar(id="progress-bar")
|
||||
|
||||
# Ultra-compact metrics bar
|
||||
with Horizontal(id="metrics-bar"):
|
||||
yield MetricCard("Flows", f"{self.total_flows}", id="flows-metric")
|
||||
@@ -109,10 +150,10 @@ class StreamLensAppV2(App):
|
||||
|
||||
# Main content area with conditional debug panel
|
||||
with Horizontal(id="content-area"):
|
||||
# Left - Enhanced flow table
|
||||
yield EnhancedFlowTable(
|
||||
# Left - Filtered flow view with frame type buttons
|
||||
yield FilteredFlowView(
|
||||
self.analyzer,
|
||||
id="flow-table",
|
||||
id="filtered-flow-view",
|
||||
classes="panel-wide"
|
||||
)
|
||||
|
||||
@@ -153,9 +194,9 @@ class StreamLensAppV2(App):
|
||||
|
||||
self.update_metrics()
|
||||
|
||||
# Set up update intervals like TipTop (reduced frequency since we have real-time updates)
|
||||
self.metric_timer = self.set_interval(2.0, self.update_metrics) # 0.5Hz for background updates
|
||||
self.flow_timer = self.set_interval(5.0, self.update_flows) # 0.2Hz for fallback flow updates
|
||||
# Set up update intervals (slower during parsing to reduce CPU usage)
|
||||
self.metric_timer = self.set_interval(5.0, self.update_metrics) # 0.2Hz for slower background updates
|
||||
self.flow_timer = self.set_interval(10.0, self.update_flows) # 0.1Hz for slower fallback flow updates
|
||||
|
||||
# Initialize sparkline history
|
||||
self._initialize_history()
|
||||
@@ -164,13 +205,12 @@ class StreamLensAppV2(App):
|
||||
self.call_after_refresh(self._set_initial_focus)
|
||||
|
||||
def _set_initial_focus(self):
|
||||
"""Set initial focus to the flow table after widgets are ready"""
|
||||
"""Set initial focus to the filtered flow view after widgets are ready"""
|
||||
try:
|
||||
flow_table = self.query_one("#flow-table", EnhancedFlowTable)
|
||||
data_table = flow_table.query_one("#flows-data-table", DataTable)
|
||||
data_table.focus()
|
||||
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
|
||||
flow_view.flow_table.focus()
|
||||
except Exception:
|
||||
# If table isn't ready yet, try again after a short delay
|
||||
# If flow view isn't ready yet, try again after a short delay
|
||||
self.set_timer(0.1, self._set_initial_focus)
|
||||
|
||||
def _initialize_history(self):
|
||||
@@ -210,13 +250,15 @@ class StreamLensAppV2(App):
|
||||
for flow in flows.values():
|
||||
if flow.enhanced_analysis.decoder_type != "Standard":
|
||||
enhanced += 1
|
||||
outliers += len(flow.outlier_frames)
|
||||
# Use frame-type-specific outliers instead of flow-level outliers
|
||||
outliers += sum(len(ft_stats.outlier_frames) for ft_stats in flow.frame_types.values())
|
||||
except Exception:
|
||||
# Fallback to direct access if background analyzer not available
|
||||
for flow in self.analyzer.flows.values():
|
||||
if flow.enhanced_analysis.decoder_type != "Standard":
|
||||
enhanced += 1
|
||||
outliers += len(flow.outlier_frames)
|
||||
# Use frame-type-specific outliers instead of flow-level outliers
|
||||
outliers += sum(len(ft_stats.outlier_frames) for ft_stats in flow.frame_types.values())
|
||||
|
||||
self.enhanced_flows = enhanced
|
||||
self.outlier_count = outliers
|
||||
@@ -286,10 +328,45 @@ class StreamLensAppV2(App):
|
||||
if self.paused:
|
||||
return
|
||||
|
||||
# Update flow table
|
||||
flow_table = self.query_one("#flow-table", EnhancedFlowTable)
|
||||
flow_table.refresh_data()
|
||||
# Update filtered flow view
|
||||
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
|
||||
flow_view.refresh_frame_types()
|
||||
flow_view.refresh_flow_data()
|
||||
def _on_progress_update(self, progress):
|
||||
"""Handle progress updates from background parser"""
|
||||
try:
|
||||
# Use call_from_thread to safely update UI from background thread
|
||||
self.call_from_thread(self._update_progress_ui, progress)
|
||||
except Exception:
|
||||
# Ignore errors during shutdown
|
||||
pass
|
||||
|
||||
def _update_progress_ui(self, progress):
|
||||
"""Update progress UI (called from main thread)"""
|
||||
try:
|
||||
progress_bar = self.query_one("#progress-bar", ParsingProgressBar)
|
||||
|
||||
if progress.error:
|
||||
progress_bar.show_error(progress.error)
|
||||
elif progress.is_complete:
|
||||
progress_bar.complete_parsing()
|
||||
# Trigger frame type button creation now that parsing is complete
|
||||
self._create_frame_type_buttons()
|
||||
else:
|
||||
# Start progress if this is the first update
|
||||
if not progress_bar.is_visible and progress.total_packets > 0:
|
||||
progress_bar.start_parsing(progress.total_packets)
|
||||
|
||||
# Update progress
|
||||
progress_bar.update_progress(
|
||||
progress.processed_packets,
|
||||
progress.total_packets,
|
||||
progress.packets_per_second,
|
||||
progress.estimated_time_remaining
|
||||
)
|
||||
except Exception as e:
|
||||
# Progress bar widget may not be available yet
|
||||
pass
|
||||
|
||||
def _on_flow_update(self):
|
||||
"""Handle flow data updates from background parser"""
|
||||
@@ -303,14 +380,30 @@ class StreamLensAppV2(App):
|
||||
def _update_flow_ui(self):
|
||||
"""Update flow UI (called from main thread)"""
|
||||
try:
|
||||
# Update flow table
|
||||
flow_table = self.query_one("#flow-table", EnhancedFlowTable)
|
||||
flow_table.refresh_data()
|
||||
# Update filtered flow view - frame types first for dynamic button creation
|
||||
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
|
||||
flow_view.refresh_frame_types() # This will create buttons as frame types are detected
|
||||
flow_view.refresh_flow_data()
|
||||
|
||||
# Also trigger button creation if parsing is complete but buttons haven't been created yet
|
||||
if not self.analyzer.is_parsing and not getattr(flow_view, '_buttons_created', False):
|
||||
self._create_frame_type_buttons()
|
||||
|
||||
# Also update metrics in real-time
|
||||
self.update_metrics()
|
||||
except Exception:
|
||||
# Flow table widget may not be available yet
|
||||
# Flow view widget may not be available yet
|
||||
pass
|
||||
|
||||
def _create_frame_type_buttons(self):
|
||||
"""Create frame type buttons now that parsing is complete"""
|
||||
try:
|
||||
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
|
||||
# Force refresh of frame types now that parsing is complete
|
||||
flow_view.refresh_frame_types()
|
||||
flow_view.refresh_flow_data()
|
||||
except Exception as e:
|
||||
# Flow view widget may not be available yet
|
||||
pass
|
||||
|
||||
def start_background_parsing(self, pcap_file: str):
|
||||
@@ -372,18 +465,24 @@ class StreamLensAppV2(App):
|
||||
self.paused = not self.paused
|
||||
status = "PAUSED" if self.paused else "LIVE"
|
||||
|
||||
# Get current view mode to maintain it in subtitle
|
||||
try:
|
||||
flow_table = self.query_one("#flow-table", EnhancedFlowTable)
|
||||
view_mode = flow_table.get_current_view_mode()
|
||||
self.sub_title = f"Network Flow Analysis - {status} - {view_mode} VIEW"
|
||||
except:
|
||||
self.sub_title = f"Network Flow Analysis - {status}"
|
||||
# Update subtitle
|
||||
self.sub_title = f"Network Flow Analysis - {status}"
|
||||
|
||||
def action_sort(self, key: str) -> None:
|
||||
"""Sort flow table by specified key"""
|
||||
flow_table = self.query_one("#flow-table", EnhancedFlowTable)
|
||||
flow_table.sort_by(key)
|
||||
def action_select_filter(self, number: str) -> None:
|
||||
"""Select frame type filter by number key"""
|
||||
try:
|
||||
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
|
||||
flow_view.action_select_filter(number)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def action_sort_table_column(self, column_index: int) -> None:
|
||||
"""Sort table by column index"""
|
||||
try:
|
||||
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
|
||||
flow_view.action_sort_column(column_index)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def action_show_details(self) -> None:
|
||||
"""Show detailed view for selected flow"""
|
||||
@@ -391,14 +490,11 @@ class StreamLensAppV2(App):
|
||||
pass
|
||||
|
||||
def action_toggle_view_mode(self) -> None:
|
||||
"""Toggle between simplified and detailed view modes"""
|
||||
flow_table = self.query_one("#flow-table", EnhancedFlowTable)
|
||||
flow_table.toggle_view_mode()
|
||||
|
||||
# Update subtitle to show current view mode
|
||||
view_mode = flow_table.get_current_view_mode()
|
||||
status = "PAUSED" if self.paused else "LIVE"
|
||||
self.sub_title = f"Network Flow Analysis - {status} - {view_mode} VIEW"
|
||||
"""Toggle between different display modes"""
|
||||
# For now, this could cycle through different column layouts
|
||||
# or show more/less detail in the frame type views
|
||||
pass
|
||||
|
||||
|
||||
def on_mouse_down(self, event: MouseDown) -> None:
|
||||
"""Prevent default mouse down behavior to disable mouse interaction."""
|
||||
@@ -408,6 +504,126 @@ class StreamLensAppV2(App):
|
||||
"""Prevent default mouse move behavior to disable mouse interaction."""
|
||||
event.prevent_default()
|
||||
|
||||
def action_generate_report(self) -> None:
|
||||
"""Generate comprehensive flow analysis report"""
|
||||
try:
|
||||
# Generate timestamp-based filename
|
||||
timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
output_file = f"streamlens_flow_report_{timestamp}.md"
|
||||
|
||||
# Create report generator
|
||||
report_generator = FlowReportGenerator(self.analyzer)
|
||||
|
||||
# Generate report (markdown format)
|
||||
report_content = report_generator.generate_report(output_file, "markdown")
|
||||
|
||||
# Show success notification in the footer
|
||||
self.sub_title = f"✅ Report generated: {output_file}"
|
||||
|
||||
# Set a timer to restore the original subtitle
|
||||
self.set_timer(3.0, self._restore_subtitle)
|
||||
|
||||
except Exception as e:
|
||||
# Show error notification
|
||||
self.sub_title = f"❌ Report generation failed: {str(e)}"
|
||||
self.set_timer(3.0, self._restore_subtitle)
|
||||
|
||||
def _restore_subtitle(self) -> None:
|
||||
"""Restore the original subtitle"""
|
||||
status = "PAUSED" if self.paused else "LIVE"
|
||||
self.sub_title = f"Network Flow Analysis - {status}"
|
||||
|
||||
def action_copy_outliers(self) -> None:
|
||||
"""Copy outlier frame information to clipboard"""
|
||||
try:
|
||||
# Get selected flow from the filtered view
|
||||
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
|
||||
# For now, get the first flow (could be improved to use actual selection)
|
||||
flows = list(self.analyzer.flows.values())
|
||||
selected_flow = flows[0] if flows else None
|
||||
|
||||
if not selected_flow:
|
||||
self.sub_title = "⚠️ No flow selected"
|
||||
self.set_timer(2.0, self._restore_subtitle)
|
||||
return
|
||||
|
||||
# Build frame-type-specific outlier information
|
||||
outlier_info = []
|
||||
outlier_info.append(f"Flow: {selected_flow.src_ip}:{selected_flow.src_port} → {selected_flow.dst_ip}:{selected_flow.dst_port}")
|
||||
outlier_info.append(f"Protocol: {selected_flow.transport_protocol}")
|
||||
outlier_info.append(f"Total Packets: {selected_flow.frame_count}")
|
||||
|
||||
# Calculate total frame-type-specific outliers
|
||||
total_frame_type_outliers = sum(len(ft_stats.outlier_frames) for ft_stats in selected_flow.frame_types.values())
|
||||
outlier_info.append(f"Total Frame-Type Outliers: {total_frame_type_outliers}")
|
||||
|
||||
if total_frame_type_outliers > 0:
|
||||
outlier_info.append(f"\n=== Frame Type Outlier Analysis ===")
|
||||
|
||||
# Show outliers per frame type
|
||||
for frame_type, ft_stats in sorted(selected_flow.frame_types.items(), key=lambda x: len(x[1].outlier_frames), reverse=True):
|
||||
if ft_stats.outlier_frames:
|
||||
outlier_info.append(f"\n{frame_type}: {len(ft_stats.outlier_frames)} outliers")
|
||||
outlier_info.append(f" Frames: {', '.join(map(str, sorted(ft_stats.outlier_frames)))}")
|
||||
outlier_info.append(f" Avg ΔT: {ft_stats.avg_inter_arrival * 1000:.3f} ms")
|
||||
outlier_info.append(f" Std σ: {ft_stats.std_inter_arrival * 1000:.3f} ms")
|
||||
outlier_info.append(f" 3σ Threshold: {(ft_stats.avg_inter_arrival + 3 * ft_stats.std_inter_arrival) * 1000:.3f} ms")
|
||||
|
||||
# Show enhanced outlier information for this frame type
|
||||
if hasattr(ft_stats, 'enhanced_outlier_details') and ft_stats.enhanced_outlier_details:
|
||||
outlier_info.append(f" Enhanced Outlier Details:")
|
||||
for frame_num, prev_frame_num, inter_time in sorted(ft_stats.enhanced_outlier_details[:5]):
|
||||
deviation = (inter_time - ft_stats.avg_inter_arrival) / ft_stats.std_inter_arrival if ft_stats.std_inter_arrival > 0 else 0
|
||||
outlier_info.append(f" Frame {frame_num} (from {prev_frame_num}): {inter_time * 1000:.3f} ms ({deviation:.1f}σ)")
|
||||
if len(ft_stats.enhanced_outlier_details) > 5:
|
||||
outlier_info.append(f" ... and {len(ft_stats.enhanced_outlier_details) - 5} more")
|
||||
elif ft_stats.outlier_details:
|
||||
outlier_info.append(f" Outlier Details:")
|
||||
for frame_num, inter_time in sorted(ft_stats.outlier_details[:5]):
|
||||
deviation = (inter_time - ft_stats.avg_inter_arrival) / ft_stats.std_inter_arrival if ft_stats.std_inter_arrival > 0 else 0
|
||||
outlier_info.append(f" Frame {frame_num}: {inter_time * 1000:.3f} ms ({deviation:.1f}σ)")
|
||||
if len(ft_stats.outlier_details) > 5:
|
||||
outlier_info.append(f" ... and {len(ft_stats.outlier_details) - 5} more")
|
||||
else:
|
||||
outlier_info.append("\nNo frame-type-specific timing outliers detected.")
|
||||
|
||||
# Copy to clipboard
|
||||
clipboard_text = "\n".join(outlier_info)
|
||||
self._copy_to_clipboard(clipboard_text)
|
||||
|
||||
# Show success notification
|
||||
total_frame_type_outliers = sum(len(ft_stats.outlier_frames) for ft_stats in selected_flow.frame_types.values())
|
||||
self.sub_title = f"✅ Copied {total_frame_type_outliers} frame-type outliers to clipboard"
|
||||
self.set_timer(2.0, self._restore_subtitle)
|
||||
|
||||
except Exception as e:
|
||||
self.sub_title = f"❌ Failed to copy: {str(e)}"
|
||||
self.set_timer(2.0, self._restore_subtitle)
|
||||
|
||||
def _copy_to_clipboard(self, text: str) -> None:
|
||||
"""Copy text to system clipboard"""
|
||||
system = platform.system()
|
||||
|
||||
if system == "Darwin": # macOS
|
||||
process = subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE)
|
||||
process.communicate(text.encode('utf-8'))
|
||||
elif system == "Linux":
|
||||
# Try xclip first, then xsel
|
||||
try:
|
||||
process = subprocess.Popen(['xclip', '-selection', 'clipboard'], stdin=subprocess.PIPE)
|
||||
process.communicate(text.encode('utf-8'))
|
||||
except FileNotFoundError:
|
||||
try:
|
||||
process = subprocess.Popen(['xsel', '--clipboard', '--input'], stdin=subprocess.PIPE)
|
||||
process.communicate(text.encode('utf-8'))
|
||||
except FileNotFoundError:
|
||||
raise Exception("Neither xclip nor xsel found. Please install one.")
|
||||
elif system == "Windows":
|
||||
process = subprocess.Popen(['clip'], stdin=subprocess.PIPE, shell=True)
|
||||
process.communicate(text.encode('utf-8'))
|
||||
else:
|
||||
raise Exception(f"Unsupported platform: {system}")
|
||||
|
||||
def action_quit(self) -> None:
|
||||
"""Quit the application with proper cleanup"""
|
||||
self.cleanup()
|
||||
@@ -415,4 +631,68 @@ class StreamLensAppV2(App):
|
||||
|
||||
def on_unmount(self) -> None:
|
||||
"""Called when app is being unmounted - ensure cleanup"""
|
||||
self.cleanup()
|
||||
self.cleanup()
|
||||
|
||||
|
||||
# Debugging methods
|
||||
def start_debugging(self, web_interface: bool = True, port: int = 8080):
|
||||
"""Start debugging tools"""
|
||||
if not DEBUGGING_AVAILABLE:
|
||||
print("❌ Debugging tools not available. Run: pip install watchdog")
|
||||
return
|
||||
|
||||
self._debug_monitor = TextualStateMonitor(self)
|
||||
self._debug_monitor.start_monitoring()
|
||||
|
||||
if web_interface:
|
||||
self._debug_server = TextualStateWebServer(self._debug_monitor, port)
|
||||
self._debug_server.start()
|
||||
|
||||
print(f"🔍 Debug monitoring started!")
|
||||
if web_interface:
|
||||
print(f"🌐 Web interface: http://localhost:{port}")
|
||||
|
||||
def stop_debugging(self):
|
||||
"""Stop debugging tools"""
|
||||
if hasattr(self, '_debug_monitor') and self._debug_monitor:
|
||||
self._debug_monitor.stop_monitoring()
|
||||
if hasattr(self, '_debug_server') and self._debug_server:
|
||||
self._debug_server.stop()
|
||||
|
||||
def debug_widget_tree(self):
|
||||
"""Print current widget tree to console"""
|
||||
if not DEBUGGING_AVAILABLE:
|
||||
print("❌ Debugging tools not available")
|
||||
return
|
||||
|
||||
data = inspect_textual_app(self)
|
||||
print("🔍 TEXTUAL APP INSPECTION")
|
||||
print("=" * 50)
|
||||
print_widget_tree(data.get('current_screen', {}))
|
||||
|
||||
def debug_focused_widget(self):
|
||||
"""Print info about currently focused widget"""
|
||||
focused = self.focused
|
||||
if focused:
|
||||
print(f"🎯 Focused widget: {focused.__class__.__name__}")
|
||||
if hasattr(focused, 'id'):
|
||||
print(f" ID: {focused.id}")
|
||||
if hasattr(focused, 'classes'):
|
||||
print(f" Classes: {list(focused.classes)}")
|
||||
if hasattr(focused, 'label'):
|
||||
print(f" Label: {focused.label}")
|
||||
else:
|
||||
print("🎯 No widget has focus")
|
||||
|
||||
# Debugging key bindings
|
||||
def action_debug_tree(self):
|
||||
"""Debug action: Print widget tree"""
|
||||
self.debug_widget_tree()
|
||||
|
||||
def action_debug_focus(self):
|
||||
"""Debug action: Print focused widget"""
|
||||
self.debug_focused_widget()
|
||||
|
||||
def action_start_web_debug(self):
|
||||
"""Debug action: Start web debugging interface"""
|
||||
self.start_debugging()
|
||||
|
||||
621
analyzer/tui/textual/app_v2.py.backup
Normal file
621
analyzer/tui/textual/app_v2.py.backup
Normal file
@@ -0,0 +1,621 @@
|
||||
"""
|
||||
StreamLens Textual Application V2 - TipTop-Inspired Design
|
||||
Modern TUI with real-time metrics, sparklines, and professional monitoring aesthetic
|
||||
"""
|
||||
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.containers import Container, Horizontal, Vertical, ScrollableContainer
|
||||
from textual.widgets import Header, Footer, Static, DataTable, Label, TabPane
|
||||
from textual.reactive import reactive
|
||||
from textual.timer import Timer
|
||||
from textual.events import MouseDown, MouseMove
|
||||
from typing import TYPE_CHECKING
|
||||
from rich.text import Text
|
||||
from rich.console import Group
|
||||
from rich.panel import Panel
|
||||
from rich.table import Table
|
||||
import time
|
||||
import signal
|
||||
import sys
|
||||
import datetime
|
||||
from pathlib import Path
|
||||
import subprocess
|
||||
import platform
|
||||
|
||||
from .widgets.sparkline import SparklineWidget
|
||||
from .widgets.metric_card import MetricCard
|
||||
from .widgets.flow_table_v2 import EnhancedFlowTable
|
||||
from .widgets.filtered_flow_view import FilteredFlowView
|
||||
from ...reporting import FlowReportGenerator
|
||||
from .widgets.split_flow_details import FlowMainDetailsPanel, SubFlowDetailsPanel
|
||||
from .widgets.debug_panel import DebugPanel
|
||||
from .widgets.progress_bar import ParsingProgressBar
|
||||
from ...analysis.background_analyzer import BackgroundAnalyzer
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ...analysis.core import EthernetAnalyzer
|
||||
|
||||
|
||||
class StreamLensAppV2(App):
|
||||
"""
|
||||
StreamLens TipTop-Inspired Interface
|
||||
|
||||
Features:
|
||||
- Real-time metrics with sparklines
|
||||
- Color-coded quality indicators
|
||||
- Compact information display
|
||||
- Multi-column layout
|
||||
- Smooth live updates
|
||||
"""
|
||||
|
||||
CSS_PATH = "styles/streamlens_v2.tcss"
|
||||
ENABLE_COMMAND_PALETTE = False
|
||||
AUTO_FOCUS = None
|
||||
|
||||
BINDINGS = [
|
||||
("q", "quit", "Quit"),
|
||||
("1", "select_filter('1')", "Overview"),
|
||||
("2", "select_filter('2')", "Frame Type 2"),
|
||||
("3", "select_filter('3')", "Frame Type 3"),
|
||||
("4", "select_filter('4')", "Frame Type 4"),
|
||||
("5", "select_filter('5')", "Frame Type 5"),
|
||||
("6", "select_filter('6')", "Frame Type 6"),
|
||||
("7", "select_filter('7')", "Frame Type 7"),
|
||||
("8", "select_filter('8')", "Frame Type 8"),
|
||||
("9", "select_filter('9')", "Frame Type 9"),
|
||||
("0", "select_filter('0')", "Frame Type 10"),
|
||||
("alt+1", "sort_table_column(0)", "Sort by column 1"),
|
||||
("alt+2", "sort_table_column(1)", "Sort by column 2"),
|
||||
("alt+3", "sort_table_column(2)", "Sort by column 3"),
|
||||
("alt+4", "sort_table_column(3)", "Sort by column 4"),
|
||||
("alt+5", "sort_table_column(4)", "Sort by column 5"),
|
||||
("alt+6", "sort_table_column(5)", "Sort by column 6"),
|
||||
("alt+7", "sort_table_column(6)", "Sort by column 7"),
|
||||
("alt+8", "sort_table_column(7)", "Sort by column 8"),
|
||||
("alt+9", "sort_table_column(8)", "Sort by column 9"),
|
||||
("alt+0", "sort_table_column(9)", "Sort by column 10"),
|
||||
("p", "toggle_pause", "Pause"),
|
||||
("d", "show_details", "Details"),
|
||||
("v", "toggle_view_mode", "Toggle View"),
|
||||
("r", "generate_report", "Generate Report"),
|
||||
("o", "copy_outliers", "Copy Outliers"),
|
||||
("?", "toggle_help", "Help"),
|
||||
]
|
||||
|
||||
# Reactive attributes
|
||||
total_flows = reactive(0)
|
||||
total_packets = reactive(0)
|
||||
packets_per_sec = reactive(0.0)
|
||||
bytes_per_sec = reactive(0.0)
|
||||
enhanced_flows = reactive(0)
|
||||
outlier_count = reactive(0)
|
||||
debug_visible = reactive(False) # Hide debug panel for now
|
||||
|
||||
# Update timers
|
||||
metric_timer: Timer = None
|
||||
flow_timer: Timer = None
|
||||
|
||||
def __init__(self, analyzer: 'EthernetAnalyzer'):
|
||||
super().__init__()
|
||||
self.analyzer = analyzer
|
||||
self.title = "StreamLens"
|
||||
self.sub_title = "Network Flow Analysis"
|
||||
self.paused = False
|
||||
|
||||
# Background parsing support - Use single thread to avoid race conditions in frame reference tracking
|
||||
self.background_analyzer = BackgroundAnalyzer(
|
||||
analyzer=analyzer,
|
||||
num_threads=1, # Single-threaded to prevent race conditions in outlier frame references
|
||||
batch_size=1000,
|
||||
progress_callback=self._on_progress_update,
|
||||
flow_update_callback=self._on_flow_update
|
||||
)
|
||||
self.pcap_file = None
|
||||
|
||||
|
||||
# Metrics history for sparklines
|
||||
self.packets_history = []
|
||||
self.bytes_history = []
|
||||
self.flows_history = []
|
||||
self.max_history = 60 # 60 seconds of history
|
||||
|
||||
def compose(self) -> ComposeResult:
|
||||
"""Create TipTop-inspired layout"""
|
||||
yield Header()
|
||||
|
||||
with Container(id="main-container"):
|
||||
# Progress bar for PCAP loading (initially hidden)
|
||||
yield ParsingProgressBar(id="progress-bar")
|
||||
|
||||
# Ultra-compact metrics bar
|
||||
with Horizontal(id="metrics-bar"):
|
||||
yield MetricCard("Flows", f"{self.total_flows}", id="flows-metric")
|
||||
yield MetricCard("Pkts/s", f"{self.packets_per_sec:.0f}", id="packets-metric")
|
||||
yield MetricCard("Vol/s", self._format_bytes_per_sec(self.bytes_per_sec), id="volume-metric")
|
||||
yield MetricCard("Enhanced", f"{self.enhanced_flows}", color="success", id="enhanced-metric")
|
||||
yield MetricCard("Outliers", f"{self.outlier_count}", color="warning" if self.outlier_count > 0 else "normal", id="outliers-metric")
|
||||
|
||||
# Main content area with conditional debug panel
|
||||
with Horizontal(id="content-area"):
|
||||
# Left - Filtered flow view with frame type buttons
|
||||
yield FilteredFlowView(
|
||||
self.analyzer,
|
||||
id="filtered-flow-view",
|
||||
classes="panel-wide"
|
||||
)
|
||||
|
||||
# Middle - Flow details
|
||||
with Vertical(id="flow-panels"):
|
||||
yield FlowMainDetailsPanel(id="main-flow-details")
|
||||
yield SubFlowDetailsPanel(id="sub-flow-details")
|
||||
|
||||
# Right - Debug panel (conditionally visible)
|
||||
if self.debug_visible:
|
||||
yield DebugPanel(id="debug-panel")
|
||||
|
||||
yield Footer()
|
||||
|
||||
def on_mount(self) -> None:
|
||||
"""Initialize the application with TipTop-style updates"""
|
||||
try:
|
||||
debug_panel = self.query_one("#debug-panel", DebugPanel)
|
||||
debug_panel.add_debug_message("APP: Application mounted, checking panels...")
|
||||
|
||||
try:
|
||||
main_panel = self.query_one("#main-flow-details", FlowMainDetailsPanel)
|
||||
sub_panel = self.query_one("#sub-flow-details", SubFlowDetailsPanel)
|
||||
debug_panel.add_debug_message("APP: Both panels found successfully")
|
||||
except Exception as e:
|
||||
debug_panel.add_debug_message(f"APP: Panel query failed: {e}")
|
||||
except:
|
||||
pass # Debug panel not visible
|
||||
|
||||
# Set initial subtitle with view mode
|
||||
try:
|
||||
flow_table = self.query_one("#flow-table", EnhancedFlowTable)
|
||||
view_mode = flow_table.get_current_view_mode()
|
||||
status = "PAUSED" if self.paused else "LIVE"
|
||||
self.sub_title = f"Network Flow Analysis - {status} - {view_mode} VIEW"
|
||||
except:
|
||||
pass
|
||||
|
||||
self.update_metrics()
|
||||
|
||||
# Set up update intervals (slower during parsing to reduce CPU usage)
|
||||
self.metric_timer = self.set_interval(5.0, self.update_metrics) # 0.2Hz for slower background updates
|
||||
self.flow_timer = self.set_interval(10.0, self.update_flows) # 0.1Hz for slower fallback flow updates
|
||||
|
||||
# Initialize sparkline history
|
||||
self._initialize_history()
|
||||
|
||||
# Set initial focus to the flow table for immediate keyboard navigation
|
||||
self.call_after_refresh(self._set_initial_focus)
|
||||
|
||||
def _set_initial_focus(self):
|
||||
"""Set initial focus to the filtered flow view after widgets are ready"""
|
||||
try:
|
||||
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
|
||||
flow_view.flow_table.focus()
|
||||
except Exception:
|
||||
# If flow view isn't ready yet, try again after a short delay
|
||||
self.set_timer(0.1, self._set_initial_focus)
|
||||
|
||||
def _initialize_history(self):
|
||||
"""Initialize metrics history arrays"""
|
||||
current_time = time.time()
|
||||
for _ in range(self.max_history):
|
||||
self.packets_history.append(0)
|
||||
self.bytes_history.append(0)
|
||||
self.flows_history.append(0)
|
||||
|
||||
def update_metrics(self) -> None:
|
||||
"""Update real-time metrics and sparklines"""
|
||||
if self.paused:
|
||||
return
|
||||
|
||||
# Get current metrics
|
||||
summary = self.analyzer.get_summary()
|
||||
self.total_flows = summary.get('unique_flows', 0)
|
||||
self.total_packets = summary.get('total_packets', 0)
|
||||
|
||||
# Calculate rates (simplified for now)
|
||||
# In real implementation, track deltas over time
|
||||
current_time = time.time()
|
||||
if not hasattr(self, '_start_time'):
|
||||
self._start_time = current_time
|
||||
|
||||
elapsed = max(1, current_time - self._start_time)
|
||||
self.packets_per_sec = self.total_packets / elapsed
|
||||
self.bytes_per_sec = summary.get('total_bytes', 0) / elapsed
|
||||
|
||||
# Count enhanced and outliers (thread-safe access)
|
||||
enhanced = 0
|
||||
outliers = 0
|
||||
try:
|
||||
# Use background analyzer's thread-safe flow access
|
||||
flows = self.background_analyzer.get_current_flows()
|
||||
for flow in flows.values():
|
||||
if flow.enhanced_analysis.decoder_type != "Standard":
|
||||
enhanced += 1
|
||||
# Use frame-type-specific outliers instead of flow-level outliers
|
||||
outliers += sum(len(ft_stats.outlier_frames) for ft_stats in flow.frame_types.values())
|
||||
except Exception:
|
||||
# Fallback to direct access if background analyzer not available
|
||||
for flow in self.analyzer.flows.values():
|
||||
if flow.enhanced_analysis.decoder_type != "Standard":
|
||||
enhanced += 1
|
||||
# Use frame-type-specific outliers instead of flow-level outliers
|
||||
outliers += sum(len(ft_stats.outlier_frames) for ft_stats in flow.frame_types.values())
|
||||
|
||||
self.enhanced_flows = enhanced
|
||||
self.outlier_count = outliers
|
||||
|
||||
# Update metric cards
|
||||
self._update_metric_cards()
|
||||
|
||||
# Update sparklines (removed - no longer in left panel)
|
||||
# self._update_sparklines()
|
||||
|
||||
def _update_metric_cards(self):
|
||||
"""Update the metric card displays"""
|
||||
# Update flows metric
|
||||
flows_card = self.query_one("#flows-metric", MetricCard)
|
||||
flows_card.update_value(f"{self.total_flows}")
|
||||
|
||||
# Update packets/s with color coding
|
||||
packets_card = self.query_one("#packets-metric", MetricCard)
|
||||
packets_card.update_value(f"{self.packets_per_sec:.1f}")
|
||||
if self.packets_per_sec > 10000:
|
||||
packets_card.color = "warning"
|
||||
elif self.packets_per_sec > 50000:
|
||||
packets_card.color = "error"
|
||||
else:
|
||||
packets_card.color = "success"
|
||||
|
||||
# Update volume/s
|
||||
volume_card = self.query_one("#volume-metric", MetricCard)
|
||||
volume_card.update_value(self._format_bytes_per_sec(self.bytes_per_sec))
|
||||
|
||||
# Update enhanced flows
|
||||
enhanced_card = self.query_one("#enhanced-metric", MetricCard)
|
||||
enhanced_card.update_value(f"{self.enhanced_flows}")
|
||||
|
||||
# Update outliers with color
|
||||
outliers_card = self.query_one("#outliers-metric", MetricCard)
|
||||
outliers_card.update_value(f"{self.outlier_count}")
|
||||
if self.outlier_count > 100:
|
||||
outliers_card.color = "error"
|
||||
elif self.outlier_count > 10:
|
||||
outliers_card.color = "warning"
|
||||
else:
|
||||
outliers_card.color = "normal"
|
||||
|
||||
def _update_sparklines(self):
|
||||
"""Update sparkline charts with latest data"""
|
||||
# Add new data points
|
||||
self.packets_history.append(self.packets_per_sec)
|
||||
self.bytes_history.append(self.bytes_per_sec)
|
||||
self.flows_history.append(self.total_flows)
|
||||
|
||||
# Keep only recent history
|
||||
if len(self.packets_history) > self.max_history:
|
||||
self.packets_history.pop(0)
|
||||
self.bytes_history.pop(0)
|
||||
self.flows_history.pop(0)
|
||||
|
||||
# Update sparkline widgets
|
||||
flow_spark = self.query_one("#flow-rate-spark", SparklineWidget)
|
||||
flow_spark.update_data(self.flows_history)
|
||||
|
||||
packet_spark = self.query_one("#packet-rate-spark", SparklineWidget)
|
||||
packet_spark.update_data(self.packets_history)
|
||||
|
||||
def update_flows(self) -> None:
|
||||
"""Update flow table data"""
|
||||
if self.paused:
|
||||
return
|
||||
|
||||
# Update filtered flow view
|
||||
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
|
||||
flow_view.refresh_frame_types()
|
||||
flow_view.refresh_flow_data()
|
||||
def _on_progress_update(self, progress):
|
||||
"""Handle progress updates from background parser"""
|
||||
try:
|
||||
# Use call_from_thread to safely update UI from background thread
|
||||
self.call_from_thread(self._update_progress_ui, progress)
|
||||
except Exception:
|
||||
# Ignore errors during shutdown
|
||||
pass
|
||||
|
||||
def _update_progress_ui(self, progress):
    """Update the progress bar UI (runs on the main thread).

    Depending on the state carried by *progress* this shows an error,
    marks parsing complete, or pushes an incremental progress update.
    """
    try:
        bar = self.query_one("#progress-bar", ParsingProgressBar)

        if progress.error:
            bar.show_error(progress.error)
            return

        if progress.is_complete:
            bar.complete_parsing()
            # Parsing finished: frame type buttons can now be created.
            self._create_frame_type_buttons()
            return

        # First update with a known total makes the bar visible.
        if not bar.is_visible and progress.total_packets > 0:
            bar.start_parsing(progress.total_packets)

        bar.update_progress(
            progress.processed_packets,
            progress.total_packets,
            progress.packets_per_second,
            progress.estimated_time_remaining,
        )
    except Exception:
        # The progress bar widget may not be mounted yet; ignore.
        pass
|
||||
|
||||
def _on_flow_update(self):
    """Handle flow data updates from background parser.

    Runs on a worker thread; forwards to :meth:`_update_flow_ui` on the
    main thread via ``call_from_thread``.
    """
    try:
        # Use call_from_thread to safely update UI from background thread
        self.call_from_thread(self._update_flow_ui)
    except Exception:
        # Ignore errors during shutdown
        pass
|
||||
|
||||
def _update_flow_ui(self):
    """Update flow UI (called from main thread).

    Refreshes the filtered flow view (frame types before rows, so filter
    buttons appear as new frame types are detected), ensures buttons exist
    once parsing ends, and keeps the metric cards current.
    """
    try:
        # Update filtered flow view - frame types first for dynamic button creation
        flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
        flow_view.refresh_frame_types()  # This will create buttons as frame types are detected
        flow_view.refresh_flow_data()

        # Also trigger button creation if parsing is complete but buttons haven't been created yet
        if not self.analyzer.is_parsing and not getattr(flow_view, '_buttons_created', False):
            self._create_frame_type_buttons()

        # Also update metrics in real-time
        self.update_metrics()
    except Exception:
        # Flow view widget may not be available yet
        pass
|
||||
|
||||
def _create_frame_type_buttons(self):
    """Force a frame-type button and row refresh once parsing has completed."""
    try:
        view = self.query_one("#filtered-flow-view", FilteredFlowView)
        # Re-run the refresh now that the full set of frame types is known.
        view.refresh_frame_types()
        view.refresh_flow_data()
    except Exception:
        # The flow view widget may not be mounted yet; nothing to do.
        pass
|
||||
|
||||
def start_background_parsing(self, pcap_file: str):
    """Remember the PCAP path and kick off background parsing.

    Args:
        pcap_file: Path to the PCAP file to parse.
    """
    self.pcap_file = pcap_file
    self.background_analyzer.start_parsing(pcap_file)
|
||||
|
||||
def stop_background_parsing(self):
    """Ask the background analyzer to stop its parsing threads."""
    self.background_analyzer.stop_parsing()
|
||||
|
||||
def cleanup(self):
    """Cleanup resources when the app shuts down.

    Each step is guarded independently so that a failure in one (e.g. the
    background analyzer raising during teardown) cannot prevent the UI
    timers from being stopped — in the original, a single try block meant
    an analyzer error skipped both timer stops.
    """
    try:
        self.background_analyzer.cleanup()
    except Exception:
        # Don't let cleanup errors prevent shutdown
        pass

    # Cancel any pending timers, each guarded on its own.
    for timer in (self.metric_timer, self.flow_timer):
        try:
            if timer:
                timer.stop()
        except Exception:
            # Don't let cleanup errors prevent shutdown
            pass
|
||||
|
||||
def on_enhanced_flow_table_flow_selected(self, event: EnhancedFlowTable.FlowSelected) -> None:
    """Handle flow selection: log to the debug panel and refresh detail panels.

    Args:
        event: Selection event carrying the chosen flow and sub-flow type.
    """
    try:
        debug_panel = self.query_one("#debug-panel", DebugPanel)
        flow_info = f"{event.flow.src_ip}:{event.flow.src_port}" if event.flow else "None"
        debug_panel.add_debug_message(f"APP: Flow selected - {flow_info}, subflow={event.subflow_type}")
    except Exception:
        # Debug panel not visible. Was a bare `except:` — narrowed to
        # Exception so KeyboardInterrupt/SystemExit are not swallowed.
        pass

    if event.flow:
        # Update main flow details panel
        main_panel = self.query_one("#main-flow-details", FlowMainDetailsPanel)
        main_panel.update_flow(event.flow)

        # Update sub-flow details panel
        sub_panel = self.query_one("#sub-flow-details", SubFlowDetailsPanel)
        sub_panel.update_flow(event.flow, event.subflow_type)
|
||||
|
||||
|
||||
def _format_bytes_per_sec(self, bps: float) -> str:
|
||||
"""Format bytes per second with appropriate units"""
|
||||
if bps >= 1_000_000_000:
|
||||
return f"{bps / 1_000_000_000:.1f} GB/s"
|
||||
elif bps >= 1_000_000:
|
||||
return f"{bps / 1_000_000:.1f} MB/s"
|
||||
elif bps >= 1_000:
|
||||
return f"{bps / 1_000:.1f} KB/s"
|
||||
else:
|
||||
return f"{bps:.0f} B/s"
|
||||
|
||||
def action_toggle_pause(self) -> None:
    """Flip the pause flag and reflect the new state in the subtitle."""
    self.paused = not self.paused
    # Keep the window subtitle in sync with the pause state.
    label = "PAUSED" if self.paused else "LIVE"
    self.sub_title = f"Network Flow Analysis - {label}"
|
||||
|
||||
def action_select_filter(self, number: str) -> None:
    """Forward a number-key filter selection to the filtered flow view.

    Args:
        number: The pressed number key, as a string.
    """
    try:
        self.query_one("#filtered-flow-view", FilteredFlowView).action_select_filter(number)
    except Exception:
        # The flow view may not be mounted yet; ignore.
        pass
|
||||
|
||||
def action_sort_table_column(self, column_index: int) -> None:
    """Forward a column-sort request to the filtered flow view.

    Args:
        column_index: Zero-based index of the column to sort by.
    """
    try:
        self.query_one("#filtered-flow-view", FilteredFlowView).action_sort_column(column_index)
    except Exception:
        # The flow view may not be mounted yet; ignore.
        pass
|
||||
|
||||
def action_show_details(self) -> None:
    """Show detailed view for selected flow.

    Currently a stub: no detail modal exists yet.
    """
    # TODO: Implement detailed flow modal
    pass
|
||||
|
||||
def action_toggle_view_mode(self) -> None:
    """Toggle between different display modes.

    Currently a stub with no effect.
    """
    # For now, this could cycle through different column layouts
    # or show more/less detail in the frame type views
    pass
|
||||
|
||||
|
||||
def on_mouse_down(self, event: MouseDown) -> None:
    """Prevent default mouse down behavior to disable mouse interaction."""
    # Mouse interaction is deliberately disabled app-wide (keyboard-only UI).
    event.prevent_default()
|
||||
|
||||
def on_mouse_move(self, event: MouseMove) -> None:
    """Prevent default mouse move behavior to disable mouse interaction."""
    # Mouse interaction is deliberately disabled app-wide (keyboard-only UI).
    event.prevent_default()
|
||||
|
||||
def action_generate_report(self) -> None:
    """Generate a markdown flow-analysis report and flash the result.

    Writes a timestamped markdown report next to the working directory and
    shows a success/failure notification in the subtitle for three seconds.
    (The unused ``report_content`` local from the original was removed —
    the generator writes the file itself.)
    """
    try:
        # Timestamped filename so repeated reports never collide.
        timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        output_file = f"streamlens_flow_report_{timestamp}.md"

        # Create report generator and emit the report in markdown format.
        report_generator = FlowReportGenerator(self.analyzer)
        report_generator.generate_report(output_file, "markdown")

        # Show success notification in the footer
        self.sub_title = f"✅ Report generated: {output_file}"

        # Set a timer to restore the original subtitle
        self.set_timer(3.0, self._restore_subtitle)

    except Exception as e:
        # Show error notification
        self.sub_title = f"❌ Report generation failed: {str(e)}"
        self.set_timer(3.0, self._restore_subtitle)
|
||||
|
||||
def _restore_subtitle(self) -> None:
|
||||
"""Restore the original subtitle"""
|
||||
status = "PAUSED" if self.paused else "LIVE"
|
||||
self.sub_title = f"Network Flow Analysis - {status}"
|
||||
|
||||
def action_copy_outliers(self) -> None:
    """Copy outlier frame information to clipboard.

    Builds a plain-text summary of per-frame-type timing outliers for one
    flow, copies it to the system clipboard via ``_copy_to_clipboard``,
    and flashes the result in the subtitle for two seconds.
    """
    try:
        # Get selected flow from the filtered view
        # NOTE(review): flow_view is queried but never used; the "selection"
        # is approximated by the first flow below — improve to real selection.
        flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
        # For now, get the first flow (could be improved to use actual selection)
        flows = list(self.analyzer.flows.values())
        selected_flow = flows[0] if flows else None

        if not selected_flow:
            self.sub_title = "⚠️ No flow selected"
            self.set_timer(2.0, self._restore_subtitle)
            return

        # Build frame-type-specific outlier information
        outlier_info = []
        outlier_info.append(f"Flow: {selected_flow.src_ip}:{selected_flow.src_port} → {selected_flow.dst_ip}:{selected_flow.dst_port}")
        outlier_info.append(f"Protocol: {selected_flow.transport_protocol}")
        outlier_info.append(f"Total Packets: {selected_flow.frame_count}")

        # Calculate total frame-type-specific outliers
        total_frame_type_outliers = sum(len(ft_stats.outlier_frames) for ft_stats in selected_flow.frame_types.values())
        outlier_info.append(f"Total Frame-Type Outliers: {total_frame_type_outliers}")

        if total_frame_type_outliers > 0:
            outlier_info.append(f"\n=== Frame Type Outlier Analysis ===")

            # Show outliers per frame type, most outliers first
            for frame_type, ft_stats in sorted(selected_flow.frame_types.items(), key=lambda x: len(x[1].outlier_frames), reverse=True):
                if ft_stats.outlier_frames:
                    outlier_info.append(f"\n{frame_type}: {len(ft_stats.outlier_frames)} outliers")
                    outlier_info.append(f" Frames: {', '.join(map(str, sorted(ft_stats.outlier_frames)))}")
                    outlier_info.append(f" Avg ΔT: {ft_stats.avg_inter_arrival * 1000:.3f} ms")
                    outlier_info.append(f" Std σ: {ft_stats.std_inter_arrival * 1000:.3f} ms")
                    outlier_info.append(f" 3σ Threshold: {(ft_stats.avg_inter_arrival + 3 * ft_stats.std_inter_arrival) * 1000:.3f} ms")

                    # Show enhanced outlier information for this frame type
                    # (enhanced details include the previous frame number).
                    if hasattr(ft_stats, 'enhanced_outlier_details') and ft_stats.enhanced_outlier_details:
                        outlier_info.append(f" Enhanced Outlier Details:")
                        for frame_num, prev_frame_num, inter_time in sorted(ft_stats.enhanced_outlier_details[:5]):
                            # Deviation in sigmas; guarded against zero std.
                            deviation = (inter_time - ft_stats.avg_inter_arrival) / ft_stats.std_inter_arrival if ft_stats.std_inter_arrival > 0 else 0
                            outlier_info.append(f" Frame {frame_num} (from {prev_frame_num}): {inter_time * 1000:.3f} ms ({deviation:.1f}σ)")
                        if len(ft_stats.enhanced_outlier_details) > 5:
                            outlier_info.append(f" ... and {len(ft_stats.enhanced_outlier_details) - 5} more")
                    elif ft_stats.outlier_details:
                        outlier_info.append(f" Outlier Details:")
                        for frame_num, inter_time in sorted(ft_stats.outlier_details[:5]):
                            deviation = (inter_time - ft_stats.avg_inter_arrival) / ft_stats.std_inter_arrival if ft_stats.std_inter_arrival > 0 else 0
                            outlier_info.append(f" Frame {frame_num}: {inter_time * 1000:.3f} ms ({deviation:.1f}σ)")
                        if len(ft_stats.outlier_details) > 5:
                            outlier_info.append(f" ... and {len(ft_stats.outlier_details) - 5} more")
        else:
            outlier_info.append("\nNo frame-type-specific timing outliers detected.")

        # Copy to clipboard
        clipboard_text = "\n".join(outlier_info)
        self._copy_to_clipboard(clipboard_text)

        # Show success notification
        total_frame_type_outliers = sum(len(ft_stats.outlier_frames) for ft_stats in selected_flow.frame_types.values())
        self.sub_title = f"✅ Copied {total_frame_type_outliers} frame-type outliers to clipboard"
        self.set_timer(2.0, self._restore_subtitle)

    except Exception as e:
        self.sub_title = f"❌ Failed to copy: {str(e)}"
        self.set_timer(2.0, self._restore_subtitle)
|
||||
|
||||
def _copy_to_clipboard(self, text: str) -> None:
    """Copy *text* to the system clipboard.

    Uses ``pbcopy`` on macOS, ``xclip``/``xsel`` on Linux, and ``clip``
    on Windows.

    Args:
        text: The text to place on the clipboard.

    Raises:
        Exception: If no clipboard utility is available (Linux) or the
            platform is unsupported.
    """
    # Encode once; every branch sends the same UTF-8 payload.
    data = text.encode('utf-8')
    system = platform.system()

    if system == "Darwin":  # macOS
        process = subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE)
        process.communicate(data)
    elif system == "Linux":
        # Try xclip first, then xsel
        try:
            process = subprocess.Popen(['xclip', '-selection', 'clipboard'], stdin=subprocess.PIPE)
            process.communicate(data)
        except FileNotFoundError:
            try:
                process = subprocess.Popen(['xsel', '--clipboard', '--input'], stdin=subprocess.PIPE)
                process.communicate(data)
            except FileNotFoundError:
                raise Exception("Neither xclip nor xsel found. Please install one.")
    elif system == "Windows":
        # Fix: the original passed an argument list with shell=True, which is
        # incorrect Popen usage; `clip` is a normal executable, no shell needed.
        process = subprocess.Popen(['clip'], stdin=subprocess.PIPE)
        process.communicate(data)
    else:
        raise Exception(f"Unsupported platform: {system}")
|
||||
|
||||
def action_quit(self) -> None:
    """Clean up resources, then exit the application."""
    # Cleanup first so background threads and timers are stopped before exit.
    self.cleanup()
    self.exit()
|
||||
|
||||
def on_unmount(self) -> None:
    """Called when app is being unmounted - ensure cleanup.

    Safety net in addition to ``action_quit``: guarantees cleanup even
    when the app exits through another path.
    """
    self.cleanup()
|
||||
@@ -68,13 +68,13 @@ MetricCard {
|
||||
}
|
||||
|
||||
FlowMainDetailsPanel {
|
||||
height: 3fr;
|
||||
height: 2fr;
|
||||
background: #1a1a1a;
|
||||
border: solid #ff8800;
|
||||
}
|
||||
|
||||
SubFlowDetailsPanel {
|
||||
height: 2fr;
|
||||
height: 3fr;
|
||||
background: #1a1a1a;
|
||||
border: solid #ff8800;
|
||||
}
|
||||
@@ -206,4 +206,82 @@ DataTable:focus {
|
||||
|
||||
/* Panel Borders - Removed for clean look */
|
||||
|
||||
/* Tabbed Content Styling */
|
||||
TabbedContent {
|
||||
height: 1fr;
|
||||
background: #1a1a1a;
|
||||
dock: top;
|
||||
}
|
||||
|
||||
TabbedContent > ContentSwitcher {
|
||||
height: 1fr;
|
||||
background: #1a1a1a;
|
||||
}
|
||||
|
||||
/* Tab Bar Styling - Force horizontal layout */
|
||||
TabbedContent > Horizontal {
|
||||
height: 3;
|
||||
background: #262626;
|
||||
dock: top;
|
||||
}
|
||||
|
||||
TabbedContent Tabs {
|
||||
height: 3;
|
||||
background: #262626;
|
||||
color: #999999;
|
||||
dock: top;
|
||||
}
|
||||
|
||||
TabbedContent Tab {
|
||||
padding: 0 2;
|
||||
background: transparent;
|
||||
color: #999999;
|
||||
text-style: none;
|
||||
}
|
||||
|
||||
TabbedContent Tab:hover {
|
||||
background: #333333;
|
||||
color: #ffffff;
|
||||
}
|
||||
|
||||
TabbedContent Tab.-active {
|
||||
background: #0080ff;
|
||||
color: #ffffff;
|
||||
text-style: bold;
|
||||
}
|
||||
|
||||
TabbedContent Tab:disabled {
|
||||
color: #666666;
|
||||
text-style: dim;
|
||||
}
|
||||
|
||||
/* Tab Pane Content */
|
||||
TabPane {
|
||||
padding: 0;
|
||||
height: 1fr;
|
||||
}
|
||||
|
||||
/* Frame Type Content Layout */
|
||||
FrameTypeTabContent {
|
||||
height: 1fr;
|
||||
width: 1fr;
|
||||
}
|
||||
|
||||
FrameTypeTabContent > Horizontal {
|
||||
height: 1fr;
|
||||
}
|
||||
|
||||
FrameTypeFlowTable {
|
||||
width: 70%;
|
||||
height: 1fr;
|
||||
border: solid #666666;
|
||||
}
|
||||
|
||||
FrameTypeStatsPanel {
|
||||
width: 30%;
|
||||
height: 1fr;
|
||||
border: solid #666666;
|
||||
padding: 1;
|
||||
}
|
||||
|
||||
/* End of styles */
|
||||
728
analyzer/tui/textual/widgets/filtered_flow_view.py
Normal file
728
analyzer/tui/textual/widgets/filtered_flow_view.py
Normal file
@@ -0,0 +1,728 @@
|
||||
"""
|
||||
Filtered Flow View Widget - Grid with frame type filter buttons
|
||||
"""
|
||||
|
||||
from textual.widgets import Button, DataTable, Static
|
||||
from textual.containers import Vertical, Horizontal
|
||||
from textual.reactive import reactive
|
||||
from textual.message import Message
|
||||
from textual.binding import Binding
|
||||
from typing import TYPE_CHECKING, Optional, List, Dict
|
||||
from rich.text import Text
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ....analysis.core import EthernetAnalyzer
|
||||
from ....models import FlowStats
|
||||
|
||||
|
||||
class FrameTypeButton(Button):
    """Filter-bar button representing a single frame type.

    The label packs hotkey, abbreviated frame type name and flow count into
    one compact row, e.g. ``"2.CH10(5)"``.
    """

    def __init__(self, frame_type: str, hotkey: str, count: int = 0, **kwargs):
        # frame_type: the frame type this button filters on (may later be
        # reassigned for placeholder buttons); count: current flow count.
        self.frame_type = frame_type
        self.count = count
        # Shorten frame type names for 1-row buttons
        short_name = self._shorten_frame_type(frame_type)
        label = f"{hotkey}.{short_name}({count})"  # Remove spaces to be more compact
        # Create valid ID by removing/replacing invalid characters
        safe_id = frame_type.replace('-', '_').replace(':', '_').replace('(', '_').replace(')', '_').replace(' ', '_').replace('.', '_')
        super().__init__(label, id=f"btn-{safe_id}", **kwargs)

        # Ensure proper styling from initialization
        self.styles.background = "#404040"
        self.styles.color = "white"

    def _shorten_frame_type(self, frame_type: str) -> str:
        """Shorten frame type names for compact 1-row buttons"""
        # Known frame types map to fixed abbreviations; unknown types are
        # truncated to at most 6 characters.
        abbreviations = {
            'CH10-Data': 'CH10',
            'CH10-Multi-Source': 'Multi',
            'CH10-Extended': 'Ext',
            'CH10-ACTTS': 'ACTTS',
            'PTP-Signaling': 'PTP-S',
            'PTP-FollowUp': 'PTP-F',
            'PTP-Sync': 'PTP',
            'PTP-Unknown (0x6)': 'PTP-U',
            'UDP': 'UDP',
            'TMATS': 'TMATS',
            'TCP': 'TCP'
        }
        return abbreviations.get(frame_type, frame_type[:6])  # Max 6 chars for unknown types
|
||||
|
||||
|
||||
import time
|
||||
import traceback
|
||||
|
||||
def debug_log(message):
    """Print a timestamped debug line to stdout.

    Fix: the original used ``time.strftime("%H:%M:%S.%f")[:-3]``, but ``%f``
    is a datetime-only directive that ``time.strftime`` does not support —
    it leaves a literal (platform-dependent) ``%f`` in the output, and the
    ``[:-3]`` slice then chopped real characters. The millisecond suffix is
    now derived from ``time.time()`` directly.
    """
    now = time.time()
    millis = int((now % 1) * 1000)
    timestamp = f"{time.strftime('%H:%M:%S', time.localtime(now))}.{millis:03d}"
    print(f"[{timestamp}] 🔍 DEBUG: {message}")
|
||||
|
||||
def debug_button_state(frame_type_buttons, phase):
    """Dump the current state of the button dictionary to the debug log.

    Args:
        frame_type_buttons: Mapping of frame-type name to button widget.
        phase: Short label identifying where in the lifecycle this dump is.
    """
    debug_log(f"=== BUTTON STATE - {phase} ===")
    debug_log(f"Total buttons in dict: {len(frame_type_buttons)}")
    for name, btn in frame_type_buttons.items():
        parent = getattr(btn, 'parent', None)
        parent_info = f"parent: {parent.__class__.__name__}" if parent else "NO PARENT"
        debug_log(f" {name}: {btn.__class__.__name__} ({parent_info})")
    debug_log("=" * 40)
|
||||
class FilteredFlowView(Vertical):
|
||||
"""Flow grid with frame type filter buttons"""
|
||||
|
||||
BINDINGS = [
|
||||
Binding("alt+1", "sort_column(0)", "Sort by column 1", show=False),
|
||||
Binding("alt+2", "sort_column(1)", "Sort by column 2", show=False),
|
||||
Binding("alt+3", "sort_column(2)", "Sort by column 3", show=False),
|
||||
Binding("alt+4", "sort_column(3)", "Sort by column 4", show=False),
|
||||
Binding("alt+5", "sort_column(4)", "Sort by column 5", show=False),
|
||||
Binding("alt+6", "sort_column(5)", "Sort by column 6", show=False),
|
||||
Binding("alt+7", "sort_column(6)", "Sort by column 7", show=False),
|
||||
Binding("alt+8", "sort_column(7)", "Sort by column 8", show=False),
|
||||
Binding("alt+9", "sort_column(8)", "Sort by column 9", show=False),
|
||||
Binding("alt+0", "sort_column(9)", "Sort by column 10", show=False),
|
||||
]
|
||||
|
||||
DEFAULT_CSS = """
|
||||
FilteredFlowView {
|
||||
height: 1fr;
|
||||
}
|
||||
|
||||
#filter-bar {
|
||||
height: 3; /* Fixed height to match button height */
|
||||
min-height: 3;
|
||||
max-height: 3;
|
||||
background: #262626;
|
||||
padding: 0 1;
|
||||
dock: top;
|
||||
layout: horizontal;
|
||||
}
|
||||
|
||||
#filter-bar Button {
|
||||
margin: 0 1 0 0; /* Consistent right spacing */
|
||||
min-width: 10; /* Reduced for compact labels */
|
||||
height: 3; /* Fixed height to ensure text visibility */
|
||||
max-height: 3; /* Prevent button from growing */
|
||||
padding: 0 1; /* Minimal horizontal padding for text readability */
|
||||
text-align: center; /* Center text in button */
|
||||
content-align: center middle;
|
||||
background: #404040; /* Default gray background - not black */
|
||||
color: white;
|
||||
border: solid #666666; /* Visible border - Textual format */
|
||||
}
|
||||
|
||||
#btn-overview {
|
||||
margin: 0 1 0 0; /* Overview button - same spacing */
|
||||
height: 3; /* Fixed height to ensure text visibility */
|
||||
max-height: 3; /* Prevent button from growing */
|
||||
padding: 0 1; /* Minimal horizontal padding for text readability */
|
||||
text-align: center; /* Center text in button */
|
||||
content-align: center middle;
|
||||
background: #404040; /* Default gray background - not black */
|
||||
color: white;
|
||||
border: solid #666666; /* Visible border - Textual format */
|
||||
}
|
||||
|
||||
#filter-bar Button:hover {
|
||||
background: #0080ff;
|
||||
}
|
||||
|
||||
#filter-bar Button.-active {
|
||||
background: #0080ff;
|
||||
color: white; /* Ensure text is visible on active state */
|
||||
text-style: bold;
|
||||
border: solid #0080ff; /* Match border to background - Textual format */
|
||||
}
|
||||
|
||||
#filtered-flow-table {
|
||||
height: 1fr;
|
||||
}
|
||||
"""
|
||||
|
||||
selected_frame_type = reactive("Overview")
|
||||
|
||||
class FrameTypeSelected(Message):
|
||||
"""Message when frame type filter is selected"""
|
||||
def __init__(self, frame_type: str) -> None:
|
||||
self.frame_type = frame_type
|
||||
super().__init__()
|
||||
|
||||
def __init__(self, analyzer: 'EthernetAnalyzer', **kwargs):
    """Store the analyzer and initialize button, sort and refresh state.

    Args:
        analyzer: The shared analyzer whose flows this view displays.
    """
    debug_log("FilteredFlowView.__init__ called")
    super().__init__(**kwargs)
    self.analyzer = analyzer

    # Button bookkeeping: frame-type name -> Button, plus flags that avoid
    # pointless refreshes and button re-creation flicker.
    self.frame_type_buttons = {}
    self.flow_table = None
    self._last_frame_types = set()
    self._buttons_created = False

    # Table sorting state (a None column index means unsorted).
    self.sort_column = None
    self.sort_reverse = False

    # Throttle button refreshes (at most once per second) to prevent race
    # conditions while parsing is still producing new frame types.
    self._last_refresh_time = 0
    self._refresh_throttle_seconds = 1.0

    # Static, ordered list of frame types that get buttons up front; the
    # order never changes based on counts during parsing.
    self.predefined_frame_types = [
        'UDP',
        'CH10-Data',
        'PTP-Sync',
        'PTP-Signaling',
        'TMATS',
        'TCP',
        'PTP-FollowUp',
        'CH10-Multi-Source',
        'CH10-Extended',
    ]
|
||||
|
||||
def compose(self):
    """Create the filter bar and flow grid - ALL BUTTONS CREATED ONCE, NEVER DESTROYED.

    Yields, in order: the Overview button, one button per predefined frame
    type (hidden until data arrives), placeholder buttons for frame types
    discovered during parsing, and finally the flow DataTable. Buttons are
    only ever shown/hidden after this point, never recreated.
    """
    debug_log("compose() - Creating filter bar and ALL buttons at initialization")
    debug_button_state(self.frame_type_buttons, "BEFORE_COMPOSE")

    # Filter button bar at top
    with Horizontal(id="filter-bar"):
        # Overview button (hotkey 1) - always visible, always active initially
        overview_btn = Button("1.Overview", id="btn-overview", classes="-active")
        overview_btn.styles.background = "#0080ff"  # Active blue background
        overview_btn.styles.color = "white"
        self.frame_type_buttons["Overview"] = overview_btn
        yield overview_btn

        # Create ALL possible frame type buttons at initialization - NEVER RECREATED
        # Static order prevents any tab reordering throughout the application lifecycle
        hotkeys = ['2', '3', '4', '5', '6', '7', '8', '9', '0']

        # Create buttons for ALL predefined frame types
        for i, frame_type in enumerate(self.predefined_frame_types):
            if i < len(hotkeys):
                # Start with 0 count, initially hidden - visibility managed by refresh logic
                btn = FrameTypeButton(frame_type, hotkeys[i], 0)
                btn.visible = False  # Hidden until data is available
                self.frame_type_buttons[frame_type] = btn
                yield btn

        # Create placeholder buttons for dynamic frame types discovered during parsing
        # These will be activated/shown as new frame types are discovered
        remaining_hotkeys = len(self.predefined_frame_types)
        for i in range(remaining_hotkeys, len(hotkeys)):
            # Create placeholder button that can be reassigned to new frame types
            placeholder_btn = FrameTypeButton("", hotkeys[i], 0)
            placeholder_btn.visible = False  # Hidden until assigned to a frame type
            placeholder_btn.placeholder_index = i  # Track which placeholder this is
            # Use a special key for placeholders
            self.frame_type_buttons[f"__placeholder_{i}__"] = placeholder_btn
            yield placeholder_btn

    # Flow data table
    self.flow_table = DataTable(
        id="filtered-flow-table",
        cursor_type="row",
        zebra_stripes=True,
        show_header=True,
        show_row_labels=False
    )
    yield self.flow_table
    debug_log("compose() - All widgets created")
    debug_button_state(self.frame_type_buttons, "AFTER_COMPOSE")
|
||||
|
||||
def on_mount(self):
    """Initialize the view once it is mounted.

    Sets up table columns, marks the pre-created buttons as ready, and runs
    an initial frame-type/flow refresh so the view shows current data.
    """
    debug_log("on_mount() - Initializing view")
    debug_button_state(self.frame_type_buttons, "BEFORE_MOUNT_SETUP")
    self._setup_flow_table()
    # Mark buttons as created since we pre-created them in compose()
    self._buttons_created = True
    # Update button counts and data
    self.refresh_frame_types()
    self.refresh_flow_data()
    # Ensure Overview button starts highlighted
    self._update_button_highlighting()
    debug_log("on_mount() - Initialization complete")
    debug_button_state(self.frame_type_buttons, "AFTER_MOUNT_COMPLETE")
|
||||
|
||||
def _setup_flow_table(self):
    """Setup table columns based on selected frame type.

    Overview mode gets one column per detected frame type (sorted by packet
    count); a specific frame type gets timing-statistics columns instead.
    Clears any existing columns first.
    """
    table = self.flow_table
    table.clear(columns=True)

    if self.selected_frame_type == "Overview":
        # Overview columns with individual frame type columns
        table.add_column("#", width=4, key="num")
        table.add_column("Source", width=18, key="source")
        table.add_column("Destination", width=18, key="dest")
        table.add_column("Protocol", width=8, key="protocol")
        table.add_column("Total", width=8, key="total_packets")

        # Add columns for each detected frame type
        all_frame_types = self._get_all_frame_types()
        for frame_type in sorted(all_frame_types.keys(), key=lambda x: all_frame_types[x], reverse=True):
            # Shorten column name for better display
            short_name = self._shorten_frame_type_name(frame_type)
            # Create safe key for column
            safe_key = frame_type.replace('-', '_').replace(':', '_').replace('(', '_').replace(')', '_').replace(' ', '_').replace('.', '_')
            table.add_column(short_name, width=8, key=f"ft_{safe_key}")

        table.add_column("Status", width=10, key="status")
    else:
        # Frame type specific columns
        table.add_column("#", width=4, key="num")
        table.add_column("Source", width=20, key="source")
        table.add_column("Destination", width=20, key="dest")
        table.add_column("Protocol", width=8, key="protocol")
        table.add_column(f"{self.selected_frame_type} Packets", width=12, key="ft_packets")
        table.add_column("Avg ΔT", width=10, key="avg_delta")
        table.add_column("Std ΔT", width=10, key="std_delta")
        table.add_column("Min ΔT", width=10, key="min_delta")
        table.add_column("Max ΔT", width=10, key="max_delta")
        table.add_column("Outliers", width=8, key="outliers")
        table.add_column("Quality", width=8, key="quality")
|
||||
|
||||
def refresh_frame_types(self):
    """Update button visibility and content - NEVER CREATE OR DESTROY BUTTONS.

    Throttled to at most one refresh per ``_refresh_throttle_seconds``.
    Updates counts/visibility on predefined buttons, assigns newly detected
    frame types to placeholder buttons, and rebuilds Overview columns when
    the set of frame types changes.
    """
    debug_log("refresh_frame_types() - Starting refresh (VISIBILITY-ONLY MODE)")
    debug_button_state(self.frame_type_buttons, "BEFORE_REFRESH")
    # Throttle button refresh to prevent race conditions
    import time
    current_time = time.time()
    if current_time - self._last_refresh_time < self._refresh_throttle_seconds:
        debug_log("refresh_frame_types() - THROTTLED, skipping refresh")
        return  # Skip refresh if called too recently
    self._last_refresh_time = current_time

    # Get all detected frame types with their total packet counts
    frame_types = self._get_all_frame_types()

    # Calculate flow counts for all frame types
    frame_type_flow_counts = {}
    for frame_type in frame_types.keys():
        flow_count = sum(1 for flow in self.analyzer.flows.values() if frame_type in flow.frame_types)
        frame_type_flow_counts[frame_type] = flow_count

    # UPDATE PREDEFINED FRAME TYPE BUTTONS (show/hide and update counts only)
    for frame_type in self.predefined_frame_types:
        if frame_type in self.frame_type_buttons:
            btn = self.frame_type_buttons[frame_type]
            if frame_type in frame_type_flow_counts:
                flow_count = frame_type_flow_counts[frame_type]
                # Update button content only
                # NOTE(review): assumes btn.label is str-like (supports
                # split/'in'); with rich Text labels this may need str(btn.label).
                hotkey = btn.label.split('.')[0] if '.' in btn.label else '?'
                short_name = btn._shorten_frame_type(frame_type)
                btn.label = f"{hotkey}.{short_name}({flow_count})"
                btn.count = flow_count

                # Show button if it has data or is predefined (always show predefined during loading)
                should_show = flow_count > 0 or frame_type in self.predefined_frame_types
                btn.visible = should_show
            else:
                # No data for this frame type yet, keep hidden but maintain button
                btn.visible = False

    # HANDLE NEW FRAME TYPES - assign to placeholder buttons only
    new_frame_types = set(frame_type_flow_counts.keys()) - set(self.predefined_frame_types)
    placeholder_keys = [k for k in self.frame_type_buttons.keys() if k.startswith("__placeholder_")]

    # Find available placeholders (not already assigned)
    assigned_frame_types = set()
    for frame_type in new_frame_types:
        if frame_type in self.frame_type_buttons:
            assigned_frame_types.add(frame_type)

    unassigned_new_types = new_frame_types - assigned_frame_types
    available_placeholders = []
    for placeholder_key in placeholder_keys:
        btn = self.frame_type_buttons[placeholder_key]
        if not hasattr(btn, 'assigned_frame_type') or not btn.visible:
            available_placeholders.append(placeholder_key)

    # Assign new frame types to available placeholders
    for i, frame_type in enumerate(sorted(unassigned_new_types)):
        if i < len(available_placeholders) and frame_type_flow_counts[frame_type] > 0:
            placeholder_key = available_placeholders[i]
            btn = self.frame_type_buttons[placeholder_key]

            # Assign this placeholder to the new frame type
            flow_count = frame_type_flow_counts[frame_type]
            hotkey = str(btn.placeholder_index + 2)  # hotkeys 2-0
            short_name = btn._shorten_frame_type(frame_type)
            btn.label = f"{hotkey}.{short_name}({flow_count})"
            btn.count = flow_count
            btn.frame_type = frame_type
            btn.assigned_frame_type = frame_type
            btn.visible = True

            # Also add to frame_type_buttons with the frame type as key for easy lookup
            self.frame_type_buttons[frame_type] = btn

    # Update existing assigned placeholder buttons
    for frame_type in assigned_frame_types:
        if frame_type in self.frame_type_buttons:
            btn = self.frame_type_buttons[frame_type]
            flow_count = frame_type_flow_counts[frame_type]
            hotkey = btn.label.split('.')[0] if '.' in btn.label else '?'
            short_name = btn._shorten_frame_type(frame_type)
            btn.label = f"{hotkey}.{short_name}({flow_count})"
            btn.count = flow_count
            btn.visible = flow_count > 0

    # Update button highlighting
    self._update_button_highlighting()
    debug_log("refresh_frame_types() - Button visibility and content updated (NO RECREATION)")
    debug_button_state(self.frame_type_buttons, "AFTER_VISIBILITY_UPDATE")

    # Track frame types for change detection
    current_frame_types = set(frame_types.keys())
    if current_frame_types != self._last_frame_types:
        self._last_frame_types = current_frame_types

        # CRITICAL: Rebuild table columns when frame types change (for Overview mode)
        if self.selected_frame_type == "Overview":
            self._setup_flow_table()
            # Clear existing data before adding new data with new column structure
            self.flow_table.clear()
|
||||
|
||||
# _update_button_counts method removed - buttons are now managed by visibility only
|
||||
|
||||
|
||||
def refresh_flow_data(self):
    """Repopulate the flow table to match the currently selected filter.

    Clears every row, then delegates to the overview renderer or the
    per-frame-type renderer depending on the active selection.
    """
    self.flow_table.clear()

    if self.selected_frame_type != "Overview":
        self._show_frame_type_flows(self.selected_frame_type)
    else:
        self._show_overview()
|
||||
|
||||
def _show_overview(self):
    """Show all flows in overview mode with frame type columns.

    Builds one row per flow (index, endpoints, protocol, total frames),
    then one cell per frame-type column holding a colour-coded packet
    count, and finally a Status cell. Rows are sorted client-side when a
    sort column is active. Rows whose width no longer matches the table's
    column count are skipped, because the table may be rebuilt
    concurrently by refresh_frame_types().
    """
    flows = list(self.analyzer.flows.values())
    all_frame_types = self._get_all_frame_types()
    # Busiest frame types first, mirroring the column order that
    # _setup_flow_table() produces.
    sorted_frame_types = sorted(all_frame_types.keys(), key=lambda x: all_frame_types[x], reverse=True)

    # Get current table columns to check what frame types are expected
    try:
        # NOTE(review): reads the private DataTable._columns attribute —
        # guarded below since it may change between Textual versions.
        table_columns = [col.key for col in self.flow_table._columns]
    except (AttributeError, TypeError):
        # If columns aren't accessible, fall back to using current frame types
        table_columns = []

    expected_frame_types = []
    for col_key in table_columns:
        if col_key.startswith("ft_"):
            # Extract frame type from column key
            expected_frame_types.append(col_key[3:])  # Remove "ft_" prefix

    # If no frame type columns detected, use sorted frame types directly
    if not expected_frame_types:
        # Same sanitisation used when the "ft_" column keys were created.
        expected_frame_types = [frame_type.replace('-', '_').replace(':', '_').replace('(', '_').replace(')', '_').replace(' ', '_').replace('.', '_') for frame_type in sorted_frame_types]

    # Collect all row data first
    all_rows = []

    for i, flow in enumerate(flows):
        # Status based on enhanced analysis
        status = "Enhanced" if flow.enhanced_analysis.decoder_type != "Standard" else "Normal"
        status_style = "green" if status == "Enhanced" else "white"

        # Start with basic flow info
        row_data = [
            str(i + 1),
            f"{flow.src_ip}:{flow.src_port}",
            f"{flow.dst_ip}:{flow.dst_port}",
            flow.transport_protocol,
            str(flow.frame_count)
        ]

        # Add packet count for each frame type column in the order they appear in table
        for expected_ft_key in expected_frame_types:
            # Find the actual frame type that matches this column key
            # (reverse the sanitisation by re-applying it and comparing).
            matching_frame_type = None
            for frame_type in sorted_frame_types:
                safe_key = frame_type.replace('-', '_').replace(':', '_').replace('(', '_').replace(')', '_').replace(' ', '_').replace('.', '_')
                if safe_key == expected_ft_key:
                    matching_frame_type = frame_type
                    break

            if matching_frame_type and matching_frame_type in flow.frame_types:
                count = flow.frame_types[matching_frame_type].count
                if count > 0:
                    colored_count = self._color_code_packet_count(count, all_frame_types[matching_frame_type])
                    row_data.append(colored_count)
                else:
                    row_data.append("-")
            else:
                row_data.append("-")

        # Add status
        row_data.append(Text(status, style=status_style))

        # Store row data with original flow index for key
        all_rows.append((row_data, i))

    # Sort rows if sorting is enabled
    if self.sort_column is not None and all_rows:
        all_rows.sort(key=lambda x: self._get_sort_key(x[0], self.sort_column), reverse=self.sort_reverse)

    # Add sorted rows to table
    for row_data, original_index in all_rows:
        # CRITICAL: Validate row data matches column count before adding
        try:
            # Get column count for validation
            column_count = len(self.flow_table.ordered_columns) if hasattr(self.flow_table, 'ordered_columns') else 0
            if column_count > 0 and len(row_data) != column_count:
                # Skip this row if data doesn't match columns - table structure is being updated
                continue

            self.flow_table.add_row(*row_data, key=f"flow-{original_index}")
        except (ValueError, AttributeError) as e:
            # Skip this row if there's a column mismatch - table is being rebuilt
            continue
|
||||
|
||||
def _show_frame_type_flows(self, frame_type: str):
    """Show flows filtered by frame type with timing statistics.

    Emits one row per flow that carries `frame_type`: endpoints,
    protocol, packet count, inter-arrival timing stats (avg/std/min/max
    rendered in milliseconds), outlier count and a colour-coded quality
    score. Row keys reuse the flow's original enumeration index so they
    stay stable across filtering and sorting.
    """
    flows_with_type = []

    for i, flow in enumerate(self.analyzer.flows.values()):
        if frame_type in flow.frame_types:
            flows_with_type.append((i, flow, flow.frame_types[frame_type]))

    # Collect all row data first
    all_rows = []

    for flow_idx, flow, ft_stats in flows_with_type:
        # Calculate timing statistics
        if ft_stats.inter_arrival_times:
            # Values are stored in seconds; convert to ms for display.
            min_delta = min(ft_stats.inter_arrival_times) * 1000
            max_delta = max(ft_stats.inter_arrival_times) * 1000
        else:
            min_delta = max_delta = 0

        # Quality score
        quality = self._calculate_quality(ft_stats)
        quality_text = self._format_quality(quality)

        row_data = [
            str(flow_idx + 1),
            f"{flow.src_ip}:{flow.src_port}",
            f"{flow.dst_ip}:{flow.dst_port}",
            flow.transport_protocol,
            str(ft_stats.count),
            f"{ft_stats.avg_inter_arrival * 1000:.1f}ms" if ft_stats.avg_inter_arrival > 0 else "N/A",
            f"{ft_stats.std_inter_arrival * 1000:.1f}ms" if ft_stats.std_inter_arrival > 0 else "N/A",
            f"{min_delta:.1f}ms" if min_delta > 0 else "N/A",
            f"{max_delta:.1f}ms" if max_delta > 0 else "N/A",
            str(len(ft_stats.outlier_frames)),
            quality_text
        ]

        # Store row data with original flow index for key
        all_rows.append((row_data, flow_idx))

    # Sort rows if sorting is enabled
    if self.sort_column is not None and all_rows:
        all_rows.sort(key=lambda x: self._get_sort_key(x[0], self.sort_column), reverse=self.sort_reverse)

    # Add sorted rows to table
    for row_data, original_index in all_rows:
        # CRITICAL: Validate row data matches column count before adding
        try:
            # Get column count for validation
            column_count = len(self.flow_table.ordered_columns) if hasattr(self.flow_table, 'ordered_columns') else 0
            if column_count > 0 and len(row_data) != column_count:
                # Skip this row if data doesn't match columns - table structure is being updated
                continue

            self.flow_table.add_row(*row_data, key=f"flow-{original_index}")
        except (ValueError, AttributeError) as e:
            # Skip this row if there's a column mismatch - table is being rebuilt
            continue
|
||||
|
||||
def on_button_pressed(self, event: Button.Pressed) -> None:
    """Handle filter button clicks.

    The Overview button is matched by its widget id; any other button is
    resolved by identity lookup against self.frame_type_buttons, whose
    keys are the frame-type names used for filtering.
    """
    button = event.button

    # Determine frame type from button
    if button.id == "btn-overview":
        self.select_frame_type("Overview")
    else:
        # Extract frame type from button
        for frame_type, btn in self.frame_type_buttons.items():
            if btn == button:
                self.select_frame_type(frame_type)
                break
|
||||
|
||||
def select_frame_type(self, frame_type: str):
    """Select a frame type filter.

    When the selection actually changes, rebuilds the table columns,
    refreshes the rows, and broadcasts a FrameTypeSelected message.
    Button highlighting is refreshed unconditionally.
    """
    if self.selected_frame_type != frame_type:
        self.selected_frame_type = frame_type
        self._setup_flow_table()
        self.refresh_flow_data()
        self.post_message(self.FrameTypeSelected(frame_type))

    # Update button highlighting
    self._update_button_highlighting()
    # NOTE(review): the debug messages below mention refresh_frame_types()
    # and on_mount() — they look like leftovers pasted from other methods;
    # confirm the labels (they affect only debug log output).
    debug_log("refresh_frame_types() - Buttons recreated")
    debug_button_state(self.frame_type_buttons, "AFTER_BUTTON_CREATION")
    debug_log("on_mount() - Initialization complete")
    debug_button_state(self.frame_type_buttons, "AFTER_MOUNT_COMPLETE")
|
||||
|
||||
def _update_button_highlighting(self):
|
||||
"""Update which button appears active/highlighted"""
|
||||
for frame_type, btn in self.frame_type_buttons.items():
|
||||
if frame_type == self.selected_frame_type:
|
||||
btn.add_class("-active")
|
||||
else:
|
||||
btn.remove_class("-active")
|
||||
|
||||
def action_select_filter(self, number: str):
    """Handle number key press for filter selection.

    '1' always selects Overview; '2'-'9' and '0' map to the frame-type
    button whose label begins with that digit — labels embed the hotkey
    as "<digit>.<short-name>(<count>)", so the prefix match recovers the
    binding without storing a separate hotkey->type table.
    """
    if number == '1':
        # Overview
        self.select_frame_type("Overview")
    else:
        # Frame type buttons - find by hotkey
        hotkeys = ['2', '3', '4', '5', '6', '7', '8', '9', '0']
        if number in hotkeys:
            # Find the button with this hotkey
            for frame_type, btn in self.frame_type_buttons.items():
                # hasattr guards against the Overview button, which is a
                # plain Button without a frame_type attribute.
                if frame_type != "Overview" and hasattr(btn, 'frame_type'):
                    # Check if this button's label starts with this number
                    if btn.label.plain.startswith(f"{number}."):
                        self.select_frame_type(frame_type)
                        break
|
||||
|
||||
def action_sort_column(self, column_index: int):
    """Sort the flow table by the given 0-based column index.

    Pressing the same column again flips the sort direction; switching
    to a different column restarts ascending. Indexes beyond the current
    column count (or a not-yet-ready table) are ignored.
    """
    table = self.flow_table
    # Bail out when the table isn't ready or the column doesn't exist.
    if not table or not hasattr(table, 'ordered_columns'):
        return
    if column_index >= len(table.ordered_columns):
        return

    if self.sort_column == column_index:
        # Same column pressed again: reverse the direction.
        self.sort_reverse = not self.sort_reverse
    else:
        # New column: start ascending.
        self.sort_column = column_index
        self.sort_reverse = False

    # Re-render rows under the new ordering.
    self.refresh_flow_data()
|
||||
|
||||
def _get_sort_key(self, row_data: list, column_index: int):
|
||||
"""Get sort key for a row based on column index"""
|
||||
if column_index >= len(row_data):
|
||||
return ""
|
||||
|
||||
value = row_data[column_index]
|
||||
|
||||
# Handle Text objects (extract plain text)
|
||||
if hasattr(value, 'plain'):
|
||||
text_value = value.plain
|
||||
else:
|
||||
text_value = str(value)
|
||||
|
||||
# Try to convert to number for numeric sorting
|
||||
try:
|
||||
# Handle values like "1,105" (remove commas)
|
||||
if ',' in text_value:
|
||||
text_value = text_value.replace(',', '')
|
||||
|
||||
# Handle values with units like "102.2ms" or "1.5MB"
|
||||
if text_value.endswith('ms'):
|
||||
return float(text_value[:-2])
|
||||
elif text_value.endswith('MB'):
|
||||
return float(text_value[:-2]) * 1000000
|
||||
elif text_value.endswith('KB'):
|
||||
return float(text_value[:-2]) * 1000
|
||||
elif text_value.endswith('B'):
|
||||
return float(text_value[:-1])
|
||||
elif text_value.endswith('%'):
|
||||
return float(text_value[:-1])
|
||||
elif text_value == "N/A" or text_value == "-":
|
||||
return -1 # Sort N/A and "-" values to the end
|
||||
else:
|
||||
return float(text_value)
|
||||
except (ValueError, AttributeError):
|
||||
# For string values, use alphabetical sorting
|
||||
return text_value.lower()
|
||||
|
||||
def _format_bytes(self, bytes_val: int) -> str:
|
||||
"""Format bytes to human readable"""
|
||||
if bytes_val < 1024:
|
||||
return f"{bytes_val}B"
|
||||
elif bytes_val < 1024 * 1024:
|
||||
return f"{bytes_val / 1024:.1f}KB"
|
||||
else:
|
||||
return f"{bytes_val / (1024 * 1024):.1f}MB"
|
||||
|
||||
def _calculate_quality(self, ft_stats) -> float:
|
||||
"""Calculate quality score for frame type stats"""
|
||||
if ft_stats.count == 0:
|
||||
return 0.0
|
||||
|
||||
outlier_rate = len(ft_stats.outlier_frames) / ft_stats.count
|
||||
consistency = 1.0 - min(outlier_rate * 2, 1.0)
|
||||
return consistency * 100
|
||||
|
||||
def _format_quality(self, quality: float) -> Text:
    """Colour a quality percentage: green >= 90, yellow >= 70, red below."""
    if quality >= 90:
        style = "green"
    elif quality >= 70:
        style = "yellow"
    else:
        style = "red"
    return Text(f"{quality:.0f}%", style=style)
|
||||
|
||||
def _get_all_frame_types(self) -> dict:
|
||||
"""Get all frame types across all flows with their total counts"""
|
||||
frame_types = {}
|
||||
for flow in self.analyzer.flows.values():
|
||||
for frame_type, stats in flow.frame_types.items():
|
||||
if frame_type not in frame_types:
|
||||
frame_types[frame_type] = 0
|
||||
frame_types[frame_type] += stats.count
|
||||
return frame_types
|
||||
|
||||
def _shorten_frame_type_name(self, frame_type: str) -> str:
|
||||
"""Shorten frame type names for better column display"""
|
||||
# Common abbreviations for better column display
|
||||
abbreviations = {
|
||||
'CH10-Data': 'CH10',
|
||||
'CH10-Multi-Source': 'Multi',
|
||||
'CH10-Extended': 'Ext',
|
||||
'CH10-ACTTS': 'ACTTS',
|
||||
'PTP-Signaling': 'PTP-Sig',
|
||||
'PTP-FollowUp': 'PTP-FU',
|
||||
'PTP-Sync': 'PTP-Syn',
|
||||
'PTP-Unknown (0x6)': 'PTP-Unk',
|
||||
'UDP': 'UDP',
|
||||
'TMATS': 'TMATS',
|
||||
'TCP': 'TCP'
|
||||
}
|
||||
return abbreviations.get(frame_type, frame_type[:8])
|
||||
|
||||
def _color_code_packet_count(self, count: int, max_count: int) -> Text:
    """Style a packet count relative to the busiest flow for its frame type.

    Thresholds (as a percentage of max_count): >=80 red bold, >=50
    yellow bold, >=20 cyan, >=5 blue, otherwise dim white. A zero
    max_count renders plain white to avoid division by zero.
    """
    if max_count == 0:
        return Text(str(count), style="white")

    # Percentage of the maximum count observed for this frame type.
    percentage = (count / max_count) * 100

    if percentage >= 80:
        style = "red bold"
    elif percentage >= 50:
        style = "yellow bold"
    elif percentage >= 20:
        style = "cyan"
    elif percentage >= 5:
        style = "blue"
    else:
        style = "dim white"
    return Text(str(count), style=style)
|
||||
692
analyzer/tui/textual/widgets/filtered_flow_view.py.debug_backup
Normal file
692
analyzer/tui/textual/widgets/filtered_flow_view.py.debug_backup
Normal file
@@ -0,0 +1,692 @@
|
||||
"""
|
||||
Filtered Flow View Widget - Grid with frame type filter buttons
|
||||
"""
|
||||
|
||||
from textual.widgets import Button, DataTable, Static
|
||||
from textual.containers import Vertical, Horizontal
|
||||
from textual.reactive import reactive
|
||||
from textual.message import Message
|
||||
from textual.binding import Binding
|
||||
from typing import TYPE_CHECKING, Optional, List, Dict
|
||||
from rich.text import Text
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ....analysis.core import EthernetAnalyzer
|
||||
from ....models import FlowStats
|
||||
|
||||
|
||||
class FrameTypeButton(Button):
    """Button for frame type filtering.

    Carries the exact frame-type name it filters on plus the flow count
    shown in its label, which is formatted "<hotkey>.<short-name>(<count>)".
    """

    def __init__(self, frame_type: str, hotkey: str, count: int = 0, **kwargs):
        """Create a filter button.

        Args:
            frame_type: Exact frame-type name used for filtering.
            hotkey: Digit ('2'-'9' or '0') shown in the label prefix.
            count: Number of flows currently carrying this frame type.
        """
        self.frame_type = frame_type
        self.count = count
        # Shorten frame type names for 1-row buttons
        short_name = self._shorten_frame_type(frame_type)
        label = f"{hotkey}.{short_name}({count})"  # Remove spaces to be more compact
        # Create valid ID by removing/replacing invalid characters
        # (Textual widget ids must not contain -,:,(,), space or dot).
        safe_id = frame_type.replace('-', '_').replace(':', '_').replace('(', '_').replace(')', '_').replace(' ', '_').replace('.', '_')
        super().__init__(label, id=f"btn-{safe_id}", **kwargs)

    def _shorten_frame_type(self, frame_type: str) -> str:
        """Shorten frame type names for compact 1-row buttons."""
        abbreviations = {
            'CH10-Data': 'CH10',
            'CH10-Multi-Source': 'Multi',
            'CH10-Extended': 'Ext',
            'CH10-ACTTS': 'ACTTS',
            'PTP-Signaling': 'PTP-S',
            'PTP-FollowUp': 'PTP-F',
            'PTP-Sync': 'PTP',
            'PTP-Unknown (0x6)': 'PTP-U',
            'UDP': 'UDP',
            'TMATS': 'TMATS',
            'TCP': 'TCP'
        }
        return abbreviations.get(frame_type, frame_type[:6])  # Max 6 chars for unknown types
|
||||
|
||||
|
||||
class FilteredFlowView(Vertical):
|
||||
"""Flow grid with frame type filter buttons"""
|
||||
|
||||
BINDINGS = [
|
||||
Binding("alt+1", "sort_column(0)", "Sort by column 1", show=False),
|
||||
Binding("alt+2", "sort_column(1)", "Sort by column 2", show=False),
|
||||
Binding("alt+3", "sort_column(2)", "Sort by column 3", show=False),
|
||||
Binding("alt+4", "sort_column(3)", "Sort by column 4", show=False),
|
||||
Binding("alt+5", "sort_column(4)", "Sort by column 5", show=False),
|
||||
Binding("alt+6", "sort_column(5)", "Sort by column 6", show=False),
|
||||
Binding("alt+7", "sort_column(6)", "Sort by column 7", show=False),
|
||||
Binding("alt+8", "sort_column(7)", "Sort by column 8", show=False),
|
||||
Binding("alt+9", "sort_column(8)", "Sort by column 9", show=False),
|
||||
Binding("alt+0", "sort_column(9)", "Sort by column 10", show=False),
|
||||
]
|
||||
|
||||
DEFAULT_CSS = """
|
||||
FilteredFlowView {
|
||||
height: 1fr;
|
||||
}
|
||||
|
||||
#filter-bar {
|
||||
height: auto;
|
||||
min-height: 1;
|
||||
max-height: 1;
|
||||
background: #262626;
|
||||
padding: 0 1;
|
||||
dock: top;
|
||||
layout: horizontal;
|
||||
}
|
||||
|
||||
#filter-bar Button {
|
||||
margin: 0 1 0 0; /* Consistent right spacing */
|
||||
min-width: 10; /* Reduced for compact labels */
|
||||
height: auto;
|
||||
min-height: 1;
|
||||
padding: 0; /* Remove padding to fit text in 1 row */
|
||||
text-align: center; /* Center text in button */
|
||||
content-align: center middle;
|
||||
}
|
||||
|
||||
#btn-overview {
|
||||
margin: 0 1 0 0; /* Overview button - same spacing */
|
||||
height: auto;
|
||||
min-height: 1;
|
||||
padding: 0; /* Remove padding to fit text in 1 row */
|
||||
text-align: center; /* Center text in button */
|
||||
content-align: center middle;
|
||||
}
|
||||
|
||||
#filter-bar Button:hover {
|
||||
background: #0080ff;
|
||||
}
|
||||
|
||||
#filter-bar Button.-active {
|
||||
background: #0080ff;
|
||||
text-style: bold;
|
||||
}
|
||||
|
||||
#filtered-flow-table {
|
||||
height: 1fr;
|
||||
}
|
||||
"""
|
||||
|
||||
selected_frame_type = reactive("Overview")
|
||||
|
||||
class FrameTypeSelected(Message):
    """Message when frame type filter is selected.

    Carries the selected frame-type name (or "Overview") so parent
    widgets can react to filter changes.
    """
    def __init__(self, frame_type: str) -> None:
        # Name of the newly selected filter.
        self.frame_type = frame_type
        super().__init__()
|
||||
|
||||
def __init__(self, analyzer: 'EthernetAnalyzer', **kwargs):
    """Initialise the filtered flow view.

    Args:
        analyzer: Shared analyzer whose `flows` mapping backs every view.
    """
    super().__init__(**kwargs)
    self.analyzer = analyzer
    # Maps frame-type name (or "Overview") -> its filter Button.
    self.frame_type_buttons = {}
    # DataTable created later in compose().
    self.flow_table = None
    self._last_frame_types = set()  # Track frame types to avoid unnecessary refreshes
    self._buttons_created = False  # Track if buttons have been created to avoid flicker

    # Table sorting state
    self.sort_column = None  # Index of column to sort by (None = no sorting)
    self.sort_reverse = False  # True for descending, False for ascending

    # Button refresh throttling to prevent race conditions
    self._last_refresh_time = 0
    self._refresh_throttle_seconds = 1.0  # Only refresh buttons once per second

    # Predefined frame types that will have buttons created at initialization
    self.predefined_frame_types = [
        'CH10-Data',
        'UDP',
        'PTP-Sync',
        'PTP-Signaling',
        'PTP-FollowUp',
        'TMATS',
        'TCP',
        'CH10-Multi-Source',
        'CH10-Extended'
    ]
|
||||
|
||||
def compose(self):
    """Create the filter bar and flow grid.

    Yields the Overview button, one pre-created button per predefined
    frame type (so later refreshes only toggle visibility instead of
    mounting widgets), and the flow DataTable.
    """
    # Filter button bar at top
    with Horizontal(id="filter-bar"):
        # Overview button (hotkey 1) - compact format
        overview_btn = Button("1.Overview", id="btn-overview", classes="-active")
        self.frame_type_buttons["Overview"] = overview_btn
        yield overview_btn

        # Create predefined frame type buttons at initialization
        # Note: Initial order will be updated by refresh_frame_types() to sort by count
        hotkeys = ['2', '3', '4', '5', '6', '7', '8', '9', '0']
        for i, frame_type in enumerate(self.predefined_frame_types):
            if i < len(hotkeys):
                # Start with 0 count - will be updated during data refresh
                btn = FrameTypeButton(frame_type, hotkeys[i], 0)
                self.frame_type_buttons[frame_type] = btn
                yield btn

    # Flow data table
    self.flow_table = DataTable(
        id="filtered-flow-table",
        cursor_type="row",
        zebra_stripes=True,
        show_header=True,
        show_row_labels=False
    )
    yield self.flow_table
|
||||
|
||||
def on_mount(self):
    """Initialize the view once it is attached to the DOM.

    Builds the table columns, performs the first data refresh, and makes
    sure the Overview button starts highlighted.
    """
    self._setup_flow_table()
    # Mark buttons as created since we pre-created them in compose()
    self._buttons_created = True
    # Update button counts and data
    self.refresh_frame_types()
    self.refresh_flow_data()
    # Ensure Overview button starts highlighted
    self._update_button_highlighting()
|
||||
|
||||
def _setup_flow_table(self):
    """Setup table columns based on selected frame type.

    Overview mode: fixed flow columns, one "ft_<sanitised-name>" column
    per detected frame type (busiest first), then a Status column.
    Filtered mode: fixed flow columns plus timing-statistics columns.
    Clears all existing columns and rows before rebuilding.
    """
    table = self.flow_table
    table.clear(columns=True)

    if self.selected_frame_type == "Overview":
        # Overview columns with individual frame type columns
        table.add_column("#", width=4, key="num")
        table.add_column("Source", width=18, key="source")
        table.add_column("Destination", width=18, key="dest")
        table.add_column("Protocol", width=8, key="protocol")
        table.add_column("Total", width=8, key="total_packets")

        # Add columns for each detected frame type
        all_frame_types = self._get_all_frame_types()
        for frame_type in sorted(all_frame_types.keys(), key=lambda x: all_frame_types[x], reverse=True):
            # Shorten column name for better display
            short_name = self._shorten_frame_type_name(frame_type)
            # Create safe key for column
            # (same sanitisation that _show_overview() reverses later).
            safe_key = frame_type.replace('-', '_').replace(':', '_').replace('(', '_').replace(')', '_').replace(' ', '_').replace('.', '_')
            table.add_column(short_name, width=8, key=f"ft_{safe_key}")

        table.add_column("Status", width=10, key="status")
    else:
        # Frame type specific columns
        table.add_column("#", width=4, key="num")
        table.add_column("Source", width=20, key="source")
        table.add_column("Destination", width=20, key="dest")
        table.add_column("Protocol", width=8, key="protocol")
        table.add_column(f"{self.selected_frame_type} Packets", width=12, key="ft_packets")
        table.add_column("Avg ΔT", width=10, key="avg_delta")
        table.add_column("Std ΔT", width=10, key="std_delta")
        table.add_column("Min ΔT", width=10, key="min_delta")
        table.add_column("Max ΔT", width=10, key="max_delta")
        table.add_column("Outliers", width=8, key="outliers")
        table.add_column("Quality", width=8, key="quality")
||||
|
||||
def refresh_frame_types(self):
    """Update frame type button counts and reorder by count (highest to left).

    Throttled to one run per `_refresh_throttle_seconds`. When the set
    of visible frame types is unchanged, only the labels are rewritten;
    otherwise every non-Overview button is unmounted and recreated in
    descending flow-count order (hotkeys 2-9 then 0). Also rebuilds the
    Overview table columns when the detected frame-type set changes.
    """
    # Throttle button refresh to prevent race conditions
    import time
    current_time = time.time()
    if current_time - self._last_refresh_time < self._refresh_throttle_seconds:
        return  # Skip refresh if called too recently
    self._last_refresh_time = current_time

    # Get all detected frame types with their total packet counts
    frame_types = self._get_all_frame_types()

    # If no frame types yet, skip button update
    if not frame_types:
        return

    # Calculate flow counts for all frame types (including new ones)
    frame_type_flow_counts = {}
    for frame_type in frame_types.keys():
        flow_count = sum(1 for flow in self.analyzer.flows.values() if frame_type in flow.frame_types)
        frame_type_flow_counts[frame_type] = flow_count

    # Sort frame types by count (highest first)
    sorted_frame_types = sorted(frame_type_flow_counts.items(), key=lambda x: x[1], reverse=True)

    # Check if the order has actually changed to avoid unnecessary updates
    # Include predefined frame types even with 0 count to avoid unnecessary recreation
    current_order = [ft for ft, _ in sorted_frame_types[:9]
                     if frame_type_flow_counts[ft] > 0 or ft in self.predefined_frame_types]

    # Get the previous order from button tracking
    previous_order = [ft for ft in self.frame_type_buttons.keys() if ft != "Overview"]

    # Check if we can just update counts instead of recreating buttons
    # During early loading, be more flexible about order changes for predefined types
    can_update_counts_only = False

    if len(current_order) == len(previous_order):
        # Same number of buttons - check if they're the same set (order can be different during loading)
        current_set = set(current_order)
        previous_set = set(previous_order)

        if current_set == previous_set:
            # Same frame types, just update counts without recreating
            can_update_counts_only = True
        elif all(ft in self.predefined_frame_types for ft in current_set.symmetric_difference(previous_set)):
            # Only predefined types differ - still safe to just update counts during loading
            can_update_counts_only = True

    if can_update_counts_only:
        # Just update counts in existing buttons
        self._update_button_counts(frame_type_flow_counts)
        return

    # Order changed, need to recreate buttons
    try:
        filter_bar = self.query_one("#filter-bar", Horizontal)
    except Exception:
        # Filter bar not available yet
        return

    # Remove all buttons except Overview - use a safer approach
    overview_btn = None
    buttons_to_remove = []

    for widget in list(filter_bar.children):
        if widget.id == "btn-overview":
            overview_btn = widget
        else:
            buttons_to_remove.append(widget)

    # Remove non-overview buttons
    for widget in buttons_to_remove:
        try:
            if widget.parent:  # Only remove if still has parent
                widget.remove()
        except Exception:
            # Best-effort teardown: a widget may already be unmounting.
            pass

    # Clear frame type buttons dict and keep overview
    self.frame_type_buttons.clear()
    if overview_btn:
        self.frame_type_buttons["Overview"] = overview_btn

    # Add new buttons in sorted order
    hotkeys = ['2', '3', '4', '5', '6', '7', '8', '9', '0']
    for i, (frame_type, flow_count) in enumerate(sorted_frame_types[:9]):
        # Always show predefined frame types, even with 0 count during early loading
        # Only skip if count is 0 AND it's not a predefined frame type
        should_show = (flow_count > 0) or (frame_type in self.predefined_frame_types)

        if i < len(hotkeys) and should_show:
            btn = FrameTypeButton(frame_type, hotkeys[i], flow_count)
            self.frame_type_buttons[frame_type] = btn
            try:
                filter_bar.mount(btn)
            except Exception:
                # If mount fails, skip this button
                pass

    # Update button highlighting
    self._update_button_highlighting()

    # Track frame types for change detection
    current_frame_types = set(frame_types.keys())
    if current_frame_types != self._last_frame_types:
        self._last_frame_types = current_frame_types

        # CRITICAL: Rebuild table columns when frame types change (for Overview mode)
        if self.selected_frame_type == "Overview":
            self._setup_flow_table()
            # Clear existing data before adding new data with new column structure
            self.flow_table.clear()
|
||||
|
||||
def _update_button_counts(self, frame_type_flow_counts: dict):
    """Update button counts without recreating buttons.

    Rewrites each assigned button's label in place as
    "<hotkey>.<short-name>(<flow-count>)", preserving the hotkey digit
    already embedded in the label. Avoids the flicker of a full
    unmount/mount cycle.

    Args:
        frame_type_flow_counts: Mapping of frame-type name -> number of
            flows currently carrying that frame type.
    """
    for frame_type, btn in self.frame_type_buttons.items():
        if frame_type == "Overview":
            continue

        if frame_type in frame_type_flow_counts:
            flow_count = frame_type_flow_counts[frame_type]
            # Extract hotkey from current label
            try:
                hotkey = btn.label.split('.')[0]
                short_name = btn._shorten_frame_type(frame_type)
                btn.label = f"{hotkey}.{short_name}({flow_count})"
                btn.count = flow_count
            except Exception:
                # If label update fails, ignore
                pass
|
||||
|
||||
|
||||
def refresh_flow_data(self):
    """Repopulate the flow table to match the currently selected filter.

    Clears every row, then delegates to the overview renderer or the
    per-frame-type renderer depending on the active selection.
    """
    self.flow_table.clear()

    if self.selected_frame_type != "Overview":
        self._show_frame_type_flows(self.selected_frame_type)
    else:
        self._show_overview()
|
||||
|
||||
def _show_overview(self):
    """Show all flows in overview mode with frame type columns.

    Builds one row per flow (index, endpoints, protocol, total frames),
    then one cell per frame-type column holding a colour-coded packet
    count, and finally a Status cell. Rows are sorted client-side when a
    sort column is active. Rows whose width no longer matches the table's
    column count are skipped, because the table may be rebuilt
    concurrently by refresh_frame_types().
    """
    flows = list(self.analyzer.flows.values())
    all_frame_types = self._get_all_frame_types()
    # Busiest frame types first, mirroring the column order that
    # _setup_flow_table() produces.
    sorted_frame_types = sorted(all_frame_types.keys(), key=lambda x: all_frame_types[x], reverse=True)

    # Get current table columns to check what frame types are expected
    try:
        # NOTE(review): reads the private DataTable._columns attribute —
        # guarded below since it may change between Textual versions.
        table_columns = [col.key for col in self.flow_table._columns]
    except (AttributeError, TypeError):
        # If columns aren't accessible, fall back to using current frame types
        table_columns = []

    expected_frame_types = []
    for col_key in table_columns:
        if col_key.startswith("ft_"):
            # Extract frame type from column key
            expected_frame_types.append(col_key[3:])  # Remove "ft_" prefix

    # If no frame type columns detected, use sorted frame types directly
    if not expected_frame_types:
        # Same sanitisation used when the "ft_" column keys were created.
        expected_frame_types = [frame_type.replace('-', '_').replace(':', '_').replace('(', '_').replace(')', '_').replace(' ', '_').replace('.', '_') for frame_type in sorted_frame_types]

    # Collect all row data first
    all_rows = []

    for i, flow in enumerate(flows):
        # Status based on enhanced analysis
        status = "Enhanced" if flow.enhanced_analysis.decoder_type != "Standard" else "Normal"
        status_style = "green" if status == "Enhanced" else "white"

        # Start with basic flow info
        row_data = [
            str(i + 1),
            f"{flow.src_ip}:{flow.src_port}",
            f"{flow.dst_ip}:{flow.dst_port}",
            flow.transport_protocol,
            str(flow.frame_count)
        ]

        # Add packet count for each frame type column in the order they appear in table
        for expected_ft_key in expected_frame_types:
            # Find the actual frame type that matches this column key
            # (reverse the sanitisation by re-applying it and comparing).
            matching_frame_type = None
            for frame_type in sorted_frame_types:
                safe_key = frame_type.replace('-', '_').replace(':', '_').replace('(', '_').replace(')', '_').replace(' ', '_').replace('.', '_')
                if safe_key == expected_ft_key:
                    matching_frame_type = frame_type
                    break

            if matching_frame_type and matching_frame_type in flow.frame_types:
                count = flow.frame_types[matching_frame_type].count
                if count > 0:
                    colored_count = self._color_code_packet_count(count, all_frame_types[matching_frame_type])
                    row_data.append(colored_count)
                else:
                    row_data.append("-")
            else:
                row_data.append("-")

        # Add status
        row_data.append(Text(status, style=status_style))

        # Store row data with original flow index for key
        all_rows.append((row_data, i))

    # Sort rows if sorting is enabled
    if self.sort_column is not None and all_rows:
        all_rows.sort(key=lambda x: self._get_sort_key(x[0], self.sort_column), reverse=self.sort_reverse)

    # Add sorted rows to table
    for row_data, original_index in all_rows:
        # CRITICAL: Validate row data matches column count before adding
        try:
            # Get column count for validation
            column_count = len(self.flow_table.ordered_columns) if hasattr(self.flow_table, 'ordered_columns') else 0
            if column_count > 0 and len(row_data) != column_count:
                # Skip this row if data doesn't match columns - table structure is being updated
                continue

            self.flow_table.add_row(*row_data, key=f"flow-{original_index}")
        except (ValueError, AttributeError) as e:
            # Skip this row if there's a column mismatch - table is being rebuilt
            continue
|
||||
|
||||
def _show_frame_type_flows(self, frame_type: str):
    """Show flows filtered by frame type with timing statistics"""
    # Gather (flow index, flow, per-frame-type stats) for every flow that
    # carries this frame type.
    matching = [
        (idx, flow, flow.frame_types[frame_type])
        for idx, flow in enumerate(self.analyzer.flows.values())
        if frame_type in flow.frame_types
    ]

    rows = []
    for flow_idx, flow, ft_stats in matching:
        # Min/max inter-arrival deltas in milliseconds (0 when no samples).
        if ft_stats.inter_arrival_times:
            min_delta = min(ft_stats.inter_arrival_times) * 1000
            max_delta = max(ft_stats.inter_arrival_times) * 1000
        else:
            min_delta = max_delta = 0

        quality_text = self._format_quality(self._calculate_quality(ft_stats))

        rows.append((
            [
                str(flow_idx + 1),
                f"{flow.src_ip}:{flow.src_port}",
                f"{flow.dst_ip}:{flow.dst_port}",
                flow.transport_protocol,
                str(ft_stats.count),
                f"{ft_stats.avg_inter_arrival * 1000:.1f}ms" if ft_stats.avg_inter_arrival > 0 else "N/A",
                f"{ft_stats.std_inter_arrival * 1000:.1f}ms" if ft_stats.std_inter_arrival > 0 else "N/A",
                f"{min_delta:.1f}ms" if min_delta > 0 else "N/A",
                f"{max_delta:.1f}ms" if max_delta > 0 else "N/A",
                str(len(ft_stats.outlier_frames)),
                quality_text,
            ],
            flow_idx,
        ))

    # Optional column sort, mirroring the overview table's behavior.
    if self.sort_column is not None and rows:
        rows.sort(
            key=lambda item: self._get_sort_key(item[0], self.sort_column),
            reverse=self.sort_reverse,
        )

    for row_data, original_index in rows:
        try:
            # Skip rows whose width no longer matches the table — the column
            # structure may be rebuilding concurrently.
            expected = len(self.flow_table.ordered_columns) if hasattr(self.flow_table, 'ordered_columns') else 0
            if expected > 0 and len(row_data) != expected:
                continue
            self.flow_table.add_row(*row_data, key=f"flow-{original_index}")
        except (ValueError, AttributeError):
            continue
|
||||
|
||||
def on_button_pressed(self, event: Button.Pressed) -> None:
    """Handle filter button clicks"""
    pressed = event.button

    # The overview button has a fixed id; everything else is a frame type.
    if pressed.id == "btn-overview":
        self.select_frame_type("Overview")
        return

    # Reverse-lookup: find which frame type this button widget belongs to.
    for frame_type, btn in self.frame_type_buttons.items():
        if btn == pressed:
            self.select_frame_type(frame_type)
            break
|
||||
def select_frame_type(self, frame_type: str):
    """Select a frame type filter"""
    # Rebuild the table only when the selection actually changes.
    changed = self.selected_frame_type != frame_type
    if changed:
        self.selected_frame_type = frame_type
        self._setup_flow_table()
        self.refresh_flow_data()
        self.post_message(self.FrameTypeSelected(frame_type))

    # Highlighting is refreshed unconditionally so the buttons always
    # reflect the current selection.
    self._update_button_highlighting()
|
||||
def _update_button_highlighting(self):
    """Update which button appears active/highlighted"""
    # Exactly one button — the current selection — carries the -active class.
    for frame_type, btn in self.frame_type_buttons.items():
        toggle = btn.add_class if frame_type == self.selected_frame_type else btn.remove_class
        toggle("-active")
||||
|
||||
def action_select_filter(self, number: str):
    """Handle number key press for filter selection"""
    # '1' is reserved for the overview tab.
    if number == '1':
        self.select_frame_type("Overview")
        return

    if number not in ('2', '3', '4', '5', '6', '7', '8', '9', '0'):
        return

    # Match the button whose visible label starts with "<number>." .
    prefix = f"{number}."
    for frame_type, btn in self.frame_type_buttons.items():
        if frame_type == "Overview" or not hasattr(btn, 'frame_type'):
            continue
        if btn.label.plain.startswith(prefix):
            self.select_frame_type(frame_type)
            break
||||
|
||||
def action_sort_column(self, column_index: int):
    """Sort table by specified column index (0-based)"""
    table = self.flow_table
    # Guard clauses: need a table with a known column set, and the index
    # must reference an existing column.
    if not table or not hasattr(table, 'ordered_columns'):
        return
    if column_index >= len(table.ordered_columns):
        return

    if self.sort_column == column_index:
        # Same column pressed again: flip the direction.
        self.sort_reverse = not self.sort_reverse
    else:
        # New column: start ascending.
        self.sort_column = column_index
        self.sort_reverse = False

    # Re-render with the new ordering.
    self.refresh_flow_data()
||||
|
||||
def _get_sort_key(self, row_data: list, column_index: int):
|
||||
"""Get sort key for a row based on column index"""
|
||||
if column_index >= len(row_data):
|
||||
return ""
|
||||
|
||||
value = row_data[column_index]
|
||||
|
||||
# Handle Text objects (extract plain text)
|
||||
if hasattr(value, 'plain'):
|
||||
text_value = value.plain
|
||||
else:
|
||||
text_value = str(value)
|
||||
|
||||
# Try to convert to number for numeric sorting
|
||||
try:
|
||||
# Handle values like "1,105" (remove commas)
|
||||
if ',' in text_value:
|
||||
text_value = text_value.replace(',', '')
|
||||
|
||||
# Handle values with units like "102.2ms" or "1.5MB"
|
||||
if text_value.endswith('ms'):
|
||||
return float(text_value[:-2])
|
||||
elif text_value.endswith('MB'):
|
||||
return float(text_value[:-2]) * 1000000
|
||||
elif text_value.endswith('KB'):
|
||||
return float(text_value[:-2]) * 1000
|
||||
elif text_value.endswith('B'):
|
||||
return float(text_value[:-1])
|
||||
elif text_value.endswith('%'):
|
||||
return float(text_value[:-1])
|
||||
elif text_value == "N/A" or text_value == "-":
|
||||
return -1 # Sort N/A and "-" values to the end
|
||||
else:
|
||||
return float(text_value)
|
||||
except (ValueError, AttributeError):
|
||||
# For string values, use alphabetical sorting
|
||||
return text_value.lower()
|
||||
|
||||
def _format_bytes(self, bytes_val: int) -> str:
|
||||
"""Format bytes to human readable"""
|
||||
if bytes_val < 1024:
|
||||
return f"{bytes_val}B"
|
||||
elif bytes_val < 1024 * 1024:
|
||||
return f"{bytes_val / 1024:.1f}KB"
|
||||
else:
|
||||
return f"{bytes_val / (1024 * 1024):.1f}MB"
|
||||
|
||||
def _calculate_quality(self, ft_stats) -> float:
|
||||
"""Calculate quality score for frame type stats"""
|
||||
if ft_stats.count == 0:
|
||||
return 0.0
|
||||
|
||||
outlier_rate = len(ft_stats.outlier_frames) / ft_stats.count
|
||||
consistency = 1.0 - min(outlier_rate * 2, 1.0)
|
||||
return consistency * 100
|
||||
|
||||
def _format_quality(self, quality: float) -> Text:
    """Format quality with color (green >= 90, yellow >= 70, else red)."""
    if quality >= 90:
        style = "green"
    elif quality >= 70:
        style = "yellow"
    else:
        style = "red"
    return Text(f"{quality:.0f}%", style=style)
||||
|
||||
def _get_all_frame_types(self) -> dict:
|
||||
"""Get all frame types across all flows with their total counts"""
|
||||
frame_types = {}
|
||||
for flow in self.analyzer.flows.values():
|
||||
for frame_type, stats in flow.frame_types.items():
|
||||
if frame_type not in frame_types:
|
||||
frame_types[frame_type] = 0
|
||||
frame_types[frame_type] += stats.count
|
||||
return frame_types
|
||||
|
||||
def _shorten_frame_type_name(self, frame_type: str) -> str:
|
||||
"""Shorten frame type names for better column display"""
|
||||
# Common abbreviations for better column display
|
||||
abbreviations = {
|
||||
'CH10-Data': 'CH10',
|
||||
'CH10-Multi-Source': 'Multi',
|
||||
'CH10-Extended': 'Ext',
|
||||
'CH10-ACTTS': 'ACTTS',
|
||||
'PTP-Signaling': 'PTP-Sig',
|
||||
'PTP-FollowUp': 'PTP-FU',
|
||||
'PTP-Sync': 'PTP-Syn',
|
||||
'PTP-Unknown (0x6)': 'PTP-Unk',
|
||||
'UDP': 'UDP',
|
||||
'TMATS': 'TMATS',
|
||||
'TCP': 'TCP'
|
||||
}
|
||||
return abbreviations.get(frame_type, frame_type[:8])
|
||||
|
||||
def _color_code_packet_count(self, count: int, max_count: int) -> Text:
    """Color code packet counts based on relative frequency"""
    # No reference maximum → neutral styling.
    if max_count == 0:
        return Text(str(count), style="white")

    # Style is chosen by this count's share of the column maximum.
    share = (count / max_count) * 100
    if share >= 80:
        style = "red bold"       # high volume (80-100% of max)
    elif share >= 50:
        style = "yellow bold"    # medium-high (50-79%)
    elif share >= 20:
        style = "cyan"           # medium (20-49%)
    elif share >= 5:
        style = "blue"           # low (5-19%)
    else:
        style = "dim white"      # very low (0-4%)
    return Text(str(count), style=style)
||||
@@ -43,7 +43,7 @@ class EnhancedFlowTable(Vertical):
|
||||
|
||||
selected_flow_index = reactive(0)
|
||||
sort_key = reactive("flows")
|
||||
simplified_view = reactive(False) # Toggle between detailed and simplified view
|
||||
simplified_view = reactive(True) # Default to simplified view without subflows
|
||||
|
||||
def __init__(self, analyzer: 'EthernetAnalyzer', **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
@@ -96,11 +96,12 @@ class EnhancedFlowTable(Vertical):
|
||||
table.add_column("Destination", width=18, key="dest")
|
||||
table.add_column("Extended", width=8, key="extended")
|
||||
table.add_column("Frame Type", width=10, key="frame_type")
|
||||
table.add_column("Pkts", width=6, key="rate")
|
||||
table.add_column("Pkts", width=6, key="packets")
|
||||
table.add_column("Size", width=8, key="volume")
|
||||
table.add_column("ΔT(ms)", width=8, key="delta_t")
|
||||
table.add_column("σ(ms)", width=8, key="sigma")
|
||||
table.add_column("Out", width=5, key="outliers")
|
||||
table.add_column("Rate", width=6, key="rate")
|
||||
|
||||
def refresh_data(self):
|
||||
"""Refresh flow table with current view mode"""
|
||||
@@ -228,45 +229,30 @@ class EnhancedFlowTable(Vertical):
|
||||
frame_summary = self._get_frame_summary(flow)
|
||||
frame_text = Text(frame_summary, style="blue")
|
||||
|
||||
# Rate with sparkline
|
||||
# Packet count (separate from rate)
|
||||
packets_text = Text(str(flow.frame_count), justify="right")
|
||||
|
||||
# Rate sparkline (separate column)
|
||||
rate_spark = self._create_rate_sparkline(metrics['rate_history'])
|
||||
rate_text = Text(f"{metrics['rate_history'][-1]:.0f} {rate_spark}")
|
||||
rate_text = Text(rate_spark, justify="center")
|
||||
|
||||
# Size with actual value
|
||||
size_value = self._format_bytes(flow.total_bytes)
|
||||
size_text = Text(f"{size_value:>8}")
|
||||
|
||||
# Delta T (average time between packets in ms)
|
||||
if flow.avg_inter_arrival > 0:
|
||||
delta_t_ms = flow.avg_inter_arrival * 1000
|
||||
if delta_t_ms >= 1000:
|
||||
delta_t_str = f"{delta_t_ms/1000:.1f}s"
|
||||
else:
|
||||
delta_t_str = f"{delta_t_ms:.1f}"
|
||||
else:
|
||||
delta_t_str = "N/A"
|
||||
delta_t_text = Text(delta_t_str, justify="right")
|
||||
# Delta T and Sigma - empty for main flows (subflows show the detail)
|
||||
delta_t_text = Text("", justify="right")
|
||||
sigma_text = Text("", justify="right")
|
||||
|
||||
# Sigma (standard deviation in ms)
|
||||
if flow.std_inter_arrival > 0:
|
||||
sigma_ms = flow.std_inter_arrival * 1000
|
||||
if sigma_ms >= 1000:
|
||||
sigma_str = f"{sigma_ms/1000:.1f}s"
|
||||
else:
|
||||
sigma_str = f"{sigma_ms:.1f}"
|
||||
else:
|
||||
sigma_str = "N/A"
|
||||
sigma_text = Text(sigma_str, justify="right")
|
||||
|
||||
# Outlier count (packets outside tolerance)
|
||||
outlier_count = len(flow.outlier_frames)
|
||||
outlier_text = Text(str(outlier_count), justify="right",
|
||||
style="red" if outlier_count > 0 else "green")
|
||||
# Outlier count - sum of frame-type-specific outliers (not flow-level)
|
||||
frame_type_outlier_count = sum(len(ft_stats.outlier_frames) for ft_stats in flow.frame_types.values())
|
||||
outlier_text = Text(str(frame_type_outlier_count), justify="right",
|
||||
style="red" if frame_type_outlier_count > 0 else "green")
|
||||
|
||||
return [
|
||||
num_text, source_text, proto_text, dest_text,
|
||||
extended_text, frame_text, rate_text, size_text,
|
||||
delta_t_text, sigma_text, outlier_text
|
||||
extended_text, frame_text, packets_text, size_text,
|
||||
delta_t_text, sigma_text, outlier_text, rate_text
|
||||
]
|
||||
|
||||
def _create_simplified_row(self, num: int, flow: 'FlowStats') -> List[Text]:
|
||||
@@ -389,20 +375,24 @@ class EnhancedFlowTable(Vertical):
|
||||
if flow.enhanced_analysis.decoder_type != "Standard":
|
||||
return int(flow.enhanced_analysis.avg_frame_quality)
|
||||
else:
|
||||
# Base quality on outlier percentage
|
||||
outlier_pct = len(flow.outlier_frames) / flow.frame_count * 100 if flow.frame_count > 0 else 0
|
||||
# Base quality on frame-type-specific outlier percentage
|
||||
frame_type_outlier_count = sum(len(ft_stats.outlier_frames) for ft_stats in flow.frame_types.values())
|
||||
outlier_pct = frame_type_outlier_count / flow.frame_count * 100 if flow.frame_count > 0 else 0
|
||||
return max(0, int(100 - outlier_pct * 10))
|
||||
|
||||
def _get_flow_status(self, flow: 'FlowStats') -> str:
|
||||
"""Determine flow status"""
|
||||
if flow.enhanced_analysis.decoder_type != "Standard":
|
||||
return "Enhanced"
|
||||
elif len(flow.outlier_frames) > flow.frame_count * 0.1:
|
||||
return "Alert"
|
||||
elif len(flow.outlier_frames) > 0:
|
||||
return "Warning"
|
||||
else:
|
||||
return "Normal"
|
||||
# Use frame-type-specific outliers for status
|
||||
frame_type_outlier_count = sum(len(ft_stats.outlier_frames) for ft_stats in flow.frame_types.values())
|
||||
if frame_type_outlier_count > flow.frame_count * 0.1:
|
||||
return "Alert"
|
||||
elif frame_type_outlier_count > 0:
|
||||
return "Warning"
|
||||
else:
|
||||
return "Normal"
|
||||
|
||||
def _get_flow_style(self, flow: 'FlowStats') -> Optional[str]:
|
||||
"""Get styling for flow row"""
|
||||
@@ -465,12 +455,18 @@ class EnhancedFlowTable(Vertical):
|
||||
return combinations
|
||||
|
||||
def _create_protocol_subrows(self, flow: 'FlowStats') -> List[List[Text]]:
|
||||
"""Create sub-rows for enhanced protocol/frame type breakdown only"""
|
||||
"""Create sub-rows for protocol/frame type breakdown - matches details panel logic"""
|
||||
subrows = []
|
||||
enhanced_frame_types = self._get_enhanced_frame_types(flow)
|
||||
combinations = self._get_enhanced_protocol_frame_combinations(flow, enhanced_frame_types)
|
||||
|
||||
for extended_proto, frame_type, count, percentage in combinations: # Show all enhanced subrows
|
||||
# For enhanced flows, show ALL frame types (same logic as details panel)
|
||||
if flow.enhanced_analysis.decoder_type != "Standard":
|
||||
combinations = self._get_protocol_frame_combinations(flow)
|
||||
else:
|
||||
# For standard flows, only show enhanced frame types
|
||||
enhanced_frame_types = self._get_enhanced_frame_types(flow)
|
||||
combinations = self._get_enhanced_protocol_frame_combinations(flow, enhanced_frame_types)
|
||||
|
||||
for extended_proto, frame_type, count, percentage in combinations:
|
||||
# Calculate timing for this frame type if available
|
||||
frame_delta_t = ""
|
||||
frame_sigma = ""
|
||||
@@ -478,12 +474,30 @@ class EnhancedFlowTable(Vertical):
|
||||
|
||||
if frame_type in flow.frame_types:
|
||||
ft_stats = flow.frame_types[frame_type]
|
||||
|
||||
# Always calculate timing if we have data, even if very small values
|
||||
if ft_stats.avg_inter_arrival > 0:
|
||||
dt_ms = ft_stats.avg_inter_arrival * 1000
|
||||
frame_delta_t = f"{dt_ms:.1f}" if dt_ms < 1000 else f"{dt_ms/1000:.1f}s"
|
||||
elif len(ft_stats.inter_arrival_times) >= 2:
|
||||
# If avg is 0 but we have data, recalculate on the fly
|
||||
import statistics
|
||||
avg_arrival = statistics.mean(ft_stats.inter_arrival_times)
|
||||
if avg_arrival > 0:
|
||||
dt_ms = avg_arrival * 1000
|
||||
frame_delta_t = f"{dt_ms:.1f}" if dt_ms < 1000 else f"{dt_ms/1000:.1f}s"
|
||||
|
||||
if ft_stats.std_inter_arrival > 0:
|
||||
sig_ms = ft_stats.std_inter_arrival * 1000
|
||||
frame_sigma = f"{sig_ms:.1f}" if sig_ms < 1000 else f"{sig_ms/1000:.1f}s"
|
||||
elif len(ft_stats.inter_arrival_times) >= 2:
|
||||
# If std is 0 but we have data, recalculate on the fly
|
||||
import statistics
|
||||
std_arrival = statistics.stdev(ft_stats.inter_arrival_times)
|
||||
if std_arrival > 0:
|
||||
sig_ms = std_arrival * 1000
|
||||
frame_sigma = f"{sig_ms:.1f}" if sig_ms < 1000 else f"{sig_ms/1000:.1f}s"
|
||||
|
||||
frame_outliers = str(len(ft_stats.outlier_frames))
|
||||
|
||||
subrow = [
|
||||
@@ -497,7 +511,8 @@ class EnhancedFlowTable(Vertical):
|
||||
Text(f"{self._format_bytes(count * (flow.total_bytes // flow.frame_count) if flow.frame_count > 0 else 0):>8}", style="dim"),
|
||||
Text(frame_delta_t, style="dim", justify="right"),
|
||||
Text(frame_sigma, style="dim", justify="right"),
|
||||
Text(frame_outliers, style="dim red" if frame_outliers and int(frame_outliers) > 0 else "dim", justify="right")
|
||||
Text(frame_outliers, style="dim red" if frame_outliers and int(frame_outliers) > 0 else "dim", justify="right"),
|
||||
Text("", style="dim") # Empty rate column for subrows
|
||||
]
|
||||
subrows.append(subrow)
|
||||
|
||||
|
||||
@@ -267,20 +267,46 @@ class SubFlowDetailsPanel(Vertical):
|
||||
sections.append(Text("Sub-Flow Timing", style="bold cyan"))
|
||||
sections.append(timing_table)
|
||||
|
||||
# Outlier details if any
|
||||
if subflow.outlier_frames and subflow.outlier_details:
|
||||
# Enhanced outlier details if any
|
||||
if subflow.outlier_frames:
|
||||
outlier_table = Table(show_header=True, box=None)
|
||||
outlier_table.add_column("Frame#", justify="right")
|
||||
outlier_table.add_column("Prev Frame#", justify="right")
|
||||
outlier_table.add_column("ΔT(ms)", justify="right")
|
||||
outlier_table.add_column("σ Dev", justify="right")
|
||||
|
||||
for frame_num, delta_t in subflow.outlier_details[:5]: # Show first 5 outliers
|
||||
# Use enhanced details if available, fallback to legacy details
|
||||
outlier_data = []
|
||||
if hasattr(subflow, 'enhanced_outlier_details') and subflow.enhanced_outlier_details:
|
||||
for frame_num, prev_frame_num, delta_t in subflow.enhanced_outlier_details[:5]:
|
||||
# Calculate sigma deviation
|
||||
sigma_dev = "N/A"
|
||||
if subflow.std_inter_arrival > 0 and subflow.avg_inter_arrival > 0:
|
||||
deviation = (delta_t - subflow.avg_inter_arrival) / subflow.std_inter_arrival
|
||||
sigma_dev = f"{deviation:.1f}σ"
|
||||
|
||||
outlier_data.append((frame_num, prev_frame_num, delta_t, sigma_dev))
|
||||
elif subflow.outlier_details:
|
||||
for frame_num, delta_t in subflow.outlier_details[:5]:
|
||||
# Calculate sigma deviation
|
||||
sigma_dev = "N/A"
|
||||
if subflow.std_inter_arrival > 0 and subflow.avg_inter_arrival > 0:
|
||||
deviation = (delta_t - subflow.avg_inter_arrival) / subflow.std_inter_arrival
|
||||
sigma_dev = f"{deviation:.1f}σ"
|
||||
|
||||
outlier_data.append((frame_num, "N/A", delta_t, sigma_dev))
|
||||
|
||||
for frame_num, prev_frame_num, delta_t, sigma_dev in outlier_data:
|
||||
outlier_table.add_row(
|
||||
str(frame_num),
|
||||
f"{delta_t * 1000:.1f}"
|
||||
str(prev_frame_num) if prev_frame_num != "N/A" else "N/A",
|
||||
f"{delta_t * 1000:.1f}",
|
||||
sigma_dev
|
||||
)
|
||||
|
||||
if len(subflow.outlier_details) > 5:
|
||||
outlier_table.add_row("...", f"+{len(subflow.outlier_details) - 5} more")
|
||||
total_outliers = len(subflow.enhanced_outlier_details) if hasattr(subflow, 'enhanced_outlier_details') else len(subflow.outlier_details)
|
||||
if total_outliers > 5:
|
||||
outlier_table.add_row("...", "...", f"+{total_outliers - 5}", "more")
|
||||
|
||||
sections.append(Text("Outlier Details", style="bold red"))
|
||||
sections.append(outlier_table)
|
||||
@@ -320,16 +346,40 @@ class SubFlowDetailsPanel(Vertical):
|
||||
reverse=True
|
||||
):
|
||||
percentage = (stats.count / total * 100) if total > 0 else 0
|
||||
delta_t = f"{stats.avg_inter_arrival * 1000:.1f}" if stats.avg_inter_arrival > 0 else "N/A"
|
||||
sigma = f"{stats.std_inter_arrival * 1000:.1f}" if stats.std_inter_arrival > 0 else "N/A"
|
||||
|
||||
# Use same logic as grid rows for consistency
|
||||
delta_t = ""
|
||||
if stats.avg_inter_arrival > 0:
|
||||
dt_ms = stats.avg_inter_arrival * 1000
|
||||
delta_t = f"{dt_ms:.1f}" if dt_ms < 1000 else f"{dt_ms/1000:.1f}s"
|
||||
elif len(stats.inter_arrival_times) >= 2:
|
||||
# Fallback calculation if stored avg is zero
|
||||
import statistics
|
||||
avg_arrival = statistics.mean(stats.inter_arrival_times)
|
||||
if avg_arrival > 0:
|
||||
dt_ms = avg_arrival * 1000
|
||||
delta_t = f"{dt_ms:.1f}" if dt_ms < 1000 else f"{dt_ms/1000:.1f}s"
|
||||
|
||||
sigma = ""
|
||||
if stats.std_inter_arrival > 0:
|
||||
sig_ms = stats.std_inter_arrival * 1000
|
||||
sigma = f"{sig_ms:.1f}" if sig_ms < 1000 else f"{sig_ms/1000:.1f}s"
|
||||
elif len(stats.inter_arrival_times) >= 2:
|
||||
# Fallback calculation if stored std is zero
|
||||
import statistics
|
||||
std_arrival = statistics.stdev(stats.inter_arrival_times)
|
||||
if std_arrival > 0:
|
||||
sig_ms = std_arrival * 1000
|
||||
sigma = f"{sig_ms:.1f}" if sig_ms < 1000 else f"{sig_ms/1000:.1f}s"
|
||||
|
||||
outliers = str(len(stats.outlier_frames))
|
||||
|
||||
frame_table.add_row(
|
||||
frame_type[:15],
|
||||
frame_type, # Show full frame type name
|
||||
f"{stats.count:,}",
|
||||
f"{percentage:.1f}%",
|
||||
delta_t,
|
||||
sigma,
|
||||
delta_t if delta_t else "N/A",
|
||||
sigma if sigma else "N/A",
|
||||
outliers
|
||||
)
|
||||
|
||||
|
||||
279
analyzer/tui/textual/widgets/tabbed_flow_view.py
Normal file
279
analyzer/tui/textual/widgets/tabbed_flow_view.py
Normal file
@@ -0,0 +1,279 @@
|
||||
"""
|
||||
Tabbed Flow View Widget - Shows Overview + Frame Type specific tabs
|
||||
"""
|
||||
|
||||
from textual.widgets import TabbedContent, TabPane, DataTable, Static
|
||||
from textual.containers import Vertical, Horizontal
|
||||
from textual.reactive import reactive
|
||||
from typing import TYPE_CHECKING, Dict, List, Optional, Set
|
||||
from rich.text import Text
|
||||
from rich.table import Table
|
||||
from rich.panel import Panel
|
||||
from .flow_table_v2 import EnhancedFlowTable
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ....analysis.core import EthernetAnalyzer
|
||||
from ....models import FlowStats
|
||||
|
||||
|
||||
class FrameTypeFlowTable(DataTable):
    """Flow table filtered for a specific frame type"""

    def __init__(self, frame_type: str, **kwargs):
        super().__init__(**kwargs)
        self.frame_type = frame_type
        # Row-level cursor with striped rows; header shown, row labels hidden.
        self.cursor_type = "row"
        self.zebra_stripes = True
        self.show_header = True
        self.show_row_labels = False

    def setup_columns(self):
        """Setup columns for frame-type specific view"""
        # (label, width, key) specs added in display order.
        column_specs = [
            ("Flow", 4, "flow_id"),
            ("Source IP", 16, "src_ip"),
            ("Src Port", 8, "src_port"),
            ("Dest IP", 16, "dst_ip"),
            ("Dst Port", 8, "dst_port"),
            ("Protocol", 8, "protocol"),
            ("Packets", 8, "packets"),
            ("Avg ΔT", 10, "avg_delta"),
            ("Std ΔT", 10, "std_delta"),
            ("Outliers", 8, "outliers"),
            ("Quality", 8, "quality"),
        ]
        for label, width, key in column_specs:
            self.add_column(label, width=width, key=key)
|
||||
|
||||
class FrameTypeStatsPanel(Static):
    """Statistics panel for a specific frame type.

    Renders a bordered panel whose text body is replaced via
    update_content() whenever new aggregate numbers are computed.
    """

    def __init__(self, frame_type: str, **kwargs):
        """Create the panel for *frame_type* with placeholder content."""
        super().__init__(**kwargs)
        self.frame_type = frame_type
        # Placeholder shown until update_content() supplies real statistics.
        self._stats_content = f"Statistics for {self.frame_type}\n\nNo data available yet."

    def render(self):
        """Render frame type statistics"""
        return Panel(
            self._stats_content,
            title=f"📊 {self.frame_type} Statistics",
            border_style="blue"
        )

    def update_content(self, content: str):
        """Update the statistics content"""
        self._stats_content = content
        # Trigger a re-render so the new content is displayed immediately.
        self.refresh()
||||
|
||||
|
||||
class FrameTypeTabContent(Vertical):
    """Content for a specific frame type tab.

    Left side: a FrameTypeFlowTable listing every flow that carries this
    frame type. Right side: a FrameTypeStatsPanel with aggregate numbers.
    """

    def __init__(self, frame_type: str, analyzer: 'EthernetAnalyzer', **kwargs):
        super().__init__(**kwargs)
        self.frame_type = frame_type
        self.analyzer = analyzer

    def _sanitized_suffix(self) -> str:
        """Frame type name made safe for embedding in a Textual widget ID.

        Centralizes the replace() chain that was previously duplicated in
        compose(), on_mount(), refresh_data() and _update_stats_panel().
        """
        return self.frame_type.replace('-', '_').replace(':', '_')

    def compose(self):
        """Compose the frame type tab content"""
        suffix = self._sanitized_suffix()
        with Horizontal():
            # Left side - Flow table for this frame type (sanitized ID)
            yield FrameTypeFlowTable(self.frame_type, id=f"table-{suffix}")
            # Right side - Frame type statistics (sanitized ID)
            yield FrameTypeStatsPanel(self.frame_type, id=f"stats-{suffix}")

    def on_mount(self):
        """Initialize the frame type tab"""
        table = self.query_one(f"#table-{self._sanitized_suffix()}", FrameTypeFlowTable)
        table.setup_columns()
        self.refresh_data()

    def refresh_data(self):
        """Refresh the flow table and statistics panel for this frame type."""
        try:
            table = self.query_one(f"#table-{self._sanitized_suffix()}", FrameTypeFlowTable)
            table.clear()

            # Collect (flow index, flow, per-type stats) for every flow that
            # carries this frame type.
            flows_with_frametype = []
            for i, flow in enumerate(self.analyzer.flows.values()):
                if self.frame_type in flow.frame_types:
                    flows_with_frametype.append((i, flow, flow.frame_types[self.frame_type]))

            for flow_idx, flow, ft_stats in flows_with_frametype:
                quality_text = self._format_quality(self._calculate_quality_score(ft_stats))

                # Timing columns; "N/A" when no inter-arrival data exists.
                avg_delta = f"{ft_stats.avg_inter_arrival * 1000:.1f}ms" if ft_stats.avg_inter_arrival > 0 else "N/A"
                std_delta = f"{ft_stats.std_inter_arrival * 1000:.1f}ms" if ft_stats.std_inter_arrival > 0 else "N/A"

                row_data = [
                    str(flow_idx + 1),  # Flow ID (1-based for display)
                    flow.src_ip,
                    str(flow.src_port),
                    flow.dst_ip,
                    str(flow.dst_port),
                    flow.transport_protocol,
                    str(ft_stats.count),
                    avg_delta,
                    std_delta,
                    str(len(ft_stats.outlier_frames)),
                    quality_text,
                ]
                table.add_row(*row_data, key=f"flow-{flow_idx}")

            self._update_stats_panel(flows_with_frametype)

        except Exception:
            # Best-effort: widgets may not be mounted yet during startup.
            pass

    def _calculate_quality_score(self, ft_stats) -> float:
        """Calculate a 0-100 quality score from the frame-type outlier rate."""
        if ft_stats.count == 0:
            return 0.0

        # Lower outlier rate = higher consistency; the 2x factor makes a
        # 50% outlier rate already score zero.
        outlier_rate = len(ft_stats.outlier_frames) / ft_stats.count
        consistency = 1.0 - min(outlier_rate * 2, 1.0)

        return consistency * 100

    def _format_quality(self, quality_score: float) -> Text:
        """Format quality score with color coding (green/yellow/red)."""
        if quality_score >= 90:
            return Text(f"{quality_score:.0f}%", style="green")
        elif quality_score >= 70:
            return Text(f"{quality_score:.0f}%", style="yellow")
        else:
            return Text(f"{quality_score:.0f}%", style="red")

    def _update_stats_panel(self, flows_with_frametype):
        """Update the statistics panel with aggregate numbers.

        Args:
            flows_with_frametype: list of (flow index, flow, frame-type stats)
                tuples produced by refresh_data().
        """
        try:
            stats_panel = self.query_one(f"#stats-{self._sanitized_suffix()}", FrameTypeStatsPanel)

            if not flows_with_frametype:
                stats_content = f"No flows found with {self.frame_type} frames"
            else:
                total_flows = len(flows_with_frametype)
                total_packets = sum(ft_stats.count for _, _, ft_stats in flows_with_frametype)
                total_outliers = sum(len(ft_stats.outlier_frames) for _, _, ft_stats in flows_with_frametype)

                # FIX: guard against ZeroDivisionError when every stats entry
                # has count == 0 — previously this raised and the broad except
                # silently skipped the whole panel update.
                outlier_rate = (total_outliers / total_packets * 100) if total_packets else 0.0

                # Average only over entries that actually have timing data.
                avg_timings = [ft_stats.avg_inter_arrival for _, _, ft_stats in flows_with_frametype if ft_stats.avg_inter_arrival > 0]
                overall_avg = sum(avg_timings) / len(avg_timings) if avg_timings else 0

                stats_content = f"""Flows: {total_flows}
Total Packets: {total_packets:,}
Total Outliers: {total_outliers}
Outlier Rate: {outlier_rate:.1f}%
Avg Inter-arrival: {overall_avg*1000:.1f}ms"""

            stats_panel.update_content(stats_content)

        except Exception:
            # Best-effort: the panel may not exist yet while mounting.
            pass
|
||||
|
||||
|
||||
class TabbedFlowView(TabbedContent):
    """Tabbed view showing Overview + Frame Type specific tabs"""

    # NOTE(review): the reactive default is a single shared set object at
    # class level; assumed Textual copies reactive defaults per instance —
    # confirm against the Textual reactive documentation.
    active_frame_types = reactive(set())

    def __init__(self, analyzer: 'EthernetAnalyzer', **kwargs):
        """Store the analyzer and prepare per-frame-type tab bookkeeping."""
        super().__init__(**kwargs)
        self.analyzer = analyzer
        # Set by compose(); holds the Overview tab's EnhancedFlowTable.
        self.overview_table = None
        # Maps frame type name -> FrameTypeTabContent widget.
        self.frame_type_tabs = {}

    def compose(self):
        """Create the tabbed interface"""
        # Overview tab (always present)
        with TabPane("Overview", id="tab-overview"):
            self.overview_table = EnhancedFlowTable(self.analyzer, id="overview-flow-table")
            yield self.overview_table

        # Create tabs for common frame types (based on detection analysis).
        # Tabs are created up-front even if no data exists yet; they simply
        # render empty until flows with that frame type appear.
        common_frame_types = ["CH10-Data", "CH10-ACTTS", "TMATS", "PTP-Sync", "PTP-Signaling", "UDP", "IGMP"]

        for frame_type in common_frame_types:
            # '-' and ':' are sanitized out of the widget IDs.
            tab_id = f"tab-{frame_type.lower().replace('-', '_').replace(':', '_')}"
            content_id = f"content-{frame_type.replace('-', '_').replace(':', '_')}"
            with TabPane(frame_type, id=tab_id):
                tab_content = FrameTypeTabContent(frame_type, self.analyzer, id=content_id)
                self.frame_type_tabs[frame_type] = tab_content
                yield tab_content

    def _create_frame_type_tabs(self):
        """Create tabs for detected frame types.

        NOTE(review): this is a generator (it yields) but is never iterated
        anywhere in this class, so these panes are never actually mounted —
        appears to be dead code; confirm before relying on it. Unlike
        compose(), the content id built here is NOT sanitized.
        """
        frame_types = self._get_detected_frame_types()

        for frame_type in sorted(frame_types):
            tab_id = f"tab-{frame_type.lower().replace('-', '_').replace(':', '_')}"
            with TabPane(frame_type, id=tab_id):
                tab_content = FrameTypeTabContent(frame_type, self.analyzer, id=f"content-{frame_type}")
                self.frame_type_tabs[frame_type] = tab_content
                yield tab_content

    def _get_detected_frame_types(self) -> Set[str]:
        """Get all detected frame types from current flows"""
        frame_types = set()

        for flow in self.analyzer.flows.values():
            frame_types.update(flow.frame_types.keys())

        return frame_types

    def on_mount(self):
        """Initialize tabs"""
        self.refresh_all_tabs()

    def refresh_all_tabs(self):
        """Refresh data in all tabs"""
        # Refresh overview tab
        if self.overview_table:
            self.overview_table.refresh_data()

        # Get detected frame types
        detected_frame_types = self._get_detected_frame_types()

        # Refresh only the frame type tabs that currently have data; tabs
        # without data are left as-is (they render empty when selected).
        for frame_type, tab_content in self.frame_type_tabs.items():
            if frame_type in detected_frame_types:
                tab_content.refresh_data()
                # Tab has data, it will show content when selected
                pass
            else:
                # Tab has no data, it will show empty when selected
                pass

    def update_tabs(self):
        """Update tabs based on newly detected frame types"""
        current_frame_types = self._get_detected_frame_types()

        # Check if we need to add new tabs. Adding panes dynamically would
        # require rebuilding the widget, so for now newly seen frame types
        # only trigger a refresh of the existing tabs.
        new_frame_types = current_frame_types - self.active_frame_types
        if new_frame_types:
            # This would require rebuilding the widget
            # For now, just refresh existing tabs
            self.refresh_all_tabs()

        self.active_frame_types = current_frame_types
|
||||
Reference in New Issue
Block a user