"""
StreamLens Textual Application V2 - TipTop-Inspired Design

Modern TUI with real-time metrics, sparklines, and professional monitoring aesthetic
"""
|
|||
|
|
|
|||
|
|
from textual.app import App, ComposeResult
|
|||
|
|
from textual.containers import Container, Horizontal, Vertical, ScrollableContainer
|
|||
|
|
from textual.widgets import Header, Footer, Static, DataTable, Label, TabPane
|
|||
|
|
from textual.reactive import reactive
|
|||
|
|
from textual.timer import Timer
|
|||
|
|
from textual.events import MouseDown, MouseMove
|
|||
|
|
from typing import TYPE_CHECKING
|
|||
|
|
from rich.text import Text
|
|||
|
|
from rich.console import Group
|
|||
|
|
from rich.panel import Panel
|
|||
|
|
from rich.table import Table
|
|||
|
|
import time
|
|||
|
|
import signal
|
|||
|
|
import sys
|
|||
|
|
import datetime
|
|||
|
|
from pathlib import Path
|
|||
|
|
import subprocess
|
|||
|
|
import platform
|
|||
|
|
|
|||
|
|
from .widgets.sparkline import SparklineWidget
|
|||
|
|
from .widgets.metric_card import MetricCard
|
|||
|
|
from .widgets.flow_table_v2 import EnhancedFlowTable
|
|||
|
|
from .widgets.filtered_flow_view import FilteredFlowView
|
|||
|
|
from ...reporting import FlowReportGenerator
|
|||
|
|
from .widgets.split_flow_details import FlowMainDetailsPanel, SubFlowDetailsPanel
|
|||
|
|
from .widgets.debug_panel import DebugPanel
|
|||
|
|
from .widgets.progress_bar import ParsingProgressBar
|
|||
|
|
from ...analysis.background_analyzer import BackgroundAnalyzer
|
|||
|
|
|
|||
|
|
if TYPE_CHECKING:
|
|||
|
|
from ...analysis.core import EthernetAnalyzer
|
|||
|
|
|
|||
|
|
|
|||
|
|
class StreamLensAppV2(App):
    """
    StreamLens TipTop-Inspired Interface

    Features:
    - Real-time metrics with sparklines
    - Color-coded quality indicators
    - Compact information display
    - Multi-column layout
    - Smooth live updates
    """

    # App-wide stylesheet and chrome configuration.
    CSS_PATH = "styles/streamlens_v2.tcss"
    ENABLE_COMMAND_PALETTE = False
    # No automatic focus on mount; focus is set explicitly in on_mount.
    AUTO_FOCUS = None

    # Key bindings: bare digits select frame-type filters, alt+digit sorts
    # the flow table by column, letters trigger app-level actions.
    # NOTE(review): "?" binds "toggle_help" but no action_toggle_help is
    # defined in this file — confirm it exists on a base class or mixin.
    BINDINGS = [
        ("q", "quit", "Quit"),
        ("1", "select_filter('1')", "Overview"),
        ("2", "select_filter('2')", "Frame Type 2"),
        ("3", "select_filter('3')", "Frame Type 3"),
        ("4", "select_filter('4')", "Frame Type 4"),
        ("5", "select_filter('5')", "Frame Type 5"),
        ("6", "select_filter('6')", "Frame Type 6"),
        ("7", "select_filter('7')", "Frame Type 7"),
        ("8", "select_filter('8')", "Frame Type 8"),
        ("9", "select_filter('9')", "Frame Type 9"),
        ("0", "select_filter('0')", "Frame Type 10"),
        ("alt+1", "sort_table_column(0)", "Sort by column 1"),
        ("alt+2", "sort_table_column(1)", "Sort by column 2"),
        ("alt+3", "sort_table_column(2)", "Sort by column 3"),
        ("alt+4", "sort_table_column(3)", "Sort by column 4"),
        ("alt+5", "sort_table_column(4)", "Sort by column 5"),
        ("alt+6", "sort_table_column(5)", "Sort by column 6"),
        ("alt+7", "sort_table_column(6)", "Sort by column 7"),
        ("alt+8", "sort_table_column(7)", "Sort by column 8"),
        ("alt+9", "sort_table_column(8)", "Sort by column 9"),
        ("alt+0", "sort_table_column(9)", "Sort by column 10"),
        ("p", "toggle_pause", "Pause"),
        ("d", "show_details", "Details"),
        ("v", "toggle_view_mode", "Toggle View"),
        ("r", "generate_report", "Generate Report"),
        ("o", "copy_outliers", "Copy Outliers"),
        ("?", "toggle_help", "Help"),
    ]

    # Reactive attributes — these drive the metric-card displays.
    total_flows = reactive(0)
    total_packets = reactive(0)
    packets_per_sec = reactive(0.0)
    bytes_per_sec = reactive(0.0)
    enhanced_flows = reactive(0)
    outlier_count = reactive(0)
    debug_visible = reactive(False)  # Hide debug panel for now

    # Update timers, created in on_mount; None until then.
    # NOTE(review): annotated as Timer but initialized to None — effectively
    # Optional[Timer]; consider importing Optional and tightening this.
    metric_timer: Timer = None
    flow_timer: Timer = None
|
|||
|
|
|
|||
|
|
def __init__(self, analyzer: 'EthernetAnalyzer'):
    """Bind the app to an analyzer and prepare background-parsing support."""
    super().__init__()

    self.analyzer = analyzer
    self.title = "StreamLens"
    self.sub_title = "Network Flow Analysis"
    self.paused = False

    # Background parsing support. A single worker thread is used on
    # purpose: it prevents race conditions in outlier frame-reference
    # tracking that multiple parser threads would introduce.
    self.background_analyzer = BackgroundAnalyzer(
        analyzer=analyzer,
        num_threads=1,
        batch_size=1000,
        progress_callback=self._on_progress_update,
        flow_update_callback=self._on_flow_update,
    )
    self.pcap_file = None

    # Rolling sample buffers that feed the sparklines (one sample per
    # update tick, capped at max_history entries — a 60-second window).
    self.max_history = 60
    self.packets_history = []
    self.bytes_history = []
    self.flows_history = []
|
|||
|
|
|
|||
|
|
def compose(self) -> ComposeResult:
    """Create TipTop-inspired layout.

    Yields, top to bottom: header, a hidden parsing progress bar, a
    compact metrics bar, the three-column content area, and the footer.
    """
    yield Header()

    with Container(id="main-container"):
        # Progress bar for PCAP loading (initially hidden)
        yield ParsingProgressBar(id="progress-bar")

        # Ultra-compact metrics bar; cards are refreshed later by
        # _update_metric_cards(), these are just initial values.
        with Horizontal(id="metrics-bar"):
            yield MetricCard("Flows", f"{self.total_flows}", id="flows-metric")
            yield MetricCard("Pkts/s", f"{self.packets_per_sec:.0f}", id="packets-metric")
            yield MetricCard("Vol/s", self._format_bytes_per_sec(self.bytes_per_sec), id="volume-metric")
            yield MetricCard("Enhanced", f"{self.enhanced_flows}", color="success", id="enhanced-metric")
            yield MetricCard("Outliers", f"{self.outlier_count}", color="warning" if self.outlier_count > 0 else "normal", id="outliers-metric")

        # Main content area with conditional debug panel
        with Horizontal(id="content-area"):
            # Left - Filtered flow view with frame type buttons
            yield FilteredFlowView(
                self.analyzer,
                id="filtered-flow-view",
                classes="panel-wide"
            )

            # Middle - Flow details, split into main and sub-flow panels
            with Vertical(id="flow-panels"):
                yield FlowMainDetailsPanel(id="main-flow-details")
                yield SubFlowDetailsPanel(id="sub-flow-details")

            # Right - Debug panel (only when debug_visible is True)
            if self.debug_visible:
                yield DebugPanel(id="debug-panel")

    yield Footer()
|
|||
|
|
|
|||
|
|
def on_mount(self) -> None:
    """Initialize the application with TipTop-style updates.

    Logs panel availability to the debug panel (when visible), sets the
    initial subtitle, starts the periodic metric/flow refresh timers,
    seeds the sparkline history, and schedules initial keyboard focus.

    Fix: the original used bare ``except:`` clauses, which also swallow
    SystemExit/KeyboardInterrupt; narrowed to ``except Exception``.
    """
    try:
        debug_panel = self.query_one("#debug-panel", DebugPanel)
        debug_panel.add_debug_message("APP: Application mounted, checking panels...")

        try:
            # Existence checks only — the query results are not needed here.
            self.query_one("#main-flow-details", FlowMainDetailsPanel)
            self.query_one("#sub-flow-details", SubFlowDetailsPanel)
            debug_panel.add_debug_message("APP: Both panels found successfully")
        except Exception as e:
            debug_panel.add_debug_message(f"APP: Panel query failed: {e}")
    except Exception:
        pass  # Debug panel not visible

    # Set initial subtitle with view mode (best effort — "#flow-table" is
    # not part of the current layout, so this is expected to fall through).
    try:
        flow_table = self.query_one("#flow-table", EnhancedFlowTable)
        view_mode = flow_table.get_current_view_mode()
        status = "PAUSED" if self.paused else "LIVE"
        self.sub_title = f"Network Flow Analysis - {status} - {view_mode} VIEW"
    except Exception:
        pass

    self.update_metrics()

    # Set up update intervals (slow on purpose: background flow-update
    # callbacks do the real-time work; these are low-cost fallbacks).
    self.metric_timer = self.set_interval(5.0, self.update_metrics)  # 0.2 Hz
    self.flow_timer = self.set_interval(10.0, self.update_flows)  # 0.1 Hz

    # Initialize sparkline history
    self._initialize_history()

    # Set initial focus to the flow table for immediate keyboard navigation
    self.call_after_refresh(self._set_initial_focus)
|
|||
|
|
|
|||
|
|
def _set_initial_focus(self):
    """Focus the flow table once the filtered flow view has mounted."""
    try:
        view = self.query_one("#filtered-flow-view", FilteredFlowView)
        view.flow_table.focus()
    except Exception:
        # Widgets not ready yet — retry shortly.
        self.set_timer(0.1, self._set_initial_focus)
|
|||
|
|
|
|||
|
|
def _initialize_history(self):
|
|||
|
|
"""Initialize metrics history arrays"""
|
|||
|
|
current_time = time.time()
|
|||
|
|
for _ in range(self.max_history):
|
|||
|
|
self.packets_history.append(0)
|
|||
|
|
self.bytes_history.append(0)
|
|||
|
|
self.flows_history.append(0)
|
|||
|
|
|
|||
|
|
def update_metrics(self) -> None:
    """Update real-time metrics and sparklines.

    Pulls a summary from the analyzer, recomputes the (whole-run average)
    packet/byte rates, counts enhanced flows and frame-type outliers, and
    pushes everything into the metric cards. No-op while paused.
    """
    if self.paused:
        return

    # Get current metrics
    summary = self.analyzer.get_summary()
    self.total_flows = summary.get('unique_flows', 0)
    self.total_packets = summary.get('total_packets', 0)

    # Calculate rates (simplified for now)
    # In real implementation, track deltas over time — these are averages
    # over the whole session, not a sliding window.
    current_time = time.time()
    if not hasattr(self, '_start_time'):
        self._start_time = current_time

    # max(1, ...) avoids a divide-by-zero on the very first tick.
    elapsed = max(1, current_time - self._start_time)
    self.packets_per_sec = self.total_packets / elapsed
    self.bytes_per_sec = summary.get('total_bytes', 0) / elapsed

    # Count enhanced and outliers (thread-safe access)
    enhanced = 0
    outliers = 0
    try:
        # Use background analyzer's thread-safe flow access
        flows = self.background_analyzer.get_current_flows()
        for flow in flows.values():
            if flow.enhanced_analysis.decoder_type != "Standard":
                enhanced += 1
            # Use frame-type-specific outliers instead of flow-level outliers
            outliers += sum(len(ft_stats.outlier_frames) for ft_stats in flow.frame_types.values())
    except Exception:
        # Fallback to direct access if background analyzer not available
        for flow in self.analyzer.flows.values():
            if flow.enhanced_analysis.decoder_type != "Standard":
                enhanced += 1
            # Use frame-type-specific outliers instead of flow-level outliers
            outliers += sum(len(ft_stats.outlier_frames) for ft_stats in flow.frame_types.values())

    self.enhanced_flows = enhanced
    self.outlier_count = outliers

    # Update metric cards
    self._update_metric_cards()

    # Update sparklines (removed - no longer in left panel)
    # self._update_sparklines()
|
|||
|
|
|
|||
|
|
def _update_metric_cards(self):
    """Push the current reactive values into the metric-bar cards.

    Fix: the original tested ``packets_per_sec > 10000`` before
    ``> 50000``, so the "error" color branch was unreachable; thresholds
    are now tested highest-first (matching the outlier-color logic below).
    """
    # Update flows metric
    flows_card = self.query_one("#flows-metric", MetricCard)
    flows_card.update_value(f"{self.total_flows}")

    # Update packets/s with color coding (highest threshold first)
    packets_card = self.query_one("#packets-metric", MetricCard)
    packets_card.update_value(f"{self.packets_per_sec:.1f}")
    if self.packets_per_sec > 50000:
        packets_card.color = "error"
    elif self.packets_per_sec > 10000:
        packets_card.color = "warning"
    else:
        packets_card.color = "success"

    # Update volume/s
    volume_card = self.query_one("#volume-metric", MetricCard)
    volume_card.update_value(self._format_bytes_per_sec(self.bytes_per_sec))

    # Update enhanced flows
    enhanced_card = self.query_one("#enhanced-metric", MetricCard)
    enhanced_card.update_value(f"{self.enhanced_flows}")

    # Update outliers with color (highest threshold first)
    outliers_card = self.query_one("#outliers-metric", MetricCard)
    outliers_card.update_value(f"{self.outlier_count}")
    if self.outlier_count > 100:
        outliers_card.color = "error"
    elif self.outlier_count > 10:
        outliers_card.color = "warning"
    else:
        outliers_card.color = "normal"
|
|||
|
|
|
|||
|
|
def _update_sparklines(self):
    """Append the latest samples and redraw the sparkline widgets.

    NOTE: currently unused — update_metrics() no longer calls this since
    the sparklines were removed from the left panel.
    """
    series = (
        (self.packets_history, self.packets_per_sec),
        (self.bytes_history, self.bytes_per_sec),
        (self.flows_history, self.total_flows),
    )
    for history, sample in series:
        history.append(sample)

    # Trim all three series in lockstep to the rolling window.
    if len(self.packets_history) > self.max_history:
        for history, _ in series:
            history.pop(0)

    # Update sparkline widgets
    self.query_one("#flow-rate-spark", SparklineWidget).update_data(self.flows_history)
    self.query_one("#packet-rate-spark", SparklineWidget).update_data(self.packets_history)
|
|||
|
|
|
|||
|
|
def update_flows(self) -> None:
    """Fallback refresh of the filtered flow view (skipped while paused)."""
    if self.paused:
        return

    view = self.query_one("#filtered-flow-view", FilteredFlowView)
    view.refresh_frame_types()
    view.refresh_flow_data()
|
|||
|
|
def _on_progress_update(self, progress):
|
|||
|
|
"""Handle progress updates from background parser"""
|
|||
|
|
try:
|
|||
|
|
# Use call_from_thread to safely update UI from background thread
|
|||
|
|
self.call_from_thread(self._update_progress_ui, progress)
|
|||
|
|
except Exception:
|
|||
|
|
# Ignore errors during shutdown
|
|||
|
|
pass
|
|||
|
|
|
|||
|
|
def _update_progress_ui(self, progress):
    """Apply a parsing-progress snapshot to the progress bar.

    Must run on the main thread (dispatched via call_from_thread).
    Shows errors, finalizes on completion (also triggering frame-type
    button creation), or updates the running progress display.

    Fix: dropped the unused ``as e`` binding on the outer except.
    """
    try:
        progress_bar = self.query_one("#progress-bar", ParsingProgressBar)

        if progress.error:
            progress_bar.show_error(progress.error)
        elif progress.is_complete:
            progress_bar.complete_parsing()
            # Trigger frame type button creation now that parsing is complete
            self._create_frame_type_buttons()
        else:
            # Start progress if this is the first update
            if not progress_bar.is_visible and progress.total_packets > 0:
                progress_bar.start_parsing(progress.total_packets)

            # Update progress
            progress_bar.update_progress(
                progress.processed_packets,
                progress.total_packets,
                progress.packets_per_second,
                progress.estimated_time_remaining
            )
    except Exception:
        # Progress bar widget may not be available yet
        pass
|
|||
|
|
|
|||
|
|
def _on_flow_update(self):
|
|||
|
|
"""Handle flow data updates from background parser"""
|
|||
|
|
try:
|
|||
|
|
# Use call_from_thread to safely update UI from background thread
|
|||
|
|
self.call_from_thread(self._update_flow_ui)
|
|||
|
|
except Exception:
|
|||
|
|
# Ignore errors during shutdown
|
|||
|
|
pass
|
|||
|
|
|
|||
|
|
def _update_flow_ui(self):
    """Update flow UI (called from main thread).

    Refreshes the filtered flow view (frame-type buttons first, so new
    types get buttons as they are detected), backfills button creation if
    parsing finished before buttons existed, then refreshes the metrics.
    """
    try:
        # Update filtered flow view - frame types first for dynamic button creation
        flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
        flow_view.refresh_frame_types()  # This will create buttons as frame types are detected
        flow_view.refresh_flow_data()

        # Also trigger button creation if parsing is complete but buttons haven't been created yet
        if not self.analyzer.is_parsing and not getattr(flow_view, '_buttons_created', False):
            self._create_frame_type_buttons()

        # Also update metrics in real-time
        self.update_metrics()
    except Exception:
        # Flow view widget may not be available yet
        pass
|
|||
|
|
|
|||
|
|
def _create_frame_type_buttons(self):
    """Refresh the filtered view so frame-type buttons exist after parsing.

    Fix: dropped the unused ``as e`` binding on the except clause.
    """
    try:
        flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
        # Force refresh of frame types now that parsing is complete
        flow_view.refresh_frame_types()
        flow_view.refresh_flow_data()
    except Exception:
        # Flow view widget may not be available yet
        pass
|
|||
|
|
|
|||
|
|
def start_background_parsing(self, pcap_file: str):
    """Record the target PCAP path and begin parsing it asynchronously."""
    self.pcap_file = pcap_file
    self.background_analyzer.start_parsing(pcap_file)
|
|||
|
|
|
|||
|
|
def stop_background_parsing(self):
    """Ask the background analyzer to halt any in-progress parsing."""
    self.background_analyzer.stop_parsing()
|
|||
|
|
|
|||
|
|
def cleanup(self):
    """Release background resources and stop timers on shutdown.

    Errors are deliberately swallowed so a failing cleanup step can
    never block application exit.

    Fix: dropped the unused ``as e`` binding on the except clause.
    """
    try:
        self.background_analyzer.cleanup()
        # Cancel any pending timers
        if self.metric_timer:
            self.metric_timer.stop()
        if self.flow_timer:
            self.flow_timer.stop()
    except Exception:
        # Don't let cleanup errors prevent shutdown
        pass
|
|||
|
|
|
|||
|
|
def on_enhanced_flow_table_flow_selected(self, event: EnhancedFlowTable.FlowSelected) -> None:
    """Handle flow selection events.

    Logs the selection to the debug panel (when visible) and pushes the
    selected flow into the main- and sub-flow detail panels.

    Fix: the original used a bare ``except:``; narrowed to
    ``except Exception`` so SystemExit/KeyboardInterrupt propagate.
    """
    try:
        debug_panel = self.query_one("#debug-panel", DebugPanel)
        flow_info = f"{event.flow.src_ip}:{event.flow.src_port}" if event.flow else "None"
        debug_panel.add_debug_message(f"APP: Flow selected - {flow_info}, subflow={event.subflow_type}")
    except Exception:
        pass  # Debug panel not visible

    if event.flow:
        # Update main flow details panel
        main_panel = self.query_one("#main-flow-details", FlowMainDetailsPanel)
        main_panel.update_flow(event.flow)

        # Update sub-flow details panel
        sub_panel = self.query_one("#sub-flow-details", SubFlowDetailsPanel)
        sub_panel.update_flow(event.flow, event.subflow_type)
|||
|
|
|
|||
|
|
def _format_bytes_per_sec(self, bps: float) -> str:
|
|||
|
|
"""Format bytes per second with appropriate units"""
|
|||
|
|
if bps >= 1_000_000_000:
|
|||
|
|
return f"{bps / 1_000_000_000:.1f} GB/s"
|
|||
|
|
elif bps >= 1_000_000:
|
|||
|
|
return f"{bps / 1_000_000:.1f} MB/s"
|
|||
|
|
elif bps >= 1_000:
|
|||
|
|
return f"{bps / 1_000:.1f} KB/s"
|
|||
|
|
else:
|
|||
|
|
return f"{bps:.0f} B/s"
|
|||
|
|
|
|||
|
|
def action_toggle_pause(self) -> None:
    """Freeze or resume live updates and reflect the state in the subtitle."""
    self.paused = not self.paused
    mode = "PAUSED" if self.paused else "LIVE"
    self.sub_title = f"Network Flow Analysis - {mode}"
|
|||
|
|
|
|||
|
|
def action_select_filter(self, number: str) -> None:
    """Route a number-key press to the filtered flow view's filter action."""
    try:
        view = self.query_one("#filtered-flow-view", FilteredFlowView)
        view.action_select_filter(number)
    except Exception:
        pass  # View not mounted yet.
|
|||
|
|
|
|||
|
|
def action_sort_table_column(self, column_index: int) -> None:
    """Route an alt+digit press to the flow view's column-sort action."""
    try:
        view = self.query_one("#filtered-flow-view", FilteredFlowView)
        view.action_sort_column(column_index)
    except Exception:
        pass  # View not mounted yet.
|
|||
|
|
|
|||
|
|
def action_show_details(self) -> None:
    """Show detailed view for the selected flow (not yet implemented).

    TODO: Implement detailed flow modal.
    """
|
|||
|
|
|
|||
|
|
def action_toggle_view_mode(self) -> None:
    """Toggle between display modes (currently a no-op).

    Could later cycle through column layouts or show more/less detail in
    the frame type views.
    """
|
|||
|
|
|
|||
|
|
|
|||
|
|
def on_mouse_down(self, event: MouseDown) -> None:
    """Prevent default mouse down behavior to disable mouse interaction."""
    # The app is intentionally keyboard-driven; swallowing the event
    # suppresses Textual's default mouse handling.
    event.prevent_default()
|
|||
|
|
|
|||
|
|
def on_mouse_move(self, event: MouseMove) -> None:
    """Prevent default mouse move behavior to disable mouse interaction."""
    # Companion to on_mouse_down — keeps hover effects from firing.
    event.prevent_default()
|
|||
|
|
|
|||
|
|
def action_generate_report(self) -> None:
    """Generate a comprehensive flow analysis report as a markdown file.

    Writes ``streamlens_flow_report_<timestamp>.md`` to the working
    directory and flashes the outcome in the subtitle for three seconds.

    Fix: dropped the unused ``report_content`` binding — the generator is
    called for its file-writing side effect.
    """
    try:
        # Generate timestamp-based filename
        timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        output_file = f"streamlens_flow_report_{timestamp}.md"

        # Create report generator and write the report (markdown format)
        report_generator = FlowReportGenerator(self.analyzer)
        report_generator.generate_report(output_file, "markdown")

        # Show success notification in the footer
        self.sub_title = f"✅ Report generated: {output_file}"

        # Set a timer to restore the original subtitle
        self.set_timer(3.0, self._restore_subtitle)

    except Exception as e:
        # Show error notification
        self.sub_title = f"❌ Report generation failed: {str(e)}"
        self.set_timer(3.0, self._restore_subtitle)
|
|||
|
|
|
|||
|
|
def _restore_subtitle(self) -> None:
|
|||
|
|
"""Restore the original subtitle"""
|
|||
|
|
status = "PAUSED" if self.paused else "LIVE"
|
|||
|
|
self.sub_title = f"Network Flow Analysis - {status}"
|
|||
|
|
|
|||
|
|
def action_copy_outliers(self) -> None:
    """Copy outlier frame information to clipboard.

    Builds a plain-text report of frame-type-specific timing outliers for
    one flow and copies it via _copy_to_clipboard. Outcome is flashed in
    the subtitle for two seconds.
    """
    try:
        # Get selected flow from the filtered view
        # NOTE(review): flow_view is queried but never used — selection is
        # not wired up yet; the first flow is taken instead (see below).
        flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
        # For now, get the first flow (could be improved to use actual selection)
        flows = list(self.analyzer.flows.values())
        selected_flow = flows[0] if flows else None

        if not selected_flow:
            self.sub_title = "⚠️ No flow selected"
            self.set_timer(2.0, self._restore_subtitle)
            return

        # Build frame-type-specific outlier information
        outlier_info = []
        outlier_info.append(f"Flow: {selected_flow.src_ip}:{selected_flow.src_port} → {selected_flow.dst_ip}:{selected_flow.dst_port}")
        outlier_info.append(f"Protocol: {selected_flow.transport_protocol}")
        outlier_info.append(f"Total Packets: {selected_flow.frame_count}")

        # Calculate total frame-type-specific outliers
        total_frame_type_outliers = sum(len(ft_stats.outlier_frames) for ft_stats in selected_flow.frame_types.values())
        outlier_info.append(f"Total Frame-Type Outliers: {total_frame_type_outliers}")

        if total_frame_type_outliers > 0:
            outlier_info.append(f"\n=== Frame Type Outlier Analysis ===")

            # Show outliers per frame type, most outliers first
            for frame_type, ft_stats in sorted(selected_flow.frame_types.items(), key=lambda x: len(x[1].outlier_frames), reverse=True):
                if ft_stats.outlier_frames:
                    outlier_info.append(f"\n{frame_type}: {len(ft_stats.outlier_frames)} outliers")
                    outlier_info.append(f" Frames: {', '.join(map(str, sorted(ft_stats.outlier_frames)))}")
                    outlier_info.append(f" Avg ΔT: {ft_stats.avg_inter_arrival * 1000:.3f} ms")
                    outlier_info.append(f" Std σ: {ft_stats.std_inter_arrival * 1000:.3f} ms")
                    outlier_info.append(f" 3σ Threshold: {(ft_stats.avg_inter_arrival + 3 * ft_stats.std_inter_arrival) * 1000:.3f} ms")

                    # Show enhanced outlier information for this frame type
                    # (enhanced details carry the previous frame number too);
                    # fall back to the plain outlier details otherwise.
                    if hasattr(ft_stats, 'enhanced_outlier_details') and ft_stats.enhanced_outlier_details:
                        outlier_info.append(f" Enhanced Outlier Details:")
                        for frame_num, prev_frame_num, inter_time in sorted(ft_stats.enhanced_outlier_details[:5]):
                            # Deviation in sigmas; guarded against zero std-dev.
                            deviation = (inter_time - ft_stats.avg_inter_arrival) / ft_stats.std_inter_arrival if ft_stats.std_inter_arrival > 0 else 0
                            outlier_info.append(f" Frame {frame_num} (from {prev_frame_num}): {inter_time * 1000:.3f} ms ({deviation:.1f}σ)")
                        if len(ft_stats.enhanced_outlier_details) > 5:
                            outlier_info.append(f" ... and {len(ft_stats.enhanced_outlier_details) - 5} more")
                    elif ft_stats.outlier_details:
                        outlier_info.append(f" Outlier Details:")
                        for frame_num, inter_time in sorted(ft_stats.outlier_details[:5]):
                            deviation = (inter_time - ft_stats.avg_inter_arrival) / ft_stats.std_inter_arrival if ft_stats.std_inter_arrival > 0 else 0
                            outlier_info.append(f" Frame {frame_num}: {inter_time * 1000:.3f} ms ({deviation:.1f}σ)")
                        if len(ft_stats.outlier_details) > 5:
                            outlier_info.append(f" ... and {len(ft_stats.outlier_details) - 5} more")
        else:
            outlier_info.append("\nNo frame-type-specific timing outliers detected.")

        # Copy to clipboard
        clipboard_text = "\n".join(outlier_info)
        self._copy_to_clipboard(clipboard_text)

        # Show success notification
        # NOTE(review): total is recomputed here although the value above is
        # still valid — harmless but redundant.
        total_frame_type_outliers = sum(len(ft_stats.outlier_frames) for ft_stats in selected_flow.frame_types.values())
        self.sub_title = f"✅ Copied {total_frame_type_outliers} frame-type outliers to clipboard"
        self.set_timer(2.0, self._restore_subtitle)

    except Exception as e:
        self.sub_title = f"❌ Failed to copy: {str(e)}"
        self.set_timer(2.0, self._restore_subtitle)
|
|||
|
|
|
|||
|
|
def _copy_to_clipboard(self, text: str) -> None:
    """Copy text to the system clipboard via the platform's native utility.

    Uses pbcopy (macOS), xclip/xsel (Linux), or clip (Windows).

    Raises:
        Exception: if no clipboard utility is found or the platform is
            unsupported.

    Fix: the Windows branch passed a list with ``shell=True``, which
    needlessly spawns a shell; clip.exe is invoked directly now.
    """
    system = platform.system()
    payload = text.encode('utf-8')

    if system == "Darwin":  # macOS
        process = subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE)
        process.communicate(payload)
    elif system == "Linux":
        # Try xclip first, then xsel
        try:
            process = subprocess.Popen(['xclip', '-selection', 'clipboard'], stdin=subprocess.PIPE)
            process.communicate(payload)
        except FileNotFoundError:
            try:
                process = subprocess.Popen(['xsel', '--clipboard', '--input'], stdin=subprocess.PIPE)
                process.communicate(payload)
            except FileNotFoundError:
                raise Exception("Neither xclip nor xsel found. Please install one.")
    elif system == "Windows":
        # clip.exe lives in System32; no shell needed.
        process = subprocess.Popen(['clip'], stdin=subprocess.PIPE)
        process.communicate(payload)
    else:
        raise Exception(f"Unsupported platform: {system}")
|
|||
|
|
|
|||
|
|
def action_quit(self) -> None:
    """Quit the application with proper cleanup"""
    # Stop background parsing and timers before tearing down the UI.
    self.cleanup()
    self.exit()
|
|||
|
|
|
|||
|
|
def on_unmount(self) -> None:
    """Called when app is being unmounted - ensure cleanup"""
    # Safety net: action_quit also calls cleanup(); cleanup swallows its
    # own errors, so a second invocation here is harmless.
    self.cleanup()
|