GUI
This commit is contained in:
16
analyzer/gui/__init__.py
Normal file
16
analyzer/gui/__init__.py
Normal file
@@ -0,0 +1,16 @@
|
||||
"""
|
||||
GUI module for StreamLens Ethernet Traffic Analyzer
|
||||
"""
|
||||
|
||||
try:
|
||||
from PySide6.QtWidgets import QApplication
|
||||
GUI_AVAILABLE = True
|
||||
except ImportError:
|
||||
GUI_AVAILABLE = False
|
||||
print("Warning: PySide6 not available. GUI mode disabled.")
|
||||
|
||||
__all__ = ['GUI_AVAILABLE']
|
||||
|
||||
if GUI_AVAILABLE:
|
||||
from .main_window import StreamLensMainWindow
|
||||
__all__.append('StreamLensMainWindow')
|
||||
568
analyzer/gui/main_window.py
Normal file
568
analyzer/gui/main_window.py
Normal file
@@ -0,0 +1,568 @@
|
||||
"""
|
||||
Main GUI window for StreamLens
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from typing import Optional, List, TYPE_CHECKING
|
||||
|
||||
try:
|
||||
from PySide6.QtWidgets import (
|
||||
QMainWindow, QWidget, QVBoxLayout, QHBoxLayout, QSplitter,
|
||||
QTableWidget, QTableWidgetItem, QTextEdit, QMenuBar, QMenu,
|
||||
QFileDialog, QMessageBox, QProgressBar, QStatusBar, QLabel,
|
||||
QHeaderView, QPushButton, QGroupBox
|
||||
)
|
||||
from PySide6.QtCore import Qt, QThread, Signal, QTimer
|
||||
from PySide6.QtGui import QAction, QIcon, QFont
|
||||
|
||||
# Matplotlib integration - lazy loaded
|
||||
matplotlib = None
|
||||
FigureCanvas = None
|
||||
NavigationToolbar = None
|
||||
Figure = None
|
||||
plt = None
|
||||
|
||||
def _ensure_matplotlib_gui_loaded():
|
||||
"""Lazy load matplotlib for GUI mode"""
|
||||
global matplotlib, FigureCanvas, NavigationToolbar, Figure, plt
|
||||
|
||||
if matplotlib is not None:
|
||||
return True
|
||||
|
||||
try:
|
||||
import matplotlib as mpl
|
||||
matplotlib = mpl
|
||||
matplotlib.use('Qt5Agg') # Use Qt backend for matplotlib
|
||||
|
||||
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FC
|
||||
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NT
|
||||
from matplotlib.figure import Figure as Fig
|
||||
import matplotlib.pyplot as pyplot
|
||||
|
||||
FigureCanvas = FC
|
||||
NavigationToolbar = NT
|
||||
Figure = Fig
|
||||
plt = pyplot
|
||||
|
||||
return True
|
||||
except ImportError as e:
|
||||
print(f"Matplotlib GUI integration not available: {e}")
|
||||
return False
|
||||
|
||||
except ImportError as e:
|
||||
print(f"GUI dependencies not available: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..analysis.core import EthernetAnalyzer
|
||||
from ..models.flow_stats import FlowStats
|
||||
|
||||
|
||||
class PlotWidget(QWidget):
    """Widget hosting a matplotlib canvas plus its navigation toolbar.

    Matplotlib is imported lazily on first use so that merely constructing
    the widget never pays the import cost (or fails) when plotting is unused.
    """

    def __init__(self, parent=None):
        super().__init__(parent)
        # Created lazily by _ensure_initialized().
        self.figure = None
        self.canvas = None
        self.toolbar = None
        self._initialized = False

        # FIX: use a private name; assigning to ``self.layout`` would shadow
        # QWidget.layout() and break any later call to that method.
        self._layout = QVBoxLayout()
        self.setLayout(self._layout)

    def _ensure_initialized(self):
        """Create figure/canvas/toolbar on first use.

        Returns:
            bool: False when matplotlib (with a Qt backend) is unavailable.
        """
        if self._initialized:
            return True

        if not _ensure_matplotlib_gui_loaded():
            return False

        self.figure = Figure(figsize=(12, 8))
        self.canvas = FigureCanvas(self.figure)
        self.toolbar = NavigationToolbar(self.canvas, self)

        self._layout.addWidget(self.toolbar)
        self._layout.addWidget(self.canvas)

        self._initialized = True
        self.clear_plot()
        return True

    def clear_plot(self):
        """Clear the plot and show the initial placeholder message."""
        if not self._ensure_initialized():
            return
        self.figure.clear()

        # Centered hint shown while no flow is selected.
        ax = self.figure.add_subplot(111)
        ax.text(0.5, 0.5, 'Select a flow with Chapter 10 data to view signal visualization',
                ha='center', va='center', transform=ax.transAxes, fontsize=12,
                color='gray', style='italic')
        ax.set_xlim(0, 1)
        ax.set_ylim(0, 1)
        ax.axis('off')

        self.canvas.draw()

    def plot_flow_signals(self, flow: 'FlowStats', signal_data_list: List, flow_key: str):
        """Plot Chapter 10 signals for a flow, one subplot per signal.

        Args:
            flow: the flow whose signals are plotted (currently informational).
            signal_data_list: decoded SignalData objects; may be empty.
            flow_key: human-readable flow label used in the figure title.
        """
        if not self._ensure_initialized():
            return
        self.figure.clear()

        if not signal_data_list:
            # Show message when no data could be decoded.
            ax = self.figure.add_subplot(111)
            ax.text(0.5, 0.5, 'No Chapter 10 signal data found in selected flow',
                    ha='center', va='center', transform=ax.transAxes, fontsize=14)
            ax.set_xlim(0, 1)
            ax.set_ylim(0, 1)
            ax.axis('off')
            self.canvas.draw()
            return

        # One stacked subplot per decoded signal.
        n_signals = len(signal_data_list)
        axes = []

        for idx, signal_data in enumerate(signal_data_list):
            ax = self.figure.add_subplot(n_signals, 1, idx + 1)
            axes.append(ax)

            # Plot each channel contained in this signal.
            for channel_name, data in signal_data.channels.items():
                ax.plot(signal_data.timestamps, data, label=channel_name, linewidth=0.8)

            ax.set_xlabel('Time (s)')
            ax.set_ylabel('Amplitude')
            ax.grid(True, alpha=0.3)
            ax.legend()

            # Annotate with per-channel units from TMATS metadata when present.
            if signal_data.metadata and signal_data.metadata.channel_configs:
                config_info = []
                for ch_id, config in signal_data.metadata.channel_configs.items():
                    if 'units' in config:
                        config_info.append(f"CH{ch_id}: {config.get('units', 'Unknown')}")
                if config_info:
                    ax.set_title(f"Channels: {', '.join(config_info)}")

        self.figure.suptitle(f'Chapter 10 Signals - Flow: {flow_key}', fontsize=14)
        self.figure.tight_layout()
        self.canvas.draw()
|
||||
|
||||
|
||||
class PCAPLoadThread(QThread):
    """Background worker that parses a PCAP file off the UI thread.

    Signals:
        progress_updated(int): percentage complete, 0-100.
        loading_finished(object): the populated analyzer instance.
        error_occurred(str): human-readable failure description.
    """

    progress_updated = Signal(int)
    loading_finished = Signal(object)  # Emits the analyzer
    error_occurred = Signal(str)

    def __init__(self, file_path: str, parent=None):
        super().__init__(parent)
        self.file_path = file_path

    def run(self):
        """Load, validate, and analyze the PCAP, emitting progress as it goes."""
        try:
            from ..analysis.core import EthernetAnalyzer
            from ..utils.pcap_loader import PCAPLoader

            analyzer = EthernetAnalyzer()

            loader = PCAPLoader(self.file_path)
            if not loader.validate_file():
                self.error_occurred.emit(f"Invalid PCAP file: {self.file_path}")
                return

            packets = loader.load_all()
            analyzer.all_packets = packets

            # Feed packets through the analyzer, throttling progress signals
            # to one per 1000 packets (plus a final 100% update).
            total = len(packets)
            for index, packet in enumerate(packets, 1):
                analyzer._process_single_packet(packet, index)
                if index == total or index % 1000 == 0:
                    self.progress_updated.emit(int((index / total) * 100))

            analyzer.calculate_statistics()

            self.loading_finished.emit(analyzer)

        except Exception as exc:
            self.error_occurred.emit(str(exc))
|
||||
|
||||
|
||||
class StreamLensMainWindow(QMainWindow):
    """Main application window: flow table on the left, signal plots on the right."""

    def __init__(self):
        super().__init__()
        # Populated once a PCAP has been analyzed.
        self.analyzer: Optional['EthernetAnalyzer'] = None
        self.current_file = None    # path of the PCAP currently shown
        self.loading_thread = None  # background PCAPLoadThread, if any

        self.setWindowTitle("StreamLens - Ethernet Traffic Analyzer")
        self.setGeometry(100, 100, 1400, 900)

        self.setup_ui()
        self.setup_menus()
        self.setup_status_bar()

    def setup_ui(self):
        """Set up the main UI layout."""
        central_widget = QWidget()
        self.setCentralWidget(central_widget)

        # Left: flow list/info; right: plots. The splitter lets the user resize.
        main_splitter = QSplitter(Qt.Horizontal)

        left_panel = self.create_left_panel()
        left_panel.setMaximumWidth(600)
        left_panel.setMinimumWidth(400)

        right_panel = self.create_right_panel()

        main_splitter.addWidget(left_panel)
        main_splitter.addWidget(right_panel)
        main_splitter.setStretchFactor(0, 0)  # Don't stretch left panel
        main_splitter.setStretchFactor(1, 1)  # Stretch right panel

        layout = QHBoxLayout()
        layout.addWidget(main_splitter)
        central_widget.setLayout(layout)

    def create_left_panel(self):
        """Create the left panel with file info and the flow table."""
        widget = QWidget()
        layout = QVBoxLayout()

        # File info group
        file_group = QGroupBox("File Information")
        file_layout = QVBoxLayout()
        self.file_info_label = QLabel("No file loaded")
        self.file_info_label.setWordWrap(True)
        file_layout.addWidget(self.file_info_label)
        file_group.setLayout(file_layout)
        layout.addWidget(file_group)

        # Flow list group
        flows_group = QGroupBox("IP Flows (sorted by max sigma deviation)")
        flows_layout = QVBoxLayout()

        self.flows_table = QTableWidget()
        self.flows_table.setColumnCount(5)
        self.flows_table.setHorizontalHeaderLabels([
            "Source → Destination", "Packets", "Max σ", "Protocols", "Frame Types"
        ])

        # Configure table: first column absorbs extra width.
        header = self.flows_table.horizontalHeader()
        header.setStretchLastSection(True)
        header.setSectionResizeMode(0, QHeaderView.Stretch)

        self.flows_table.setSelectionBehavior(QTableWidget.SelectRows)
        self.flows_table.setAlternatingRowColors(True)
        self.flows_table.itemSelectionChanged.connect(self.on_flow_selected)

        flows_layout.addWidget(self.flows_table)

        # Status label for visualization feedback
        self.viz_status_label = QLabel("Select a flow to view signal visualization")
        self.viz_status_label.setStyleSheet("color: gray; font-style: italic;")
        flows_layout.addWidget(self.viz_status_label)

        flows_group.setLayout(flows_layout)
        layout.addWidget(flows_group)

        widget.setLayout(layout)
        return widget

    def create_right_panel(self):
        """Create the right panel hosting the matplotlib plot area."""
        widget = QWidget()
        layout = QVBoxLayout()

        plot_group = QGroupBox("Signal Visualization")
        plot_layout = QVBoxLayout()

        self.plot_widget = PlotWidget()
        plot_layout.addWidget(self.plot_widget)

        plot_group.setLayout(plot_layout)
        layout.addWidget(plot_group)

        widget.setLayout(layout)
        return widget

    def setup_menus(self):
        """Set up the File and View menus."""
        menubar = self.menuBar()

        # File menu
        file_menu = menubar.addMenu("File")

        open_action = QAction("Open PCAP...", self)
        open_action.setShortcut("Ctrl+O")
        open_action.triggered.connect(self.open_pcap_file)
        file_menu.addAction(open_action)

        file_menu.addSeparator()

        exit_action = QAction("Exit", self)
        exit_action.setShortcut("Ctrl+Q")
        exit_action.triggered.connect(self.close)
        file_menu.addAction(exit_action)

        # View menu
        view_menu = menubar.addMenu("View")

        refresh_action = QAction("Refresh", self)
        refresh_action.setShortcut("F5")
        refresh_action.triggered.connect(self.refresh_data)
        view_menu.addAction(refresh_action)

    def setup_status_bar(self):
        """Set up the status bar with an (initially hidden) progress bar."""
        self.status_bar = QStatusBar()
        self.setStatusBar(self.status_bar)

        self.progress_bar = QProgressBar()
        self.progress_bar.setVisible(False)
        self.status_bar.addPermanentWidget(self.progress_bar)

        self.status_bar.showMessage("Ready")

    def open_pcap_file(self):
        """Prompt for a PCAP file and start loading it."""
        file_path, _ = QFileDialog.getOpenFileName(
            self,
            "Open PCAP File",
            "",
            "PCAP Files (*.pcap *.pcapng);;All Files (*)"
        )

        if file_path:
            self.load_pcap_file(file_path)

    def load_pcap_file(self, file_path: str):
        """Load *file_path* in a background thread so the UI stays responsive."""
        if self.loading_thread and self.loading_thread.isRunning():
            return  # one load at a time

        self.current_file = file_path
        self.status_bar.showMessage(f"Loading {os.path.basename(file_path)}...")
        self.progress_bar.setVisible(True)
        self.progress_bar.setValue(0)

        # Disable UI during loading
        self.flows_table.setEnabled(False)

        # Start loading thread
        self.loading_thread = PCAPLoadThread(file_path)
        self.loading_thread.progress_updated.connect(self.progress_bar.setValue)
        self.loading_thread.loading_finished.connect(self.on_pcap_loaded)
        self.loading_thread.error_occurred.connect(self.on_loading_error)
        self.loading_thread.start()

    def on_pcap_loaded(self, analyzer: 'EthernetAnalyzer'):
        """Handle successful PCAP loading: refresh summary and flow table."""
        self.analyzer = analyzer
        self.progress_bar.setVisible(False)
        self.flows_table.setEnabled(True)

        # Update file info
        summary = analyzer.get_summary()
        file_info = f"File: {os.path.basename(self.current_file)}\n"
        file_info += f"Packets: {summary['total_packets']:,}\n"
        file_info += f"Flows: {summary['unique_flows']}\n"
        file_info += f"IPs: {summary['unique_ips']}"
        self.file_info_label.setText(file_info)

        self.populate_flows_table()

        self.status_bar.showMessage(f"Loaded {summary['total_packets']:,} packets from {os.path.basename(self.current_file)}")

    def on_loading_error(self, error_message: str):
        """Report a loading failure and re-enable the UI."""
        self.progress_bar.setVisible(False)
        self.flows_table.setEnabled(True)

        QMessageBox.critical(self, "Loading Error", f"Error loading PCAP file:\n{error_message}")
        self.status_bar.showMessage("Error loading file")

    def populate_flows_table(self):
        """Fill the flow table, sorted by max sigma deviation then packet count."""
        if not self.analyzer:
            return

        summary = self.analyzer.get_summary()
        flows_list = list(summary['flows'].values())

        # Sort by maximum sigma deviation (ties broken by frame count).
        flows_list.sort(key=lambda x: (
            self.analyzer.statistics_engine.get_max_sigma_deviation(x),
            x.frame_count
        ), reverse=True)

        self.flows_table.setRowCount(len(flows_list))

        for row, flow in enumerate(flows_list):
            # Source -> Destination
            self.flows_table.setItem(row, 0, QTableWidgetItem(f"{flow.src_ip} → {flow.dst_ip}"))

            # Packets — this item also carries the flow object for selection handling.
            packets_item = QTableWidgetItem(str(flow.frame_count))
            packets_item.setData(Qt.UserRole, flow)
            self.flows_table.setItem(row, 1, packets_item)

            # Max sigma deviation
            max_sigma = self.analyzer.statistics_engine.get_max_sigma_deviation(flow)
            self.flows_table.setItem(row, 2, QTableWidgetItem(f"{max_sigma:.2f}σ"))

            # Protocols (with detected higher-level protocol types in parens)
            protocols = ", ".join(flow.protocols)
            if flow.detected_protocol_types:
                protocols += f" ({', '.join(flow.detected_protocol_types)})"
            self.flows_table.setItem(row, 3, QTableWidgetItem(protocols))

            # Frame types
            self.flows_table.setItem(row, 4, QTableWidgetItem(", ".join(flow.frame_types.keys())))

        self.flows_table.resizeColumnsToContents()

    def on_flow_selected(self):
        """React to table selection by rendering (or clearing) the plot."""
        selected_rows = self.flows_table.selectionModel().selectedRows()

        if not selected_rows:
            # No selection - clear plot and show hint.
            self.plot_widget.clear_plot()
            self.viz_status_label.setText("Select a flow to view signal visualization")
            self.viz_status_label.setStyleSheet("color: gray; font-style: italic;")
            return

        self.visualize_selected_flow()

    def visualize_selected_flow(self):
        """Extract and plot Chapter 10 signals for the currently selected flow."""
        if not self.analyzer:
            return

        selected_rows = self.flows_table.selectionModel().selectedRows()
        if not selected_rows:
            return

        # The Packets column item carries the flow object.
        row = selected_rows[0].row()
        flow_item = self.flows_table.item(row, 1)
        flow = flow_item.data(Qt.UserRole)

        if not flow:
            return

        # FIX: case-insensitive check, consistent with the TUI's
        # NavigationHandler.has_chapter10_data().
        has_ch10 = any('CH10' in ft.upper() or 'TMATS' in ft.upper()
                       for ft in flow.frame_types.keys())

        if not has_ch10:
            self.plot_widget.clear_plot()
            self.viz_status_label.setText("Selected flow does not contain Chapter 10 telemetry data")
            self.viz_status_label.setStyleSheet("color: orange; font-style: italic;")
            return

        flow_packets = self.get_flow_packets(flow)

        if not flow_packets:
            self.plot_widget.clear_plot()
            self.viz_status_label.setText("No packets found for selected flow")
            self.viz_status_label.setStyleSheet("color: red; font-style: italic;")
            return

        # Use the shared signal visualizer to extract and process signals.
        from ..utils.signal_visualizer import signal_visualizer

        flow_key = f"{flow.src_ip} → {flow.dst_ip}"
        self.status_bar.showMessage(f"Extracting signals from flow {flow_key}...")
        self.viz_status_label.setText("Processing Chapter 10 telemetry data...")
        self.viz_status_label.setStyleSheet("color: blue; font-style: italic;")

        try:
            tmats_metadata = signal_visualizer._extract_tmats_from_flow(flow_packets)
            signal_data_list = signal_visualizer._extract_signals_from_flow(flow_packets, tmats_metadata)

            self.plot_widget.plot_flow_signals(flow, signal_data_list, flow_key)

            if signal_data_list:
                self.status_bar.showMessage(f"Plotted {len(signal_data_list)} signal(s) from {flow_key}")
                self.viz_status_label.setText(f"Displaying {len(signal_data_list)} signal channel(s)")
                self.viz_status_label.setStyleSheet("color: green; font-weight: bold;")
            else:
                self.status_bar.showMessage(f"No decodable signal data found in {flow_key}")
                self.viz_status_label.setText("No decodable signal data found in selected flow")
                self.viz_status_label.setStyleSheet("color: orange; font-style: italic;")

        except Exception as e:
            # Surface errors inline in the status label rather than a modal popup.
            self.plot_widget.clear_plot()
            self.viz_status_label.setText(f"Visualization error: {str(e)}")
            self.viz_status_label.setStyleSheet("color: red; font-style: italic;")
            self.status_bar.showMessage("Visualization error")

    def get_flow_packets(self, flow: 'FlowStats') -> List:
        """Return all loaded packets whose IP src/dst match *flow*."""
        if not self.analyzer or not self.analyzer.all_packets:
            return []

        # FIX: import scapy once instead of per packet; degrade to an empty
        # result (previous behavior under the bare except) if it is missing.
        try:
            from scapy.all import IP
        except ImportError:
            return []

        flow_packets = []
        for packet in self.analyzer.all_packets:
            try:
                if hasattr(packet, 'haslayer') and packet.haslayer(IP):
                    ip_layer = packet[IP]
                    if ip_layer.src == flow.src_ip and ip_layer.dst == flow.dst_ip:
                        flow_packets.append(packet)
            except Exception:
                # Malformed/truncated packets must not abort the scan.
                continue

        return flow_packets

    def refresh_data(self):
        """Reload the currently open PCAP file."""
        if self.current_file:
            self.load_pcap_file(self.current_file)

    def closeEvent(self, event):
        """Stop any in-flight loading thread before closing."""
        if self.loading_thread and self.loading_thread.isRunning():
            self.loading_thread.quit()
            self.loading_thread.wait()

        event.accept()
|
||||
@@ -26,9 +26,16 @@ def main():
|
||||
help='Outlier detection threshold in standard deviations (default: 3.0)')
|
||||
parser.add_argument('--report', action='store_true',
|
||||
help='Generate comprehensive outlier report and exit (no TUI)')
|
||||
parser.add_argument('--gui', action='store_true',
|
||||
help='Launch GUI mode (requires PySide6)')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# Handle GUI mode
|
||||
if args.gui:
|
||||
launch_gui(args)
|
||||
return
|
||||
|
||||
if not args.pcap and not args.live:
|
||||
print("Error: Must specify either --pcap file or --live capture")
|
||||
sys.exit(1)
|
||||
@@ -114,8 +121,13 @@ def main():
|
||||
# Give capture a moment to start
|
||||
time.sleep(1)
|
||||
|
||||
# Run TUI
|
||||
curses.wrapper(tui.run)
|
||||
# Run TUI with error handling
|
||||
try:
|
||||
curses.wrapper(tui.run)
|
||||
except curses.error as e:
|
||||
print(f"\nTUI error: {e}")
|
||||
print("Falling back to console mode...")
|
||||
print_console_results(analyzer)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
print("\nCapture interrupted by user")
|
||||
@@ -124,7 +136,12 @@ def main():
|
||||
else:
|
||||
# PCAP analysis mode
|
||||
try:
|
||||
curses.wrapper(tui.run)
|
||||
try:
|
||||
curses.wrapper(tui.run)
|
||||
except curses.error as e:
|
||||
print(f"\nTUI error: {e}")
|
||||
print("Terminal doesn't support curses. Falling back to console mode...")
|
||||
print_console_results(analyzer)
|
||||
except KeyboardInterrupt:
|
||||
print("\nAnalysis interrupted by user")
|
||||
|
||||
@@ -306,5 +323,40 @@ def generate_outlier_report(analyzer: EthernetAnalyzer, threshold_sigma: float):
|
||||
print("=" * 80)
|
||||
|
||||
|
||||
def launch_gui(args):
    """Launch GUI mode.

    Creates the QApplication, shows the main window, optionally preloads the
    PCAP named in *args*, and enters the Qt event loop. Exits the process on
    missing GUI dependencies or any startup failure.
    """
    try:
        from .gui import GUI_AVAILABLE, StreamLensMainWindow
        from PySide6.QtWidgets import QApplication

        if not GUI_AVAILABLE:
            print("Error: PySide6 not available. Please install with: pip install PySide6")
            sys.exit(1)

        # Application-wide identity shown by the window manager.
        app = QApplication(sys.argv)
        app.setApplicationName("StreamLens")
        app.setApplicationDisplayName("StreamLens - Ethernet Traffic Analyzer")

        window = StreamLensMainWindow()
        window.show()

        # Preload the PCAP given on the command line, if any.
        if args.pcap:
            window.load_pcap_file(args.pcap)

        # Hand control to the Qt event loop; its return code is our exit code.
        sys.exit(app.exec())

    except ImportError as e:
        print(f"Error: GUI dependencies not available: {e}")
        print("Please install PySide6: pip install PySide6")
        sys.exit(1)
    except Exception as e:
        print(f"Error launching GUI: {e}")
        sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -7,6 +7,7 @@ from typing import TYPE_CHECKING
|
||||
|
||||
from .navigation import NavigationHandler
|
||||
from .panels import FlowListPanel, DetailPanel, TimelinePanel
|
||||
from ..utils.signal_visualizer import signal_visualizer
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..analysis.core import EthernetAnalyzer
|
||||
@@ -61,6 +62,8 @@ class TUIInterface:
|
||||
if self.analyzer.is_live:
|
||||
self.analyzer.stop_capture = True
|
||||
break
|
||||
elif action == 'visualize':
|
||||
self._handle_visualization()
|
||||
|
||||
def _draw_main_view(self, stdscr):
|
||||
"""Draw three-panel main view: flows list, details, and timeline"""
|
||||
@@ -183,4 +186,55 @@ class TUIInterface:
|
||||
self.analyzer.statistics_engine.get_max_sigma_deviation(x),
|
||||
x.frame_count
|
||||
), reverse=True)
|
||||
return flows_list
|
||||
return flows_list
|
||||
|
||||
def _handle_visualization(self):
    """Handle Chapter 10 signal visualization for the selected flow.

    Runs the shared signal_visualizer in TUI context (plots are saved to
    files, no GUI windows). All failures are non-fatal: the TUI keeps running.
    """
    flows_list = self._get_flows_list()

    if not flows_list or self.navigation.selected_flow >= len(flows_list):
        return

    selected_flow = flows_list[self.navigation.selected_flow]
    flow_key = f"{selected_flow.src_ip}->{selected_flow.dst_ip}"

    # Only Chapter 10 flows carry decodable telemetry signals.
    if not self.navigation.has_chapter10_data(selected_flow):
        return

    # Get packets for this flow
    flow_packets = self._get_flow_packets(selected_flow)

    if not flow_packets:
        return

    try:
        # Set TUI context so the visualizer saves plots instead of opening windows.
        signal_visualizer._in_tui_context = True
        # NOTE(review): printing while curses owns the terminal can garble the
        # display; the message is transient and overwritten on the next refresh.
        print(f"Generating signal visualization for flow {flow_key}...")
        signal_visualizer.visualize_flow_signals(selected_flow, flow_packets, gui_mode=False)
    except Exception as e:
        # Log the error but never let visualization take down the TUI.
        # (FIX: removed dead `pass` that followed this print.)
        print(f"Visualization error: {e}")
|
||||
|
||||
def _get_flow_packets(self, flow):
    """Return all captured packets whose IP src/dst match *flow*.

    Packets without an IP layer, non-scapy objects, and malformed packets
    are skipped silently.
    """
    # FIX: import scapy once instead of per packet; degrade to an empty
    # result (previous behavior under the bare except) if it is missing.
    try:
        from scapy.all import IP
    except ImportError:
        return []

    flow_packets = []

    for packet in self.analyzer.all_packets:
        try:
            if hasattr(packet, 'haslayer') and packet.haslayer(IP):
                ip_layer = packet[IP]
                if ip_layer.src == flow.src_ip and ip_layer.dst == flow.dst_ip:
                    flow_packets.append(packet)
        except Exception:
            # Malformed/truncated packets must not abort the scan.
            continue

    return flow_packets
|
||||
@@ -55,6 +55,8 @@ class NavigationHandler:
|
||||
max_items = self._get_total_display_items(flows_list)
|
||||
self.selected_flow = max_items - 1
|
||||
return 'selection_change'
|
||||
elif key == ord('v') and self.current_view == 'main': # Visualize Chapter 10 signals
|
||||
return 'visualize'
|
||||
|
||||
return 'none'
|
||||
|
||||
@@ -70,8 +72,21 @@ class NavigationHandler:
|
||||
"""Get status bar text based on current view"""
|
||||
if self.current_view == 'main':
|
||||
timeline_status = "ON" if self.show_timeline else "OFF"
|
||||
return f"[↑↓]navigate [PgUp/PgDn]scroll [t]imeline:{timeline_status} [d]issection [q]uit"
|
||||
return f"[↑↓]navigate [PgUp/PgDn]scroll [t]imeline:{timeline_status} [v]isualize CH10 [d]issection [q]uit"
|
||||
elif self.current_view == 'dissection':
|
||||
return "[m]ain view [q]uit"
|
||||
else:
|
||||
return "[m]ain [d]issection [q]uit"
|
||||
return "[m]ain [d]issection [q]uit"
|
||||
|
||||
def has_chapter10_data(self, flow: "FlowStats") -> bool:
    """Return True when *flow* carries IRIG-106 Chapter 10 traffic."""
    # Frame-type names mentioning CH10 or TMATS mark Chapter 10 flows.
    if any('CH10' in name.upper() or 'TMATS' in name.upper()
           for name in flow.frame_types.keys()):
        return True

    # Otherwise fall back to the protocol types detected during analysis.
    detected = flow.detected_protocol_types
    return 'CHAPTER10' in detected or 'CH10' in detected
|
||||
@@ -4,5 +4,6 @@ Utility modules for the Ethernet Traffic Analyzer
|
||||
|
||||
from .pcap_loader import PCAPLoader
|
||||
from .live_capture import LiveCapture
|
||||
from .signal_visualizer import SignalVisualizer, signal_visualizer
|
||||
|
||||
__all__ = ['PCAPLoader', 'LiveCapture']
|
||||
__all__ = ['PCAPLoader', 'LiveCapture', 'SignalVisualizer', 'signal_visualizer']
|
||||
@@ -20,11 +20,27 @@ class PCAPLoader:
|
||||
self._packet_count: Optional[int] = None
|
||||
|
||||
def load_all(self) -> "List[Packet]":
    """Load all packets from the PCAP file with memory management.

    Warns on files larger than 100 MB, records the packet count on self,
    and wraps any failure (including MemoryError) in an IOError.
    """
    try:
        import gc
        import os

        # Check file size and warn for large files.
        file_size = os.path.getsize(self.file_path)
        size_mb = file_size / (1024 * 1024)
        if file_size > 100 * 1024 * 1024:  # > 100MB
            print(f"Warning: Large PCAP file ({size_mb:.1f} MB)")
            print("Consider using streaming mode for better memory management")

        # Reclaim what we can before the big allocation.
        gc.collect()

        packets = rdpcap(self.file_path)
        self._packet_count = len(packets)

        print(f"Loaded {len(packets)} packets ({size_mb:.1f} MB)")
        return packets

    except MemoryError:
        raise IOError(f"Out of memory loading PCAP file {self.file_path}. Try using streaming mode.")
    except Exception as e:
        raise IOError(f"Error loading PCAP file {self.file_path}: {e}")
|
||||
|
||||
|
||||
620
analyzer/utils/signal_visualizer.py
Normal file
620
analyzer/utils/signal_visualizer.py
Normal file
@@ -0,0 +1,620 @@
|
||||
"""
|
||||
Signal visualization module for Chapter 10 telemetry data
|
||||
"""
|
||||
|
||||
import threading
|
||||
import queue
|
||||
import time
|
||||
from typing import Dict, List, Optional, Tuple, Any, TYPE_CHECKING
|
||||
from dataclasses import dataclass
|
||||
import struct
|
||||
|
||||
try:
|
||||
import numpy as np
|
||||
except ImportError:
|
||||
print("Error: numpy library required. Install with: pip install numpy")
|
||||
import sys
|
||||
sys.exit(1)
|
||||
|
||||
# Lazy-loading state: matplotlib is imported only when first needed.
# MATPLOTLIB_AVAILABLE is tri-state: None = not yet attempted.
MATPLOTLIB_AVAILABLE = None
matplotlib = None
plt = None
animation = None


def _ensure_matplotlib_loaded(force_backend=None):
    """Lazily import matplotlib and select a usable backend.

    Args:
        force_backend: backend name to force (e.g. for GUI mode); when None,
            Agg/TkAgg/Qt5Agg are tried in order (Agg first so TUI mode never
            opens GUI windows).

    Returns:
        bool: True when matplotlib is ready; False (cached) otherwise.
    """
    global MATPLOTLIB_AVAILABLE, matplotlib, plt, animation

    # Return the cached outcome of a previous attempt.
    if MATPLOTLIB_AVAILABLE is not None:
        return MATPLOTLIB_AVAILABLE

    try:
        # Honor the opt-out environment variable.
        import os
        if os.environ.get('STREAMLENS_DISABLE_VISUALIZATION'):
            raise ImportError("Visualization disabled via environment variable")

        import matplotlib as mpl
        matplotlib = mpl

        if force_backend:
            matplotlib.use(force_backend)
        else:
            # Try backends in order of preference for TUI mode.
            failures = []
            for backend in ('Agg', 'TkAgg', 'Qt5Agg'):
                try:
                    matplotlib.use(backend)
                    break
                except Exception as exc:
                    failures.append(exc)
            else:
                raise ImportError(
                    f"No suitable matplotlib backend available. "
                    f"Tried Agg ({failures[0]}), TkAgg ({failures[1]}), Qt5Agg ({failures[2]})"
                )

        import matplotlib.pyplot as mplot
        import matplotlib.animation as manim
        plt = mplot
        animation = manim

        MATPLOTLIB_AVAILABLE = True
        return True

    except ImportError as e:
        MATPLOTLIB_AVAILABLE = False
        print(f"Warning: matplotlib not available: {e}. Signal visualization disabled.")
        return False
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..models.flow_stats import FlowStats
|
||||
from scapy.all import Packet
|
||||
|
||||
|
||||
@dataclass
class TMATSMetadata:
    """TMATS metadata for signal reconstruction.

    Attributes:
        channel_configs: per-channel TMATS attributes, keyed by channel id.
        sample_rate: sampling rate in Hz, when the TMATS text specifies one.
        data_format: data-format identifier from TMATS, when present.
        channels: channel ids; defaults to the keys of channel_configs.
    """
    channel_configs: Dict[str, Dict[str, Any]]
    sample_rate: Optional[float] = None
    data_format: Optional[str] = None
    # FIX: the default is None, so the annotation must be Optional.
    channels: Optional[List[str]] = None

    def __post_init__(self):
        # Derive the channel list from the configs when not given explicitly.
        if self.channels is None:
            self.channels = list(self.channel_configs.keys())
|
||||
|
||||
|
||||
@dataclass
class SignalData:
    """Represents decoded signal data"""
    # Sample times in seconds, one entry per sample (synthesized from the
    # sample rate by the decoder, not read from the packet stream).
    timestamps: np.ndarray
    channels: Dict[str, np.ndarray]  # channel_name -> data array
    # TMATS metadata used for scaling/naming; may be an empty TMATSMetadata.
    metadata: TMATSMetadata
    # Samples per second used to generate `timestamps`.
    sample_rate: float
|
||||
|
||||
|
||||
class TMATSParser:
|
||||
"""Parser for TMATS (Telemetry Attributes Transfer Standard) data"""
|
||||
|
||||
def __init__(self):
|
||||
self.tmats_data = {}
|
||||
|
||||
def parse_tmats_frame(self, payload: bytes) -> Optional[TMATSMetadata]:
|
||||
"""Parse TMATS data from Chapter 10 payload"""
|
||||
try:
|
||||
# Skip Chapter 10 header and look for ASCII text
|
||||
text_start = self._find_text_start(payload)
|
||||
if text_start is None:
|
||||
return None
|
||||
|
||||
text_data = payload[text_start:].decode('ascii', errors='ignore')
|
||||
return self._parse_tmats_text(text_data)
|
||||
|
||||
except Exception as e:
|
||||
print(f"TMATS parsing error: {e}")
|
||||
return None
|
||||
|
||||
def _find_text_start(self, payload: bytes) -> Optional[int]:
|
||||
"""Find start of ASCII text in payload"""
|
||||
# Look for patterns that indicate start of TMATS text
|
||||
for i in range(min(100, len(payload) - 10)):
|
||||
# Look for ASCII text with TMATS-style patterns
|
||||
try:
|
||||
sample = payload[i:i+20].decode('ascii', errors='strict')
|
||||
if '\\' in sample and (':' in sample or ';' in sample):
|
||||
return i
|
||||
except:
|
||||
continue
|
||||
return None
|
||||
|
||||
def _parse_tmats_text(self, text: str) -> TMATSMetadata:
|
||||
"""Parse TMATS text format"""
|
||||
channel_configs = {}
|
||||
sample_rate = None
|
||||
data_format = None
|
||||
|
||||
# Split into lines and parse key-value pairs
|
||||
lines = text.split('\\')[1:] # Split on backslash, skip first empty element
|
||||
|
||||
for line in lines:
|
||||
line = line.strip()
|
||||
if not line:
|
||||
continue
|
||||
|
||||
# Look for key-value pairs separated by colon or semicolon
|
||||
if ':' in line:
|
||||
key, value = line.split(':', 1)
|
||||
elif ';' in line:
|
||||
key, value = line.split(';', 1)
|
||||
else:
|
||||
continue
|
||||
|
||||
key = key.strip()
|
||||
value = value.strip().rstrip(';')
|
||||
|
||||
# Parse channel-specific parameters
|
||||
if key.startswith('R-'):
|
||||
# R-parameters are channel-related
|
||||
self._parse_channel_parameter(key, value, channel_configs)
|
||||
elif key.startswith('G-'):
|
||||
# G-parameters are global
|
||||
if 'SAMPLE' in key.upper() or 'RATE' in key.upper():
|
||||
try:
|
||||
sample_rate = float(value)
|
||||
except:
|
||||
pass
|
||||
elif 'FORMAT' in key.upper():
|
||||
data_format = value
|
||||
|
||||
return TMATSMetadata(
|
||||
channel_configs=channel_configs,
|
||||
sample_rate=sample_rate,
|
||||
data_format=data_format
|
||||
)
|
||||
|
||||
def _parse_channel_parameter(self, key: str, value: str, configs: Dict):
|
||||
"""Parse channel-specific TMATS parameters"""
|
||||
# Extract channel number/ID from key like "R-1\G" or "R-CH1\N"
|
||||
parts = key.split('\\')
|
||||
if len(parts) < 2:
|
||||
return
|
||||
|
||||
channel_part = parts[0] # e.g., "R-1" or "R-CH1"
|
||||
param_part = parts[1] # e.g., "G", "N", "EU"
|
||||
|
||||
# Extract channel identifier
|
||||
if channel_part.startswith('R-'):
|
||||
channel_id = channel_part[2:]
|
||||
else:
|
||||
return
|
||||
|
||||
if channel_id not in configs:
|
||||
configs[channel_id] = {}
|
||||
|
||||
# Map parameter codes to meaningful names
|
||||
param_map = {
|
||||
'G': 'gain',
|
||||
'N': 'name',
|
||||
'EU': 'units',
|
||||
'MN': 'min_value',
|
||||
'MX': 'max_value',
|
||||
'OF': 'offset',
|
||||
'FS': 'full_scale'
|
||||
}
|
||||
|
||||
param_name = param_map.get(param_part, param_part.lower())
|
||||
|
||||
# Try to convert numeric values
|
||||
try:
|
||||
if param_name in ['gain', 'min_value', 'max_value', 'offset', 'full_scale']:
|
||||
value = float(value)
|
||||
except:
|
||||
pass
|
||||
|
||||
configs[channel_id][param_name] = value
|
||||
|
||||
|
||||
class Chapter10SignalDecoder:
    """Decoder for Chapter 10 analog and PCM data.

    Converts raw Chapter 10 payload bytes into SignalData, applying TMATS
    gain/offset scaling and channel naming when metadata is available.
    """

    # Fallback sample rate (Hz) when TMATS metadata is absent or has no rate.
    DEFAULT_SAMPLE_RATE = 1000.0
    # Bytes of Chapter 10 packet header preceding the channel data.
    HEADER_SIZE = 24

    def __init__(self, tmats_metadata: Optional[TMATSMetadata] = None):
        self.tmats_metadata = tmats_metadata

    def _effective_sample_rate(self) -> float:
        """Return the TMATS sample rate when set, else DEFAULT_SAMPLE_RATE.

        Fixes a latent bug: the previous expression
        ``metadata.sample_rate if metadata else 1000.0`` returned None when
        metadata existed but carried no rate, crashing the timestamp math.
        """
        if self.tmats_metadata and self.tmats_metadata.sample_rate:
            return self.tmats_metadata.sample_rate
        return self.DEFAULT_SAMPLE_RATE

    def decode_analog_data(self, payload: bytes, channel_id: int, data_type: int) -> Optional[SignalData]:
        """Decode analog format data; returns None on error or unknown type."""
        try:
            # Skip the Chapter 10 header; anything shorter carries no data.
            data_start = self.HEADER_SIZE
            if len(payload) <= data_start:
                return None

            raw_data = payload[data_start:]

            if data_type == 0x72:  # Analog Format 2
                return self._decode_analog_format2(raw_data, channel_id)
            elif data_type in [0x73, 0x74, 0x75]:  # Other analog formats
                return self._decode_analog_generic(raw_data, channel_id, data_type)

            return None

        except Exception as e:
            print(f"Analog decode error: {e}")
            return None

    def decode_pcm_data(self, payload: bytes, channel_id: int) -> Optional[SignalData]:
        """Decode PCM format data; returns None on error."""
        try:
            data_start = self.HEADER_SIZE
            if len(payload) <= data_start:
                return None

            raw_data = payload[data_start:]

            # Basic PCM decoding - would need enhancing for specific formats.
            return self._decode_pcm_generic(raw_data, channel_id)

        except Exception as e:
            print(f"PCM decode error: {e}")
            return None

    def _decode_analog_format2(self, raw_data: bytes, channel_id: int) -> Optional[SignalData]:
        """Decode Analog Format 2 data (assumes little-endian 16-bit samples)."""
        if len(raw_data) < 4:
            return None

        try:
            # Trim to an even byte count: struct.unpack requires an exact
            # fit, and a trailing odd byte would raise struct.error.
            num_samples = len(raw_data) // 2
            samples = struct.unpack(f'<{num_samples}h', raw_data[:num_samples * 2])

            data_array = np.array(samples, dtype=np.float32)

            # Apply TMATS gain/offset scaling when this channel is configured.
            if self.tmats_metadata and str(channel_id) in self.tmats_metadata.channel_configs:
                config = self.tmats_metadata.channel_configs[str(channel_id)]
                gain = config.get('gain', 1.0)
                offset = config.get('offset', 0.0)
                data_array = data_array * gain + offset

            # Synthesize timestamps from the sample rate; no per-sample
            # timing is parsed from the packet itself.
            sample_rate = self._effective_sample_rate()
            timestamps = np.arange(len(data_array)) / sample_rate

            # Prefer the TMATS-configured channel name when available.
            channel_name = f"CH{channel_id}"
            if self.tmats_metadata and str(channel_id) in self.tmats_metadata.channel_configs:
                channel_name = self.tmats_metadata.channel_configs[str(channel_id)].get('name', channel_name)

            return SignalData(
                timestamps=timestamps,
                channels={channel_name: data_array},
                metadata=self.tmats_metadata or TMATSMetadata({}),
                sample_rate=sample_rate
            )

        except Exception as e:
            print(f"Format 2 decode error: {e}")
            return None

    def _decode_analog_generic(self, raw_data: bytes, channel_id: int, data_type: int) -> Optional[SignalData]:
        """Generic analog data decoder."""
        # Would be implemented per specific format requirements; fall back
        # to the Format 2 layout for now.
        return self._decode_analog_format2(raw_data, channel_id)

    def _decode_pcm_generic(self, raw_data: bytes, channel_id: int) -> Optional[SignalData]:
        """Generic PCM decoder (little-endian unsigned 16-bit samples)."""
        try:
            # Trim to an even byte count for the same reason as analog data.
            num_samples = len(raw_data) // 2
            samples = struct.unpack(f'<{num_samples}H', raw_data[:num_samples * 2])

            data_array = np.array(samples, dtype=np.float32)

            sample_rate = self._effective_sample_rate()
            timestamps = np.arange(len(data_array)) / sample_rate

            channel_name = f"PCM_CH{channel_id}"

            return SignalData(
                timestamps=timestamps,
                channels={channel_name: data_array},
                metadata=self.tmats_metadata or TMATSMetadata({}),
                sample_rate=sample_rate
            )

        except Exception as e:
            print(f"PCM decode error: {e}")
            return None
|
||||
|
||||
|
||||
class SignalVisualizer:
    """Thread-safe matplotlib-based signal visualizer for Chapter 10 data.

    Extracts TMATS metadata and signal samples from a flow's packets and
    renders them either interactively (GUI mode) or to PNG files (TUI /
    non-interactive backends).
    """

    def __init__(self):
        # flow_key -> matplotlib Figure for interactively shown plots.
        self.active_windows = {}
        # Cache of parsed TMATS metadata (placeholder for future reuse).
        self.tmats_cache = {}
        self.visualization_queue = queue.Queue()
        self._processing_visualizations = False
        self._force_file_output = False  # Can be set externally to force file output
        self._in_tui_context = False  # Track if we're in TUI context

    def visualize_flow_signals(self, flow: 'FlowStats', packets: List['Packet'], gui_mode: bool = False) -> None:
        """Visualize signals from a Chapter 10 flow.

        Args:
            flow: Flow statistics used to label the plot.
            packets: Scapy packets belonging to the flow.
            gui_mode: When True, load the Qt backend for interactive plots;
                otherwise use the default (non-interactive Agg) backend.
        """
        # Lazy-load matplotlib with a backend suited to the calling context.
        if gui_mode:
            if not _ensure_matplotlib_loaded('Qt5Agg'):
                print("Matplotlib not available - cannot visualize signals")
                return
        else:
            if not _ensure_matplotlib_loaded():
                print("Matplotlib not available - cannot visualize signals")
                return

        flow_key = f"{flow.src_ip}->{flow.dst_ip}"

        # TMATS metadata (when present in the flow) drives scaling/naming.
        tmats_metadata = self._extract_tmats_from_flow(packets)

        signal_data = self._extract_signals_from_flow(packets, tmats_metadata)

        if not signal_data:
            print(f"No decodable Chapter 10 signal data found in flow {flow_key}")
            return

        self._create_signal_window(flow_key, signal_data, flow)

    def _extract_tmats_from_flow(self, packets: List['Packet']) -> Optional[TMATSMetadata]:
        """Extract TMATS metadata from Chapter 10 packets in the flow."""
        # Imported lazily so scapy is only required when visualizing;
        # hoisted out of the loop so the import runs once per call.
        from scapy.all import Raw

        parser = TMATSParser()

        for packet in packets:
            if not hasattr(packet, 'haslayer') or not packet.haslayer('Raw'):
                continue

            raw_data = bytes(packet[Raw])

            # Cheap byte-level screen before attempting a full TMATS parse.
            if b'TMATS' in raw_data or b'R-' in raw_data:
                tmats_metadata = parser.parse_tmats_frame(raw_data)
                if tmats_metadata:
                    return tmats_metadata

        return None

    def _extract_signals_from_flow(self, packets: List['Packet'], tmats_metadata: Optional[TMATSMetadata]) -> List[SignalData]:
        """Extract signal data from Chapter 10 packets, consolidated by channel."""
        from scapy.all import Raw  # lazy scapy import, hoisted out of the loop

        decoder = Chapter10SignalDecoder(tmats_metadata)

        # (channel_id, data_type) -> list of per-packet SignalData objects.
        channel_data = {}

        for packet in packets:
            if not hasattr(packet, 'haslayer') or not packet.haslayer('Raw'):
                continue

            raw_data = bytes(packet[Raw])

            # Locate the Chapter 10 sync word; skip packets without one.
            ch10_offset = self._find_chapter10_sync(raw_data)
            if ch10_offset is None:
                continue

            try:
                # Need a complete 24-byte header to read channel/type fields.
                header_start = ch10_offset
                if len(raw_data) < header_start + 24:
                    continue

                channel_id = struct.unpack('<H', raw_data[header_start+2:header_start+4])[0]
                data_type = struct.unpack('<H', raw_data[header_start+12:header_start+14])[0]

                # Dispatch on data type; unknown types are skipped.
                if data_type in [0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78]:  # Analog formats
                    signal_data = decoder.decode_analog_data(raw_data[ch10_offset:], channel_id, data_type)
                elif data_type in [0x04, 0x08]:  # PCM formats
                    signal_data = decoder.decode_pcm_data(raw_data[ch10_offset:], channel_id)
                else:
                    continue

                if signal_data:
                    key = (channel_id, data_type)
                    if key not in channel_data:
                        channel_data[key] = []
                    channel_data[key].append(signal_data)

            except Exception:
                # Best-effort extraction: a malformed packet must not stop
                # the rest of the flow from being decoded.
                continue

        # Merge per-packet fragments into one continuous signal per channel.
        consolidated_signals = []
        for (channel_id, data_type), signal_list in channel_data.items():
            if not signal_list:
                continue

            consolidated_signal = self._consolidate_channel_signals(signal_list, channel_id, data_type)
            if consolidated_signal:
                consolidated_signals.append(consolidated_signal)

        return consolidated_signals

    def _consolidate_channel_signals(self, signal_list: List[SignalData], channel_id: int, data_type: int) -> Optional[SignalData]:
        """Merge multiple SignalData fragments from one channel into one
        continuous signal with a monotonically increasing timeline."""
        if not signal_list:
            return None

        # Use the first fragment's metadata/sample rate as the base.
        base_signal = signal_list[0]

        all_timestamps = []
        all_channel_data = {}

        for channel_name in base_signal.channels.keys():
            all_channel_data[channel_name] = []

        # Sort fragments by first timestamp to keep chronological order.
        signal_list.sort(key=lambda s: s.timestamps[0] if len(s.timestamps) > 0 else 0)

        time_offset = 0.0

        for i, signal_data in enumerate(signal_list):
            if i == 0:
                # First fragment keeps its own timestamps.
                all_timestamps.extend(signal_data.timestamps)
            else:
                # Later fragments are shifted to start one sample period
                # after the previous fragment's last timestamp.
                if len(all_timestamps) > 0:
                    time_offset = all_timestamps[-1] + (1.0 / signal_data.sample_rate)

                offset_timestamps = signal_data.timestamps + time_offset
                all_timestamps.extend(offset_timestamps)

            for channel_name, data in signal_data.channels.items():
                if channel_name in all_channel_data:
                    all_channel_data[channel_name].extend(data)

        consolidated_timestamps = np.array(all_timestamps)
        consolidated_channels = {}
        for channel_name, data_list in all_channel_data.items():
            if data_list:  # only channels that actually received data
                consolidated_channels[channel_name] = np.array(data_list)

        if not consolidated_channels:
            return None

        return SignalData(
            timestamps=consolidated_timestamps,
            channels=consolidated_channels,
            metadata=base_signal.metadata,
            sample_rate=base_signal.sample_rate
        )

    def _find_chapter10_sync(self, raw_data: bytes) -> Optional[int]:
        """Return the offset of the Chapter 10 sync pattern (0xEB25), or None."""
        sync_pattern = 0xEB25
        for offset in range(len(raw_data) - 1):
            if offset + 1 < len(raw_data):
                try:
                    word = struct.unpack('<H', raw_data[offset:offset+2])[0]
                    if word == sync_pattern:
                        return offset
                except struct.error:
                    # Short slice at the end of the buffer - nothing to match.
                    continue
        return None

    def _create_signal_window(self, flow_key: str, signal_data_list: List[SignalData], flow: 'FlowStats'):
        """Create the signal visualization, choosing file vs. interactive
        output based on the active matplotlib backend and TUI context."""
        backend = matplotlib.get_backend()

        if backend == 'Agg':
            # Non-interactive backend - always save to files.
            self._run_signal_window(flow_key, signal_data_list, flow)
        elif backend in ['TkAgg', 'Qt5Agg', 'Qt4Agg', 'GTKAgg', 'MacOSX']:
            if self._in_tui_context:
                # Interactive backend but the TUI owns the terminal/event
                # loop; render synchronously to files to avoid threading
                # issues with the GUI toolkit.
                print(f"Note: Interactive matplotlib backend detected ({backend})")
                print("Saving plots as files to avoid threading issues with TUI")
                self._run_signal_window(flow_key, signal_data_list, flow)
            else:
                # GUI mode - interactive display is safe here.
                self._run_signal_window(flow_key, signal_data_list, flow)
        else:
            # Other backends: render on a daemon thread.
            thread = threading.Thread(
                target=self._run_signal_window,
                args=(flow_key, signal_data_list, flow),
                daemon=True
            )
            thread.start()

    def _run_signal_window(self, flow_key: str, signal_data_list: List[SignalData], flow: 'FlowStats'):
        """Render the signals (thread-safe): one subplot per SignalData."""
        try:
            if not signal_data_list:
                print("No signal data to visualize")
                return

            fig, axes = plt.subplots(len(signal_data_list), 1, figsize=(12, 8))
            # subplots() returns a bare Axes (not a list) for a single row.
            if len(signal_data_list) == 1:
                axes = [axes]

            fig.suptitle(f'Chapter 10 Signals - Flow: {flow_key}', fontsize=14)

            for idx, signal_data in enumerate(signal_data_list):
                ax = axes[idx] if idx < len(axes) else axes[-1]

                for channel_name, data in signal_data.channels.items():
                    ax.plot(signal_data.timestamps, data, label=channel_name, linewidth=0.8)

                ax.set_xlabel('Time (s)')
                ax.set_ylabel('Amplitude')
                ax.grid(True, alpha=0.3)
                ax.legend()

                # Annotate the subplot with TMATS channel units when known.
                if signal_data.metadata and signal_data.metadata.channel_configs:
                    config_info = []
                    for ch_id, config in signal_data.metadata.channel_configs.items():
                        if 'units' in config:
                            config_info.append(f"CH{ch_id}: {config.get('units', 'Unknown')}")
                    if config_info:
                        ax.set_title(f"Channels: {', '.join(config_info)}")

            plt.tight_layout()

            backend = matplotlib.get_backend()
            if backend == 'Agg' or self._in_tui_context:
                # Save to file for non-interactive backend or TUI context.
                filename = f"signal_plot_{flow_key.replace('->', '_to_').replace('.', '_')}.png"
                plt.savefig(filename, dpi=300, bbox_inches='tight')
                # Bug fix: the message previously did not interpolate the
                # computed filename.
                print(f"Signal plot saved to {filename}")
                plt.close(fig)
            else:
                # Keep a reference so the window can be closed later, then
                # show interactively (GUI mode).
                self.active_windows[flow_key] = fig
                plt.show()

        except Exception as e:
            print(f"Signal visualization error: {e}")

    def close_flow_window(self, flow_key: str):
        """Close the visualization window for a flow, if one is open."""
        if flow_key in self.active_windows:
            plt.close(self.active_windows[flow_key])
            del self.active_windows[flow_key]

    def close_all_windows(self):
        """Close all open visualization windows."""
        for fig in self.active_windows.values():
            plt.close(fig)
        self.active_windows.clear()
|
||||
|
||||
|
||||
# Global visualizer instance
# Module-level singleton so TUI and GUI code paths share one visualizer
# (and therefore one set of open plot windows / caches).
signal_visualizer = SignalVisualizer()
|
||||
Reference in New Issue
Block a user