Enhanced Textual TUI with proper API usage and documentation
- Fixed DataTable row selection and event handling - Added explicit column keys to prevent auto-generated keys - Implemented row-to-flow mapping for reliable selection tracking - Converted left metrics panel to horizontal top bar - Fixed all missing FlowStats/EnhancedAnalysisData attributes - Created comprehensive Textual API documentation in Documentation/textual/ - Added validation checklist to prevent future API mismatches - Preserved cursor position during data refreshes - Fixed RowKey type handling and event names The TUI now properly handles flow selection, displays metrics in a compact top bar, and correctly correlates selected rows with the details pane.
This commit is contained in:
3
analyzer/tui/textual/widgets/__init__.py
Normal file
3
analyzer/tui/textual/widgets/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
"""
|
||||
Textual widgets for StreamLens TUI
|
||||
"""
|
||||
173
analyzer/tui/textual/widgets/flow_details.py
Normal file
173
analyzer/tui/textual/widgets/flow_details.py
Normal file
@@ -0,0 +1,173 @@
|
||||
"""
|
||||
Flow Details Panel - Detailed information for selected flow
|
||||
"""
|
||||
|
||||
from textual.widget import Widget
|
||||
from textual.containers import Vertical
|
||||
from textual.widgets import Static
|
||||
from rich.text import Text
|
||||
from rich.panel import Panel
|
||||
from rich.console import RenderableType, Group
|
||||
from rich.table import Table
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ....models import FlowStats
|
||||
|
||||
|
||||
class FlowDetailsPanel(Vertical):
    """
    Detailed flow information panel.

    Shows:
    - Flow identification
    - Enhanced decoder status
    - Timing analysis
    - Frame type breakdown
    - Quality metrics

    Content is rendered as a stack of rich Panels inside a single Static
    widget (id "details-content"); update_flow() swaps the renderable.
    """

    DEFAULT_CSS = """
    FlowDetailsPanel {
        height: 1fr;
        padding: 1;
    }

    FlowDetailsPanel Static {
        margin-bottom: 1;
    }
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Most recently displayed flow; None until update_flow() is called.
        self.current_flow: Optional['FlowStats'] = None

    def compose(self):
        """Create the details panel layout"""
        yield Static("Flow Details", classes="panel-header")
        # Placeholder shown until a flow is selected.
        yield Static(
            Panel("Select a flow to view details", border_style="dim"),
            id="details-content"
        )

    def update_flow(self, flow: Optional['FlowStats']) -> None:
        """Update panel with flow details.

        Passing None resets the panel back to the placeholder text.
        """
        self.current_flow = flow
        content_widget = self.query_one("#details-content", Static)

        if not flow:
            # No selection: restore the placeholder panel.
            content_widget.update(
                Panel("Select a flow to view details", border_style="dim")
            )
            return

        # Create detailed content
        details = self._create_flow_details(flow)
        content_widget.update(details)

    def _create_flow_details(self, flow: 'FlowStats') -> RenderableType:
        """Create comprehensive flow details display.

        Builds up to five sections (identification, enhanced analysis,
        timing, frame types, quality) and returns them as one rich Group.
        Sections that do not apply to the flow are omitted.
        """
        sections = []

        # Flow identification (always shown)
        id_table = Table(show_header=False, box=None, padding=0)
        id_table.add_column(style="dim", width=12)
        id_table.add_column()

        id_table.add_row("Source:", f"{flow.src_ip}:{flow.src_port}")
        id_table.add_row("Destination:", f"{flow.dst_ip}:{flow.dst_port}")
        id_table.add_row("Protocol:", flow.transport_protocol)
        id_table.add_row("Packets:", f"{flow.frame_count:,}")
        id_table.add_row("Volume:", self._format_bytes(flow.total_bytes))

        sections.append(Panel(id_table, title="Flow Information", border_style="blue"))

        # Enhanced analysis (only for flows handled by a specialized decoder)
        if flow.enhanced_analysis.decoder_type != "Standard":
            enhanced_table = Table(show_header=False, box=None, padding=0)
            enhanced_table.add_column(style="dim", width=12)
            enhanced_table.add_column()

            enhanced_table.add_row("Decoder:", flow.enhanced_analysis.decoder_type)
            enhanced_table.add_row("Quality:", f"{flow.enhanced_analysis.avg_frame_quality:.1f}%")
            enhanced_table.add_row("Fields:", str(flow.enhanced_analysis.field_count))

            if flow.enhanced_analysis.frame_types:
                # Show at most 3 frame types; summarize any overflow as "+N".
                types_str = ", ".join(list(flow.enhanced_analysis.frame_types)[:3])
                if len(flow.enhanced_analysis.frame_types) > 3:
                    types_str += f" +{len(flow.enhanced_analysis.frame_types) - 3}"
                enhanced_table.add_row("Types:", types_str)

            sections.append(Panel(enhanced_table, title="Enhanced Analysis", border_style="green"))

        # Timing analysis (always shown)
        timing_table = Table(show_header=False, box=None, padding=0)
        timing_table.add_column(style="dim", width=12)
        timing_table.add_column()

        # avg_inter_arrival / jitter are in seconds; display as milliseconds.
        timing_table.add_row("Duration:", f"{flow.duration:.2f}s")
        timing_table.add_row("Avg Interval:", f"{flow.avg_inter_arrival * 1000:.1f}ms")
        timing_table.add_row("Jitter:", f"{flow.jitter * 1000:.2f}ms")
        timing_table.add_row("First Seen:", self._format_timestamp(flow.first_seen))
        timing_table.add_row("Last Seen:", self._format_timestamp(flow.last_seen))

        sections.append(Panel(timing_table, title="Timing Analysis", border_style="cyan"))

        # Frame type breakdown (if multiple types)
        if len(flow.frame_types) > 1:
            frame_table = Table(show_header=True, box=None)
            frame_table.add_column("Type", style="blue")
            frame_table.add_column("Count", justify="right")
            frame_table.add_column("%", justify="right")

            total = flow.frame_count
            # Top 5 frame types by packet count, descending.
            for frame_type, stats in sorted(
                flow.frame_types.items(),
                key=lambda x: x[1].count,
                reverse=True
            )[:5]:
                percentage = (stats.count / total * 100) if total > 0 else 0
                frame_table.add_row(
                    frame_type[:15],
                    f"{stats.count:,}",
                    f"{percentage:.1f}%"
                )

            sections.append(Panel(frame_table, title="Frame Types", border_style="yellow"))

        # Quality metrics (only when there is something noteworthy to show)
        if flow.outlier_frames or flow.enhanced_analysis.decoder_type != "Standard":
            quality_lines = []

            if flow.outlier_frames:
                outlier_pct = len(flow.outlier_frames) / flow.frame_count * 100
                quality_lines.append(f"Outliers: {len(flow.outlier_frames)} ({outlier_pct:.1f}%)")

            if flow.enhanced_analysis.timing_accuracy:
                quality_lines.append(f"Timing: {flow.enhanced_analysis.timing_accuracy}")

            if flow.enhanced_analysis.signal_quality:
                quality_lines.append(f"Signal: {flow.enhanced_analysis.signal_quality:.1f}%")

            if quality_lines:
                quality_text = "\n".join(quality_lines)
                sections.append(Panel(quality_text, title="Quality Metrics", border_style="magenta"))

        return Group(*sections)

    def _format_bytes(self, bytes_count: int) -> str:
        """Format byte count with units (decimal GB/MB/KB, two decimals)."""
        if bytes_count >= 1_000_000_000:
            return f"{bytes_count / 1_000_000_000:.2f} GB"
        elif bytes_count >= 1_000_000:
            return f"{bytes_count / 1_000_000:.2f} MB"
        elif bytes_count >= 1_000:
            return f"{bytes_count / 1_000:.2f} KB"
        else:
            return f"{bytes_count} B"

    def _format_timestamp(self, timestamp: float) -> str:
        """Format an epoch timestamp as local HH:MM:SS.mmm for display."""
        import datetime
        dt = datetime.datetime.fromtimestamp(timestamp)
        return dt.strftime("%H:%M:%S.%f")[:-3]  # Show milliseconds
|
||||
339
analyzer/tui/textual/widgets/flow_table.py
Normal file
339
analyzer/tui/textual/widgets/flow_table.py
Normal file
@@ -0,0 +1,339 @@
|
||||
"""
|
||||
Flow Analysis Widget using Textual DataTable
|
||||
Hierarchical flow display with automatic formatting and responsive layout
|
||||
"""
|
||||
|
||||
from textual.widgets import DataTable, Static
|
||||
from textual.containers import Vertical
|
||||
from textual.reactive import reactive
|
||||
from typing import TYPE_CHECKING, List, Optional, Tuple
|
||||
from rich.text import Text
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ....analysis.core import EthernetAnalyzer
|
||||
from ....models import FlowStats
|
||||
|
||||
|
||||
class FlowAnalysisWidget(Vertical):
    """
    Enhanced Flow Analysis using Textual DataTable.

    Features:
    - Automatic column sizing and alignment
    - Hierarchical sub-rows for protocol breakdown
    - Rich text formatting with colors
    - Mouse and keyboard navigation
    - Real-time data updates

    Row keys follow the scheme "flow_N" for main rows and
    "flow_N_sub_M" for protocol-breakdown sub-rows; N indexes into
    ``self.flows_list``.
    """

    # Index into self.flows_list of the most recently selected flow.
    selected_flow_index = reactive(0)

    def __init__(self, analyzer: 'EthernetAnalyzer', **kwargs):
        super().__init__(**kwargs)
        self.analyzer = analyzer
        # Populated in compose(); None until then.
        self.flow_table = None
        # Flows in display order (parallel to the "flow_N" row keys).
        self.flows_list = []

    def compose(self):
        """Create the widget layout"""
        yield Static("FLOW ANALYSIS", id="flow-title")

        # Main flow data table
        flow_table = DataTable(id="flows-table")
        flow_table.cursor_type = "row"
        flow_table.zebra_stripes = True

        # Add columns with proper alignment
        flow_table.add_columns(
            "#",            # Flow number (right-aligned)
            "Source",       # IP:port (left-aligned)
            "Proto",        # Transport protocol (left-aligned)
            "Destination",  # IP:port (left-aligned)
            "Extended",     # Extended protocol (left-aligned)
            "Frame Type",   # Frame type (left-aligned)
            "Pkts",         # Packet count (right-aligned)
            "Volume",       # Data volume (right-aligned)
            "Timing",       # Inter-arrival timing (right-aligned)
            "Quality"       # Quality metric (right-aligned)
        )

        self.flow_table = flow_table
        yield flow_table

    def on_mount(self) -> None:
        """Initialize the widget when mounted"""
        self.refresh_data()

    def refresh_data(self) -> None:
        """Refresh the flow data in the table.

        Rebuilds all rows from the analyzer's current flows while
        preserving the cursor position (by row key when possible,
        otherwise by row index).
        """
        if not self.flow_table:
            return

        # Preserve cursor position
        cursor_row = self.flow_table.cursor_row
        cursor_column = self.flow_table.cursor_column
        selected_row_key = None
        if self.flow_table.rows and cursor_row < len(self.flow_table.rows):
            selected_row_key = list(self.flow_table.rows.keys())[cursor_row]

        # Clear existing data
        self.flow_table.clear()

        # Get updated flows list
        self.flows_list = self._get_flows_list()

        # Populate table with hierarchical data
        for i, flow in enumerate(self.flows_list):
            # Add main flow row
            main_row = self._create_flow_row(i + 1, flow)
            row_key = self.flow_table.add_row(*main_row, key=f"flow_{i}")

            # Mark enhanced flows with special styling
            if flow.enhanced_analysis.decoder_type != "Standard":
                # Note: DataTable doesn't have set_row_style, using CSS classes instead
                pass

            # Add sub-rows for protocol/frame type breakdown
            protocol_combinations = self._get_protocol_frame_combinations(flow)
            for j, (extended_proto, frame_type, count, percentage) in enumerate(protocol_combinations):
                sub_row = self._create_sub_row(flow, extended_proto, frame_type, count, percentage)
                sub_key = self.flow_table.add_row(*sub_row, key=f"flow_{i}_sub_{j}")
                # Note: DataTable doesn't have set_row_style, using CSS classes instead

        # Restore cursor position
        if selected_row_key and selected_row_key in self.flow_table.rows:
            row_index = list(self.flow_table.rows.keys()).index(selected_row_key)
            self.flow_table.move_cursor(row=row_index, column=cursor_column, animate=False)
        elif self.flow_table.row_count > 0:
            # If original selection not found, try to maintain row position
            new_row = min(cursor_row, self.flow_table.row_count - 1)
            self.flow_table.move_cursor(row=new_row, column=cursor_column, animate=False)

    def _create_flow_row(self, flow_num: int, flow: 'FlowStats') -> List[Text]:
        """Create main flow row with rich text formatting"""

        # Format source with potential truncation
        source = f"{flow.src_ip}:{flow.src_port}"
        source_text = Text(source[:22] + "..." if len(source) > 25 else source)

        # Transport protocol
        protocol_text = Text(flow.transport_protocol, style="bold cyan")

        # Format destination
        destination = f"{flow.dst_ip}:{flow.dst_port}"
        dest_text = Text(destination[:22] + "..." if len(destination) > 25 else destination)

        # Extended protocol summary
        combinations = self._get_protocol_frame_combinations(flow)
        extended_text = Text(f"{len(combinations)} types", style="yellow")

        # Frame type summary
        frame_text = Text("Mixed" if len(flow.frame_types) > 1 else "Single", style="blue")

        # Packet count (right-aligned)
        packets_text = Text(str(flow.frame_count), justify="right", style="white")

        # Volume with units (right-aligned)
        volume = self._format_bytes(flow.total_bytes)
        volume_text = Text(volume, justify="right", style="magenta")

        # Timing (right-aligned); avg_inter_arrival is seconds -> display ms/s
        if flow.avg_inter_arrival > 0:
            timing_ms = flow.avg_inter_arrival * 1000
            if timing_ms >= 1000:
                timing = f"{timing_ms/1000:.1f}s"
            else:
                timing = f"{timing_ms:.1f}ms"
        else:
            timing = "N/A"
        timing_text = Text(timing, justify="right", style="cyan")

        # Quality indicator (right-aligned)
        if flow.enhanced_analysis.decoder_type != "Standard":
            if flow.enhanced_analysis.avg_frame_quality > 0:
                quality = f"{flow.enhanced_analysis.avg_frame_quality:.0f}%"
                quality_style = "bold green"
            else:
                quality = "Enhanced"
                quality_style = "green"
        else:
            # Standard flows: flag when more than 5% of frames are outliers.
            outlier_pct = len(flow.outlier_frames) / flow.frame_count * 100 if flow.frame_count > 0 else 0
            if outlier_pct > 5:
                quality = f"{outlier_pct:.0f}% Out"
                quality_style = "red"
            else:
                quality = "Normal"
                quality_style = "green"

        quality_text = Text(quality, justify="right", style=quality_style)

        return [
            Text(str(flow_num), justify="right"),
            source_text,
            protocol_text,
            dest_text,
            extended_text,
            frame_text,
            packets_text,
            volume_text,
            timing_text,
            quality_text
        ]

    def _create_sub_row(self, flow: 'FlowStats', extended_proto: str, frame_type: str, count: int, percentage: float) -> List[Text]:
        """Create sub-row for protocol/frame type combination"""

        # Empty columns for inheritance from parent flow
        empty = Text("")

        # Extended protocol
        extended_text = Text(extended_proto if extended_proto != '-' else "", style="dim yellow")

        # Frame type
        frame_text = Text(frame_type, style="dim blue")

        # Packet count for this combination
        count_text = Text(str(count), justify="right", style="dim white")

        # Volume estimation: pro-rate the flow's total by packet share.
        volume_bytes = int(flow.total_bytes * (percentage / 100))
        volume = self._format_bytes(volume_bytes)
        volume_text = Text(volume, justify="right", style="dim magenta")

        # Timing for this frame type
        if frame_type in flow.frame_types and flow.frame_types[frame_type].avg_inter_arrival > 0:
            timing_ms = flow.frame_types[frame_type].avg_inter_arrival * 1000
            if timing_ms >= 1000:
                timing = f"{timing_ms/1000:.1f}s"
            else:
                timing = f"{timing_ms:.1f}ms"
        else:
            timing = "-"
        timing_text = Text(timing, justify="right", style="dim cyan")

        # Percentage as quality
        quality_text = Text(f"{percentage:.1f}%", justify="right", style="dim")

        return [
            empty,          # Flow number
            empty,          # Source
            empty,          # Protocol
            empty,          # Destination
            extended_text,  # Extended protocol
            frame_text,     # Frame type
            count_text,     # Packet count
            volume_text,    # Volume
            timing_text,    # Timing
            quality_text    # Percentage
        ]

    def _get_protocol_frame_combinations(self, flow: 'FlowStats') -> List[Tuple[str, str, int, float]]:
        """Get distinct extended protocol/frame type combinations for a flow.

        Returns (extended_proto, frame_type, count, percentage) tuples
        sorted by count descending.
        """
        combinations = []
        total_packets = flow.frame_count

        # Group frame types by extended protocol
        protocol_frames = {}

        if flow.frame_types:
            for frame_type, ft_stats in flow.frame_types.items():
                # Determine extended protocol for this frame type
                extended_proto = self._get_extended_protocol_for_frame(flow, frame_type)

                if extended_proto not in protocol_frames:
                    protocol_frames[extended_proto] = []

                protocol_frames[extended_proto].append((frame_type, ft_stats.count))
        else:
            # No frame types, just show the flow-level extended protocol
            extended_proto = self._get_extended_protocol(flow)
            protocol_frames[extended_proto] = [("General", total_packets)]

        # Convert to list of tuples with percentages
        for extended_proto, frame_list in protocol_frames.items():
            for frame_type, count in frame_list:
                percentage = (count / total_packets * 100) if total_packets > 0 else 0
                combinations.append((extended_proto, frame_type, count, percentage))

        # Sort by count (descending)
        combinations.sort(key=lambda x: x[2], reverse=True)
        return combinations

    def _get_extended_protocol_for_frame(self, flow: 'FlowStats', frame_type: str) -> str:
        """Get extended protocol for a specific frame type"""
        if frame_type.startswith('CH10') or frame_type == 'TMATS':
            return 'CH10'
        elif frame_type.startswith('PTP'):
            return 'PTP'
        elif frame_type == 'IENA':
            return 'IENA'
        elif frame_type == 'NTP':
            return 'NTP'
        else:
            # Fall back to the flow-level protocol detection.
            return self._get_extended_protocol(flow)

    def _get_extended_protocol(self, flow: 'FlowStats') -> str:
        """Get extended protocol (Chapter 10, PTP, IENA, etc.)"""
        if flow.detected_protocol_types:
            # Look for specialized protocols
            enhanced_protocols = {'CHAPTER10', 'CH10', 'PTP', 'IENA'}
            found_enhanced = flow.detected_protocol_types & enhanced_protocols
            if found_enhanced:
                protocol = list(found_enhanced)[0]
                # Simplify display names
                if protocol in ['CHAPTER10', 'CH10']:
                    return 'CH10'
                return protocol

        # Check for other common protocols
        if flow.detected_protocol_types and 'NTP' in flow.detected_protocol_types:
            return 'NTP'

        return '-'

    def _format_bytes(self, bytes_count: int) -> str:
        """Format byte count with appropriate units"""
        if bytes_count >= 1_000_000_000:
            return f"{bytes_count / 1_000_000_000:.1f}GB"
        elif bytes_count >= 1_000_000:
            return f"{bytes_count / 1_000_000:.1f}MB"
        elif bytes_count >= 1_000:
            return f"{bytes_count / 1_000:.1f}KB"
        else:
            return f"{bytes_count}B"

    def _get_flows_list(self) -> List['FlowStats']:
        """Get flows sorted by importance for flow analysis"""
        flows_list = list(self.analyzer.flows.values())

        # Sort by: Enhanced protocols first, then outliers, then packet count
        flows_list.sort(key=lambda x: (
            x.enhanced_analysis.decoder_type != "Standard",
            len(x.outlier_frames),
            x.frame_count
        ), reverse=True)

        return flows_list

    @staticmethod
    def _flow_index_from_key(key_value: Optional[str]) -> Optional[int]:
        """Parse a "flow_N" or "flow_N_sub_M" row key to the flow index N.

        Returns None when the key does not follow the scheme.
        """
        if not key_value or not key_value.startswith("flow_"):
            return None
        try:
            return int(key_value.split("_")[1])
        except (IndexError, ValueError):
            return None

    def get_selected_flow(self) -> Optional['FlowStats']:
        """Get currently selected flow.

        BUGFIX: sub-rows are interleaved with main flow rows, so the
        cursor row index cannot be used directly as an index into
        ``flows_list`` (the original did, returning the wrong flow as
        soon as any flow had sub-rows). Resolve the row key under the
        cursor back to its parent flow index instead.
        """
        if not self.flow_table or not self.flows_list:
            return None

        cursor_row = self.flow_table.cursor_row
        row_keys = list(self.flow_table.rows.keys())
        if not (0 <= cursor_row < len(row_keys)):
            return None

        # RowKey objects expose the original string via .value
        flow_index = self._flow_index_from_key(row_keys[cursor_row].value)
        if flow_index is not None and 0 <= flow_index < len(self.flows_list):
            return self.flows_list[flow_index]
        return None

    def on_data_table_row_selected(self, event: DataTable.RowSelected) -> None:
        """Handle row selection in the data table.

        BUGFIX: ``event.row_key`` is a Textual ``RowKey`` object, not a
        str; the original called ``.startswith``/``.split`` on it
        directly, which fails. Use ``row_key.value`` to recover the
        "flow_N"/"flow_N_sub_M" string passed to ``add_row``.
        """
        key_value = event.row_key.value if event.row_key is not None else None
        flow_index = self._flow_index_from_key(key_value)
        if flow_index is not None:
            self.selected_flow_index = flow_index
|
||||
418
analyzer/tui/textual/widgets/flow_table_v2.py
Normal file
418
analyzer/tui/textual/widgets/flow_table_v2.py
Normal file
@@ -0,0 +1,418 @@
|
||||
"""
|
||||
Enhanced Flow Table Widget - TipTop-inspired with inline visualizations
|
||||
"""
|
||||
|
||||
from textual.widgets import DataTable
|
||||
from textual.containers import Vertical
|
||||
from textual.reactive import reactive
|
||||
from textual.message import Message
|
||||
from typing import TYPE_CHECKING, List, Optional
|
||||
from rich.text import Text
|
||||
from rich.box import ROUNDED
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ....analysis.core import EthernetAnalyzer
|
||||
from ....models import FlowStats
|
||||
|
||||
|
||||
class EnhancedFlowTable(Vertical):
|
||||
"""
|
||||
Enhanced flow table with TipTop-style inline visualizations
|
||||
|
||||
Features:
|
||||
- Inline sparklines for packet rate
|
||||
- Bar charts for volume and quality
|
||||
- Color-coded rows based on status
|
||||
- Hierarchical sub-rows for protocol breakdown
|
||||
"""
|
||||
|
||||
DEFAULT_CSS = """
|
||||
EnhancedFlowTable {
|
||||
height: 1fr;
|
||||
}
|
||||
|
||||
EnhancedFlowTable DataTable {
|
||||
height: 1fr;
|
||||
scrollbar-gutter: stable;
|
||||
}
|
||||
"""
|
||||
|
||||
selected_flow_index = reactive(0)
|
||||
sort_key = reactive("flows")
|
||||
|
||||
def __init__(self, analyzer: 'EthernetAnalyzer', **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.analyzer = analyzer
|
||||
self.flows_list = []
|
||||
self.row_to_flow_map = {} # Map row keys to flow indices
|
||||
self.flow_metrics = {} # Store per-flow metrics history
|
||||
|
||||
def compose(self):
|
||||
"""Create the enhanced flow table"""
|
||||
# Table title with sort indicators
|
||||
yield DataTable(
|
||||
id="flows-data-table",
|
||||
cursor_type="row",
|
||||
zebra_stripes=True,
|
||||
show_header=True,
|
||||
show_row_labels=False
|
||||
)
|
||||
|
||||
def on_mount(self):
|
||||
"""Initialize the table"""
|
||||
table = self.query_one("#flows-data-table", DataTable)
|
||||
|
||||
# Add columns with explicit keys to avoid auto-generated keys
|
||||
table.add_column("#", width=3, key="num")
|
||||
table.add_column("Source", width=22, key="source")
|
||||
table.add_column("Proto", width=6, key="proto")
|
||||
table.add_column("Destination", width=22, key="dest")
|
||||
table.add_column("Extended", width=10, key="extended")
|
||||
table.add_column("Frame Type", width=12, key="frame_type")
|
||||
table.add_column("Rate", width=12, key="rate")
|
||||
table.add_column("Volume", width=12, key="volume")
|
||||
table.add_column("Quality", width=12, key="quality")
|
||||
table.add_column("Status", width=8, key="status")
|
||||
|
||||
self.refresh_data()
|
||||
|
||||
def refresh_data(self):
|
||||
"""Refresh flow table with enhanced visualizations"""
|
||||
table = self.query_one("#flows-data-table", DataTable)
|
||||
|
||||
# Preserve cursor position
|
||||
cursor_row = table.cursor_row
|
||||
cursor_column = table.cursor_column
|
||||
selected_row_key = None
|
||||
if table.rows and cursor_row < len(table.rows):
|
||||
selected_row_key = list(table.rows.keys())[cursor_row]
|
||||
|
||||
table.clear()
|
||||
|
||||
# Clear row mapping
|
||||
self.row_to_flow_map.clear()
|
||||
|
||||
# Get and sort flows
|
||||
self.flows_list = self._get_sorted_flows()
|
||||
|
||||
# Add flows with enhanced display
|
||||
for i, flow in enumerate(self.flows_list):
|
||||
# Track metrics for this flow
|
||||
flow_key = f"{flow.src_ip}:{flow.src_port}-{flow.dst_ip}:{flow.dst_port}"
|
||||
if flow_key not in self.flow_metrics:
|
||||
self.flow_metrics[flow_key] = {
|
||||
'rate_history': [],
|
||||
'last_packet_count': flow.frame_count,
|
||||
'last_update': flow.last_seen
|
||||
}
|
||||
|
||||
# Calculate current rate
|
||||
metrics = self.flow_metrics[flow_key]
|
||||
time_delta = flow.last_seen - metrics['last_update'] if metrics['last_update'] else 1
|
||||
packet_delta = flow.frame_count - metrics['last_packet_count']
|
||||
current_rate = packet_delta / max(time_delta, 0.1)
|
||||
|
||||
# Update metrics
|
||||
metrics['rate_history'].append(current_rate)
|
||||
if len(metrics['rate_history']) > 10:
|
||||
metrics['rate_history'].pop(0)
|
||||
metrics['last_packet_count'] = flow.frame_count
|
||||
metrics['last_update'] = flow.last_seen
|
||||
|
||||
# Create row with visualizations
|
||||
row_data = self._create_enhanced_row(i + 1, flow, metrics)
|
||||
row_key = table.add_row(*row_data, key=f"flow_{i}")
|
||||
|
||||
# Map row key to flow index
|
||||
self.row_to_flow_map[row_key] = i
|
||||
|
||||
# Apply row styling based on status
|
||||
style = self._get_flow_style(flow)
|
||||
if style:
|
||||
# Note: DataTable doesn't have set_row_style, using CSS classes instead
|
||||
pass
|
||||
|
||||
# Add sub-rows for protocol breakdown
|
||||
if self._should_show_subrows(flow):
|
||||
sub_rows = self._create_protocol_subrows(flow)
|
||||
for j, sub_row in enumerate(sub_rows):
|
||||
sub_key = table.add_row(*sub_row, key=f"flow_{i}_sub_{j}")
|
||||
# Map sub-row to parent flow
|
||||
self.row_to_flow_map[sub_key] = i
|
||||
|
||||
# Restore cursor position
|
||||
if selected_row_key and selected_row_key in table.rows:
|
||||
row_index = list(table.rows.keys()).index(selected_row_key)
|
||||
table.move_cursor(row=row_index, column=cursor_column, animate=False)
|
||||
elif table.row_count > 0:
|
||||
# If original selection not found, try to maintain row position
|
||||
new_row = min(cursor_row, table.row_count - 1)
|
||||
table.move_cursor(row=new_row, column=cursor_column, animate=False)
|
||||
|
||||
def _create_enhanced_row(self, num: int, flow: 'FlowStats', metrics: dict) -> List[Text]:
|
||||
"""Create enhanced row with inline visualizations"""
|
||||
# Flow number
|
||||
num_text = Text(str(num), justify="right")
|
||||
|
||||
# Source (truncated if needed)
|
||||
source = f"{flow.src_ip}:{flow.src_port}"
|
||||
source_text = Text(source[:20] + "..." if len(source) > 22 else source)
|
||||
|
||||
# Protocol with color
|
||||
proto_text = Text(flow.transport_protocol, style="bold cyan")
|
||||
|
||||
# Destination
|
||||
dest = f"{flow.dst_ip}:{flow.dst_port}"
|
||||
dest_text = Text(dest[:20] + "..." if len(dest) > 22 else dest)
|
||||
|
||||
# Extended protocol
|
||||
extended = self._get_extended_protocol(flow)
|
||||
extended_text = Text(extended, style="yellow" if extended != "-" else "dim")
|
||||
|
||||
# Frame type summary
|
||||
frame_summary = self._get_frame_summary(flow)
|
||||
frame_text = Text(frame_summary, style="blue")
|
||||
|
||||
# Rate with sparkline
|
||||
rate_spark = self._create_rate_sparkline(metrics['rate_history'])
|
||||
rate_text = Text(f"{metrics['rate_history'][-1]:.0f} {rate_spark}")
|
||||
|
||||
# Volume with bar chart
|
||||
volume_bar = self._create_volume_bar(flow.total_bytes)
|
||||
volume_value = self._format_bytes(flow.total_bytes)
|
||||
volume_text = Text(f"{volume_value:>6} {volume_bar}")
|
||||
|
||||
# Quality with bar chart and color
|
||||
quality_bar, quality_color = self._create_quality_bar(flow)
|
||||
quality_value = self._get_quality_score(flow)
|
||||
quality_text = Text(f"{quality_value:>3}% {quality_bar}", style=quality_color)
|
||||
|
||||
# Status indicator
|
||||
status = self._get_flow_status(flow)
|
||||
status_color = {
|
||||
"Normal": "green",
|
||||
"Enhanced": "bold green",
|
||||
"Warning": "yellow",
|
||||
"Alert": "red"
|
||||
}.get(status, "white")
|
||||
status_text = Text(status, style=status_color)
|
||||
|
||||
return [
|
||||
num_text, source_text, proto_text, dest_text,
|
||||
extended_text, frame_text, rate_text, volume_text,
|
||||
quality_text, status_text
|
||||
]
|
||||
|
||||
def _create_rate_sparkline(self, history: List[float]) -> str:
|
||||
"""Create mini sparkline for rate"""
|
||||
if not history:
|
||||
return "─" * 4
|
||||
|
||||
spark_chars = " ▁▂▃▄▅▆▇█"
|
||||
data_min = min(history) if history else 0
|
||||
data_max = max(history) if history else 1
|
||||
|
||||
if data_max == data_min:
|
||||
return "▄" * 4
|
||||
|
||||
result = []
|
||||
for value in history[-4:]: # Last 4 values
|
||||
normalized = (value - data_min) / (data_max - data_min)
|
||||
char_index = int(normalized * 8)
|
||||
result.append(spark_chars[char_index])
|
||||
|
||||
return "".join(result)
|
||||
|
||||
def _create_volume_bar(self, bytes_count: int) -> str:
|
||||
"""Create bar chart for volume"""
|
||||
# Scale to GB for comparison
|
||||
gb = bytes_count / 1_000_000_000
|
||||
|
||||
# Create bar (max 5 chars)
|
||||
if gb >= 10:
|
||||
return "█████"
|
||||
elif gb >= 1:
|
||||
filled = int(gb / 2)
|
||||
return "█" * filled + "░" * (5 - filled)
|
||||
else:
|
||||
# For smaller volumes, show at least one bar
|
||||
mb = bytes_count / 1_000_000
|
||||
if mb >= 100:
|
||||
return "█░░░░"
|
||||
else:
|
||||
return "▌░░░░"
|
||||
|
||||
def _create_quality_bar(self, flow: 'FlowStats') -> tuple[str, str]:
|
||||
"""Create quality bar chart with color"""
|
||||
quality = self._get_quality_score(flow)
|
||||
|
||||
# Create bar (5 chars)
|
||||
filled = int(quality / 20) # 0-100 -> 0-5
|
||||
bar = "█" * filled + "░" * (5 - filled)
|
||||
|
||||
# Determine color
|
||||
if quality >= 90:
|
||||
color = "green"
|
||||
elif quality >= 70:
|
||||
color = "yellow"
|
||||
else:
|
||||
color = "red"
|
||||
|
||||
return bar, color
|
||||
|
||||
def _get_quality_score(self, flow: 'FlowStats') -> int:
|
||||
"""Calculate quality score for flow"""
|
||||
if flow.enhanced_analysis.decoder_type != "Standard":
|
||||
return int(flow.enhanced_analysis.avg_frame_quality)
|
||||
else:
|
||||
# Base quality on outlier percentage
|
||||
outlier_pct = len(flow.outlier_frames) / flow.frame_count * 100 if flow.frame_count > 0 else 0
|
||||
return max(0, int(100 - outlier_pct * 10))
|
||||
|
||||
def _get_flow_status(self, flow: 'FlowStats') -> str:
|
||||
"""Determine flow status"""
|
||||
if flow.enhanced_analysis.decoder_type != "Standard":
|
||||
return "Enhanced"
|
||||
elif len(flow.outlier_frames) > flow.frame_count * 0.1:
|
||||
return "Alert"
|
||||
elif len(flow.outlier_frames) > 0:
|
||||
return "Warning"
|
||||
else:
|
||||
return "Normal"
|
||||
|
||||
def _get_flow_style(self, flow: 'FlowStats') -> Optional[str]:
|
||||
"""Get styling for flow row"""
|
||||
status = self._get_flow_status(flow)
|
||||
if status == "Enhanced":
|
||||
return "bold"
|
||||
elif status == "Alert":
|
||||
return "bold red"
|
||||
elif status == "Warning":
|
||||
return "yellow"
|
||||
return None
|
||||
|
||||
def _should_show_subrows(self, flow: 'FlowStats') -> bool:
|
||||
"""Determine if flow should show protocol breakdown"""
|
||||
# Show subrows for flows with multiple frame types or enhanced analysis
|
||||
return (len(flow.frame_types) > 1 or
|
||||
flow.enhanced_analysis.decoder_type != "Standard")
|
||||
|
||||
def _create_protocol_subrows(self, flow: 'FlowStats') -> List[List[Text]]:
    """Build indented breakdown rows (at most three) for a flow.

    Each sub-row mirrors the main table's column layout; identifying
    columns are left blank so the row reads as a child entry under
    its parent flow.
    """
    rows: List[List[Text]] = []

    # Largest combinations first; cap at three to keep the table compact.
    for extended_proto, frame_type, count, percentage in \
            self._get_protocol_frame_combinations(flow)[:3]:
        rows.append([
            Text(""),  # flow number
            Text(""),  # source
            Text(""),  # protocol
            Text(""),  # destination
            Text(f" └─ {extended_proto}", style="dim yellow"),
            Text(frame_type, style="dim blue"),
            Text(f"{count}", style="dim", justify="right"),
            Text(f"{percentage:.0f}%", style="dim"),
            Text(""),  # quality
            Text(""),  # status
        ])

    return rows
||||
def _get_sorted_flows(self) -> List['FlowStats']:
|
||||
"""Get flows sorted by current sort key"""
|
||||
flows = list(self.analyzer.flows.values())
|
||||
|
||||
if self.sort_key == "packets":
|
||||
flows.sort(key=lambda x: x.frame_count, reverse=True)
|
||||
elif self.sort_key == "volume":
|
||||
flows.sort(key=lambda x: x.total_bytes, reverse=True)
|
||||
elif self.sort_key == "quality":
|
||||
flows.sort(key=lambda x: self._get_quality_score(x), reverse=True)
|
||||
else: # Default: sort by importance
|
||||
flows.sort(key=lambda x: (
|
||||
x.enhanced_analysis.decoder_type != "Standard",
|
||||
len(x.outlier_frames),
|
||||
x.frame_count
|
||||
), reverse=True)
|
||||
|
||||
return flows
|
||||
|
||||
def sort_by(self, key: str) -> None:
    """Switch the table ordering to *key* and redraw the table."""
    self.sort_key = key
    self.refresh_data()
||||
class FlowSelected(Message):
    """Posted when the highlighted flow changes.

    Attributes:
        flow: The newly selected FlowStats, or None when no row
            is selected.
    """

    def __init__(self, flow: Optional['FlowStats']) -> None:
        super().__init__()
        self.flow = flow
||||
def get_selected_flow(self) -> Optional['FlowStats']:
    """Return the FlowStats under the table cursor, or None.

    Resolution goes through ``row_to_flow_map`` (row key -> index into
    ``flows_list``) rather than trusting the cursor position alone, so
    selection survives refreshes that reorder rows.
    """
    table = self.query_one("#flows-data-table", DataTable)

    # Nothing selected, or the table is empty.
    if table.cursor_row is None or not table.rows:
        return None

    row_keys = list(table.rows.keys())
    if table.cursor_row >= len(row_keys):
        return None

    # Map the cursor's row key back to its flow.
    flow_idx = self.row_to_flow_map.get(row_keys[table.cursor_row])
    if flow_idx is None:
        return None
    if 0 <= flow_idx < len(self.flows_list):
        return self.flows_list[flow_idx]
    return None
||||
def on_data_table_row_highlighted(self, event: DataTable.RowHighlighted) -> None:
    """Relay cursor movement as a FlowSelected message for the details pane."""
    self.post_message(self.FlowSelected(self.get_selected_flow()))
||||
# Helper methods from original implementation
|
||||
def _get_extended_protocol(self, flow: 'FlowStats') -> str:
|
||||
"""Get extended protocol"""
|
||||
if flow.detected_protocol_types:
|
||||
enhanced_protocols = {'CHAPTER10', 'CH10', 'PTP', 'IENA'}
|
||||
found = flow.detected_protocol_types & enhanced_protocols
|
||||
if found:
|
||||
protocol = list(found)[0]
|
||||
return 'CH10' if protocol in ['CHAPTER10', 'CH10'] else protocol
|
||||
return '-'
|
||||
|
||||
def _get_frame_summary(self, flow: 'FlowStats') -> str:
|
||||
"""Get frame type summary"""
|
||||
if not flow.frame_types:
|
||||
return "General"
|
||||
elif len(flow.frame_types) == 1:
|
||||
return list(flow.frame_types.keys())[0][:11]
|
||||
else:
|
||||
return f"{len(flow.frame_types)} types"
|
||||
|
||||
def _get_protocol_frame_combinations(self, flow: 'FlowStats'):
|
||||
"""Get protocol/frame combinations"""
|
||||
combinations = []
|
||||
total = flow.frame_count
|
||||
|
||||
for frame_type, stats in flow.frame_types.items():
|
||||
extended = self._get_extended_protocol(flow)
|
||||
percentage = (stats.count / total * 100) if total > 0 else 0
|
||||
combinations.append((extended, frame_type, stats.count, percentage))
|
||||
|
||||
return sorted(combinations, key=lambda x: x[2], reverse=True)
|
||||
|
||||
def _format_bytes(self, bytes_count: int) -> str:
|
||||
"""Format byte count"""
|
||||
if bytes_count >= 1_000_000_000:
|
||||
return f"{bytes_count / 1_000_000_000:.1f}G"
|
||||
elif bytes_count >= 1_000_000:
|
||||
return f"{bytes_count / 1_000_000:.1f}M"
|
||||
elif bytes_count >= 1_000:
|
||||
return f"{bytes_count / 1_000:.1f}K"
|
||||
else:
|
||||
return f"{bytes_count}B"
|
||||
140
analyzer/tui/textual/widgets/metric_card.py
Normal file
140
analyzer/tui/textual/widgets/metric_card.py
Normal file
@@ -0,0 +1,140 @@
|
||||
"""
|
||||
Metric Card Widget - Compact metric display inspired by TipTop
|
||||
"""
|
||||
|
||||
from textual.widget import Widget
|
||||
from textual.reactive import reactive
|
||||
from rich.text import Text
|
||||
from rich.console import RenderableType
|
||||
from rich.panel import Panel
|
||||
from typing import Optional, Literal
|
||||
|
||||
|
||||
ColorType = Literal["normal", "success", "warning", "error"]
|
||||
TrendType = Literal["up", "down", "stable"]
|
||||
|
||||
|
||||
class MetricCard(Widget):
    """
    Compact metric display card with optional sparkline

    Features:
    - Title and value display
    - Color coding for status
    - Optional trend indicator
    - Optional inline sparkline
    """

    DEFAULT_CSS = """
    MetricCard {
        width: 1fr;
        height: 3;
        margin: 0 1;
    }

    MetricCard.success {
        border: solid $success;
    }

    MetricCard.warning {
        border: solid $warning;
    }

    MetricCard.error {
        border: solid $error;
    }
    """

    # Reactive so value/color changes trigger an automatic re-render.
    value = reactive("0")
    color = reactive("normal")

    # Status classes that carry a CSS border rule (see DEFAULT_CSS).
    _STATUS_CLASSES = ("success", "warning", "error")

    def __init__(
        self,
        title: str,
        value: str = "0",
        color: ColorType = "normal",
        trend: Optional[TrendType] = None,
        sparkline: bool = False,
        **kwargs
    ):
        """
        Args:
            title: Label shown above the value.
            value: Initial value string.
            color: Initial status color ("normal"/"success"/"warning"/"error").
            trend: Optional trend direction ("up"/"down"/"stable").
            sparkline: Whether to append an inline sparkline to the value.
        """
        super().__init__(**kwargs)
        self.title = title
        self.value = value
        self.color = color
        self.trend = trend
        self.sparkline = sparkline
        # Rolling window of recent samples for the inline sparkline.
        self.spark_data: list[float] = []

    def update_value(self, new_value: str) -> None:
        """Update the metric value"""
        self.value = new_value

    def update_color(self, new_color: ColorType) -> None:
        """Update the color status and the matching CSS class.

        Removes any previously applied status class first. The original
        implementation only ever added classes, so a card whose status
        changed accumulated several color classes and kept a stale border.
        """
        self.color = new_color
        for status in self._STATUS_CLASSES:
            if status != new_color:
                self.remove_class(status)
        # "normal" has no CSS rule, so only real statuses get a class.
        if new_color in self._STATUS_CLASSES:
            self.add_class(new_color)

    def add_spark_data(self, value: float) -> None:
        """Append a sparkline data point, keeping only the 10 most recent."""
        self.spark_data.append(value)
        if len(self.spark_data) > 10:
            self.spark_data.pop(0)

    def render(self) -> RenderableType:
        """Render the card: dim title line, then value + trend + sparkline."""
        color_map = {
            "normal": "white",
            "success": "green",
            "warning": "yellow",
            "error": "red"
        }
        style = color_map.get(self.color, "white")

        # Optional trend arrow after the value.
        trend_icon = ""
        if self.trend:
            trend_map = {
                "up": "↑",
                "down": "↓",
                "stable": "→"
            }
            trend_icon = f" {trend_map.get(self.trend, '')}"

        # Optional inline sparkline after the trend.
        spark_str = ""
        if self.sparkline and self.spark_data:
            spark_str = " " + self._create_mini_spark()

        content = Text()
        content.append(f"{self.title}\n", style="dim")
        content.append(f"{self.value}", style=f"bold {style}")
        content.append(trend_icon, style=style)
        content.append(spark_str, style="dim cyan")

        return Panel(
            content,
            height=3,
            border_style=style if self.color != "normal" else "dim"
        )

    def _create_mini_spark(self) -> str:
        """Create mini sparkline for inline display."""
        if not self.spark_data:
            return ""

        # 9 glyphs: blank + 8 block heights, indexed 0-8.
        spark_chars = " ▁▂▃▄▅▆▇█"
        data_min = min(self.spark_data)
        data_max = max(self.spark_data)

        if data_max == data_min:
            # Flat series: mid-height blocks instead of dividing by zero.
            return "▄" * len(self.spark_data)

        result = []
        for value in self.spark_data:
            normalized = (value - data_min) / (data_max - data_min)
            result.append(spark_chars[int(normalized * 8)])
        return "".join(result)
||||
69
analyzer/tui/textual/widgets/metrics_dashboard.py
Normal file
69
analyzer/tui/textual/widgets/metrics_dashboard.py
Normal file
@@ -0,0 +1,69 @@
|
||||
"""
|
||||
Statistical Analysis Widget - Metrics dashboard with real-time updates
|
||||
"""
|
||||
|
||||
from textual.widgets import Static, TabbedContent, TabPane, DataTable
|
||||
from textual.containers import Vertical, Horizontal
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ....analysis.core import EthernetAnalyzer
|
||||
|
||||
|
||||
class StatisticalAnalysisWidget(Vertical):
    """
    Statistical Analysis Dashboard

    Features:
    - Real-time metrics display
    - Performance analysis charts
    - Outlier detection
    - Export capabilities
    """

    def __init__(self, analyzer: 'EthernetAnalyzer', **kwargs):
        super().__init__(**kwargs)
        self.analyzer = analyzer

    @staticmethod
    def _build_table(table_id: str, *columns: str) -> DataTable:
        """Construct a DataTable with the given id and column headers."""
        table = DataTable(id=table_id)
        table.add_columns(*columns)
        return table

    def compose(self):
        """Create the statistics dashboard"""
        yield Static("STATISTICAL ANALYSIS", id="stats-title")

        # Compact metric counters along the top of the dashboard.
        with Horizontal(id="metrics-summary"):
            yield Static("Total Flows: 0", id="total-flows-metric")
            yield Static("Total Packets: 0", id="total-packets-metric")
            yield Static("Outliers: 0", id="outliers-metric")
            yield Static("Quality: 0%", id="quality-metric")

        # One tab per analysis mode, each backed by its own table.
        with TabbedContent():
            with TabPane("Performance", id="performance-tab"):
                yield self._build_table(
                    "performance-table", "Metric", "Value", "Threshold", "Status")

            with TabPane("Protocol Distribution", id="protocol-tab"):
                yield self._build_table(
                    "protocol-table", "Protocol", "Flows", "Packets", "Percentage")

            with TabPane("Timing Analysis", id="timing-tab"):
                yield self._build_table(
                    "timing-table", "Flow", "Min", "Max", "Avg", "Jitter")

            with TabPane("Quality Metrics", id="quality-tab"):
                yield self._build_table(
                    "quality-table", "Flow", "Enhanced", "Quality", "Outliers")

    def on_mount(self) -> None:
        """Populate the tables once the widget is attached to the DOM."""
        self.refresh_data()

    def refresh_data(self) -> None:
        """Refresh statistical analysis data"""
        # TODO: Implement statistics data refresh
        pass
||||
56
analyzer/tui/textual/widgets/packet_viewer.py
Normal file
56
analyzer/tui/textual/widgets/packet_viewer.py
Normal file
@@ -0,0 +1,56 @@
|
||||
"""
|
||||
Packet Decoder Widget - 3-panel packet inspection interface
|
||||
"""
|
||||
|
||||
from textual.widgets import Static, DataTable, Tree
|
||||
from textual.containers import Horizontal, Vertical
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ....analysis.core import EthernetAnalyzer
|
||||
|
||||
|
||||
class PacketDecoderWidget(Horizontal):
    """
    3-Panel Packet Decoder Interface

    Layout:
    - Left: Flow summary tree
    - Center: Packet list table
    - Right: Field details tree
    """

    def __init__(self, analyzer: 'EthernetAnalyzer', **kwargs):
        super().__init__(**kwargs)
        self.analyzer = analyzer

    def compose(self):
        """Create the 3-panel layout"""
        # Left panel: tree of captured flows.
        with Vertical(id="flow-summary-panel"):
            yield Static("Flow Summary", id="flow-summary-title")
            yield Tree("Flows", id="flow-tree")

        # Center panel: packet list for the selected flow.
        with Vertical(id="packet-list-panel"):
            yield Static("Packet Details", id="packet-list-title")
            table = DataTable(id="packet-table")
            table.add_columns("Time", "Src", "Dst", "Protocol", "Info")
            yield table

        # Right panel: per-field breakdown of the selected packet.
        with Vertical(id="field-details-panel"):
            yield Static("Field Analysis", id="field-details-title")
            yield Tree("Fields", id="field-tree")

    def on_mount(self) -> None:
        """Load initial data once attached to the DOM."""
        self.refresh_data()

    def refresh_data(self) -> None:
        """Refresh packet decoder data"""
        # TODO: Implement packet data refresh
        pass
||||
124
analyzer/tui/textual/widgets/sparkline.py
Normal file
124
analyzer/tui/textual/widgets/sparkline.py
Normal file
@@ -0,0 +1,124 @@
|
||||
"""
|
||||
Sparkline Widget - TipTop-style mini charts for real-time metrics
|
||||
"""
|
||||
|
||||
from textual.widget import Widget
|
||||
from textual.reactive import reactive
|
||||
from typing import List, Optional
|
||||
from rich.text import Text
|
||||
from rich.console import RenderableType
|
||||
from rich.panel import Panel
|
||||
|
||||
|
||||
class SparklineWidget(Widget):
    """
    ASCII sparkline chart widget inspired by TipTop

    Shows trend visualization using Unicode block characters:
    ▁▂▃▄▅▆▇█
    """

    DEFAULT_CSS = """
    SparklineWidget {
        height: 4;
        padding: 0 1;
    }
    """

    # always_update so re-assigning an equal list still triggers a render.
    data = reactive([], always_update=True)

    def __init__(
        self,
        title: str,
        data: Optional[List[float]] = None,
        height: int = 4,
        color: str = "cyan",
        **kwargs
    ):
        """
        Args:
            title: Label shown before the current value; also selects
                value formatting ("Flow Rate" / "Packet Rate" get units).
            data: Initial sample series (defaults to empty). The original
                annotation was ``List[float] = None``, which is incorrect
                for a None default; fixed to Optional.
            height: Panel height in rows.
            color: Rich border color for the panel.
        """
        super().__init__(**kwargs)
        self.title = title
        self.data = data or []
        self.height = height
        self.color = color
        # 9 glyphs: blank + 8 block heights, indexed 0-8.
        self.spark_chars = " ▁▂▃▄▅▆▇█"

    def update_data(self, new_data: List[float]) -> None:
        """Replace the sample series (triggers a re-render)."""
        self.data = new_data

    def render(self) -> RenderableType:
        """Render the panel: current value + trend arrow + sparkline strip."""
        if not self.data:
            return Panel(
                f"{self.title}: No data",
                height=self.height,
                border_style="dim"
            )

        sparkline = self._create_sparkline()

        current = self.data[-1] if self.data else 0
        trend = self._calculate_trend()

        # Title-specific units for the headline value.
        if self.title == "Flow Rate":
            current_str = f"{current:.0f} flows"
        elif self.title == "Packet Rate":
            current_str = f"{current:.1f} pps"
        else:
            current_str = f"{current:.1f}"

        lines = [
            f"{self.title}: {current_str} {trend}",
            "",
            sparkline
        ]

        return Panel(
            "\n".join(lines),
            height=self.height,
            border_style=self.color
        )

    def _create_sparkline(self) -> str:
        """Build the block-character strip from the last 40 samples.

        NOTE(review): min/max are computed over the full series but only
        the last 40 samples are drawn — confirm that is intended.
        """
        if len(self.data) < 2:
            return "─" * 40

        data_min = min(self.data)
        data_max = max(self.data)
        data_range = data_max - data_min

        if data_range == 0:
            # Flat series: draw a level line instead of dividing by zero.
            return "─" * min(len(self.data), 40)

        sparkline_chars = []
        for value in self.data[-40:]:
            # Normalize to the 0-8 range (9 spark characters).
            normalized = (value - data_min) / data_range
            sparkline_chars.append(self.spark_chars[int(normalized * 8)])

        return "".join(sparkline_chars)

    def _calculate_trend(self) -> str:
        """Arrow comparing the newest sample to the mean of the prior five."""
        if len(self.data) < 2:
            return ""

        current = self.data[-1]
        # Average of up to 5 preceding samples (fewer when data is short).
        prev_avg = sum(self.data[-6:-1]) / min(5, len(self.data) - 1)

        # ±10% dead band so noise does not flip the arrow constantly.
        if current > prev_avg * 1.1:
            return "↑"
        elif current < prev_avg * 0.9:
            return "↓"
        else:
            return "→"
Reference in New Issue
Block a user