"""
Enhanced Flow Table Widget - TipTop-inspired with inline visualizations
"""

from textual.widgets import DataTable
from textual.containers import Vertical
from textual.reactive import reactive
from textual.message import Message
from typing import TYPE_CHECKING, List, Optional
from rich.text import Text
from rich.box import ROUNDED

if TYPE_CHECKING:
    from ....analysis.core import EthernetAnalyzer
    from ....models import FlowStats


class EnhancedFlowTable(Vertical):
    """
    Enhanced flow table with TipTop-style inline visualizations

    Features:
    - Inline sparklines for packet rate
    - Bar charts for volume and quality
    - Color-coded rows based on status
    - Hierarchical sub-rows for protocol breakdown
    """

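    # Usage sketch (illustrative only; not part of this module's API). Assumes the
    # application has already built an EthernetAnalyzer and exposes it as
    # `self.app.analyzer`:
    #
    #     class FlowsScreen(Screen):
    #         def compose(self):
    #             yield EnhancedFlowTable(analyzer=self.app.analyzer, id="flow-table")
    #
    #         def on_mount(self):
    #             # Drive periodic refreshes from the parent, e.g. once per second.
    #             table = self.query_one(EnhancedFlowTable)
    #             self.set_interval(1.0, table.refresh_data)
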
    DEFAULT_CSS = """
    EnhancedFlowTable {
        height: 1fr;
        padding: 0;
        margin: 0;
    }

    EnhancedFlowTable DataTable {
        height: 1fr;
        scrollbar-gutter: stable;
        padding: 0;
        margin: 0;
    }
    """

    selected_flow_index = reactive(0)
    sort_key = reactive("flows")

    def __init__(self, analyzer: 'EthernetAnalyzer', **kwargs):
        super().__init__(**kwargs)
        self.analyzer = analyzer
        self.flows_list = []
        self.row_to_flow_map = {}     # Map row keys to flow indices
        self.row_to_subflow_map = {}  # Map row keys to (flow_index, subflow_type)
        self.flow_metrics = {}        # Store per-flow metrics history

    def compose(self):
        """Create the enhanced flow table"""
        yield DataTable(
            id="flows-data-table",
            cursor_type="row",
            zebra_stripes=True,
            show_header=True,
            show_row_labels=False,
        )

    def on_mount(self):
        """Initialize the table"""
        table = self.query_one("#flows-data-table", DataTable)

        # Compact columns optimized for data density
        table.add_column("#", width=2, key="num")
        table.add_column("Source", width=18, key="source")
        table.add_column("Proto", width=4, key="proto")
        table.add_column("Destination", width=18, key="dest")
        table.add_column("Extended", width=8, key="extended")
        table.add_column("Frame Type", width=10, key="frame_type")
        table.add_column("Pkts", width=6, key="rate")
        table.add_column("Size", width=8, key="volume")
        table.add_column("ΔT(ms)", width=8, key="delta_t")
        table.add_column("σ(ms)", width=8, key="sigma")
        table.add_column("Out", width=5, key="outliers")

        self.refresh_data()

    def refresh_data(self):
        """Refresh flow table with enhanced visualizations"""
        table = self.query_one("#flows-data-table", DataTable)

        # Preserve cursor and scroll positions
        cursor_row = table.cursor_row
        cursor_column = table.cursor_column
        scroll_x = table.scroll_x
        scroll_y = table.scroll_y
        selected_row_key = None
        if table.rows and cursor_row < len(table.rows):
            selected_row_key = list(table.rows.keys())[cursor_row]

        table.clear()

        # Clear row mappings
        self.row_to_flow_map.clear()
        self.row_to_subflow_map.clear()

        # Get and sort flows
        self.flows_list = self._get_sorted_flows()

        # Add flows with enhanced display
        for i, flow in enumerate(self.flows_list):
            # Track metrics for this flow
            flow_key = f"{flow.src_ip}:{flow.src_port}-{flow.dst_ip}:{flow.dst_port}"
            if flow_key not in self.flow_metrics:
                self.flow_metrics[flow_key] = {
                    'rate_history': [],
                    'last_packet_count': flow.frame_count,
                    'last_update': flow.last_seen
                }

            # Calculate current rate
            metrics = self.flow_metrics[flow_key]
            time_delta = flow.last_seen - metrics['last_update'] if metrics['last_update'] else 1
            packet_delta = flow.frame_count - metrics['last_packet_count']
            current_rate = packet_delta / max(time_delta, 0.1)

            # Update metrics
            metrics['rate_history'].append(current_rate)
            if len(metrics['rate_history']) > 10:
                metrics['rate_history'].pop(0)
            metrics['last_packet_count'] = flow.frame_count
            metrics['last_update'] = flow.last_seen

            # Create row with visualizations
            row_data = self._create_enhanced_row(i + 1, flow, metrics)
            row_key = table.add_row(*row_data, key=f"flow_{i}")

            # Map row key to flow index
            self.row_to_flow_map[row_key] = i

            # Apply row styling based on status
            style = self._get_flow_style(flow)
            if style:
                # Note: DataTable doesn't have set_row_style, using CSS classes instead
                pass

            # Add sub-rows for protocol breakdown
            if self._should_show_subrows(flow):
                sub_rows = self._create_protocol_subrows(flow)
                combinations = self._get_protocol_frame_combinations(flow)
                for j, sub_row in enumerate(sub_rows):
                    sub_key = table.add_row(*sub_row, key=f"flow_{i}_sub_{j}")
                    # Map sub-row to parent flow and subflow type
                    self.row_to_flow_map[sub_key] = i
                    if j < len(combinations):
                        _, frame_type, _, _ = combinations[j]
                        self.row_to_subflow_map[sub_key] = (i, frame_type)

        # Restore cursor position
        if selected_row_key and selected_row_key in table.rows:
            row_index = list(table.rows.keys()).index(selected_row_key)
            table.move_cursor(row=row_index, column=cursor_column, animate=False)
        elif table.row_count > 0:
            # If original selection not found, try to maintain row position
            new_row = min(cursor_row, table.row_count - 1)
            table.move_cursor(row=new_row, column=cursor_column, animate=False)

        # Restore scroll position
        table.scroll_to(x=scroll_x, y=scroll_y, animate=False)

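    # Note on the rate computation above: the per-flow rate is the number of packets
    # gained since the previous refresh divided by the change in the flow's last_seen
    # timestamp (clamped to at least 0.1 s). For example, 500 new packets observed
    # over a 2.0 s span append 500 / 2.0 = 250 pkt/s to rate_history, and only the
    # 10 most recent samples are kept for the sparkline.
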
    def _create_enhanced_row(self, num: int, flow: 'FlowStats', metrics: dict) -> List[Text]:
        """Create enhanced row with inline visualizations"""
        # Flow number
        num_text = Text(str(num), justify="right")

        # Source (truncated if needed)
        source = f"{flow.src_ip}:{flow.src_port}"
        source_text = Text(source[:20] + "..." if len(source) > 22 else source)

        # Protocol with color
        proto_text = Text(flow.transport_protocol, style="bold cyan")

        # Destination (truncated if needed)
        dest = f"{flow.dst_ip}:{flow.dst_port}"
        dest_text = Text(dest[:20] + "..." if len(dest) > 22 else dest)

        # Extended protocol
        extended = self._get_extended_protocol(flow)
        extended_text = Text(extended, style="yellow" if extended != "-" else "dim")

        # Frame type summary
        frame_summary = self._get_frame_summary(flow)
        frame_text = Text(frame_summary, style="blue")

        # Rate with sparkline
        rate_spark = self._create_rate_sparkline(metrics['rate_history'])
        rate_text = Text(f"{metrics['rate_history'][-1]:.0f} {rate_spark}")

        # Size with actual value
        size_value = self._format_bytes(flow.total_bytes)
        size_text = Text(f"{size_value:>8}")

        # Delta T (average time between packets in ms)
        if flow.avg_inter_arrival > 0:
            delta_t_ms = flow.avg_inter_arrival * 1000
            if delta_t_ms >= 1000:
                delta_t_str = f"{delta_t_ms / 1000:.1f}s"
            else:
                delta_t_str = f"{delta_t_ms:.1f}"
        else:
            delta_t_str = "N/A"
        delta_t_text = Text(delta_t_str, justify="right")

        # Sigma (standard deviation of inter-arrival time in ms)
        if flow.std_inter_arrival > 0:
            sigma_ms = flow.std_inter_arrival * 1000
            if sigma_ms >= 1000:
                sigma_str = f"{sigma_ms / 1000:.1f}s"
            else:
                sigma_str = f"{sigma_ms:.1f}"
        else:
            sigma_str = "N/A"
        sigma_text = Text(sigma_str, justify="right")

        # Outlier count (packets outside tolerance)
        outlier_count = len(flow.outlier_frames)
        outlier_text = Text(str(outlier_count), justify="right",
                            style="red" if outlier_count > 0 else "green")

        return [
            num_text, source_text, proto_text, dest_text,
            extended_text, frame_text, rate_text, size_text,
            delta_t_text, sigma_text, outlier_text
        ]

    def _create_rate_sparkline(self, history: List[float]) -> str:
        """Create mini sparkline for rate"""
        if not history:
            return "─" * 4

        spark_chars = " ▁▂▃▄▅▆▇█"
        data_min = min(history)
        data_max = max(history)

        if data_max == data_min:
            return "▄" * 4

        result = []
        for value in history[-4:]:  # Last 4 values
            normalized = (value - data_min) / (data_max - data_min)
            char_index = int(normalized * 8)
            result.append(spark_chars[char_index])

        return "".join(result)

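    # Worked example (illustrative): with history = [1, 5, 3, 8] the range is 1..8,
    # so the last four values normalize to indices 0, 4, 2 and 8 of the spark
    # character ramp, producing the string " ▄▂█".
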
    def _create_volume_bar(self, bytes_count: int) -> str:
        """Create bar chart for volume"""
        # Scale to GB for comparison
        gb = bytes_count / 1_000_000_000

        # Create bar (max 5 chars)
        if gb >= 10:
            return "█████"
        elif gb >= 1:
            # Always show at least one filled segment for flows in the 1-10 GB range
            filled = max(1, int(gb / 2))
            return "█" * filled + "░" * (5 - filled)
        else:
            # For smaller volumes, show at least one bar
            mb = bytes_count / 1_000_000
            if mb >= 100:
                return "█░░░░"
            else:
                return "▌░░░░"

    def _create_quality_bar(self, flow: 'FlowStats') -> tuple[str, str]:
        """Create quality bar chart with color"""
        quality = self._get_quality_score(flow)

        # Create bar (5 chars)
        filled = int(quality / 20)  # 0-100 -> 0-5
        bar = "█" * filled + "░" * (5 - filled)

        # Determine color
        if quality >= 90:
            color = "green"
        elif quality >= 70:
            color = "yellow"
        else:
            color = "red"

        return bar, color

    def _get_quality_score(self, flow: 'FlowStats') -> int:
        """Calculate quality score for flow"""
        if flow.enhanced_analysis.decoder_type != "Standard":
            return int(flow.enhanced_analysis.avg_frame_quality)
        else:
            # Base quality on outlier percentage
            outlier_pct = len(flow.outlier_frames) / flow.frame_count * 100 if flow.frame_count > 0 else 0
            return max(0, int(100 - outlier_pct * 10))

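    # For standard flows the score drops by 10 points per percent of outlier frames;
    # e.g. 5% outliers yields 100 - 5 * 10 = 50, and anything above 10% bottoms out at 0.
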
    def _get_flow_status(self, flow: 'FlowStats') -> str:
        """Determine flow status"""
        if flow.enhanced_analysis.decoder_type != "Standard":
            return "Enhanced"
        elif len(flow.outlier_frames) > flow.frame_count * 0.1:
            return "Alert"
        elif len(flow.outlier_frames) > 0:
            return "Warning"
        else:
            return "Normal"

    def _get_flow_style(self, flow: 'FlowStats') -> Optional[str]:
        """Get styling for flow row"""
        status = self._get_flow_status(flow)
        if status == "Enhanced":
            return "bold"
        elif status == "Alert":
            return "bold red"
        elif status == "Warning":
            return "yellow"
        return None

    def _should_show_subrows(self, flow: 'FlowStats') -> bool:
        """Determine if flow should show protocol breakdown"""
        # Show subrows for flows with multiple frame types or enhanced analysis
        return (len(flow.frame_types) > 1 or
                flow.enhanced_analysis.decoder_type != "Standard")

    def _create_protocol_subrows(self, flow: 'FlowStats') -> List[List[Text]]:
        """Create sub-rows for protocol/frame type breakdown"""
        subrows = []
        combinations = self._get_protocol_frame_combinations(flow)

        for extended_proto, frame_type, count, percentage in combinations[:3]:  # Max 3 subrows
            # Calculate timing for this frame type if available
            frame_delta_t = ""
            frame_sigma = ""
            frame_outliers = ""

            if frame_type in flow.frame_types:
                ft_stats = flow.frame_types[frame_type]
                if ft_stats.avg_inter_arrival > 0:
                    dt_ms = ft_stats.avg_inter_arrival * 1000
                    frame_delta_t = f"{dt_ms:.1f}" if dt_ms < 1000 else f"{dt_ms / 1000:.1f}s"
                if ft_stats.std_inter_arrival > 0:
                    sig_ms = ft_stats.std_inter_arrival * 1000
                    frame_sigma = f"{sig_ms:.1f}" if sig_ms < 1000 else f"{sig_ms / 1000:.1f}s"
                frame_outliers = str(len(ft_stats.outlier_frames))

            # Estimate this frame type's byte share from the flow's average frame size
            est_bytes = count * (flow.total_bytes // flow.frame_count) if flow.frame_count > 0 else 0

            subrow = [
                Text(""),  # Empty flow number
                Text(""),  # Empty source
                Text(""),  # Empty protocol
                Text(""),  # Empty destination
                Text(f" {extended_proto}", style="dim yellow"),
                Text(frame_type, style="dim blue"),
                Text(f"{count}", style="dim", justify="right"),
                Text(f"{self._format_bytes(est_bytes):>8}", style="dim"),
                Text(frame_delta_t, style="dim", justify="right"),
                Text(frame_sigma, style="dim", justify="right"),
                Text(frame_outliers, style="dim red" if frame_outliers and int(frame_outliers) > 0 else "dim", justify="right")
            ]
            subrows.append(subrow)

        return subrows

    def _get_sorted_flows(self) -> List['FlowStats']:
        """Get flows sorted by current sort key"""
        flows = list(self.analyzer.flows.values())

        if self.sort_key == "packets":
            flows.sort(key=lambda x: x.frame_count, reverse=True)
        elif self.sort_key == "volume":
            flows.sort(key=lambda x: x.total_bytes, reverse=True)
        elif self.sort_key == "quality":
            flows.sort(key=lambda x: self._get_quality_score(x), reverse=True)
        else:  # Default: sort by importance
            flows.sort(key=lambda x: (
                x.enhanced_analysis.decoder_type != "Standard",
                len(x.outlier_frames),
                x.frame_count
            ), reverse=True)

        return flows

    def sort_by(self, key: str):
        """Change sort order"""
        self.sort_key = key
        self.refresh_data()

    class FlowSelected(Message):
        """Message sent when a flow is selected"""

        def __init__(self, flow: Optional['FlowStats'], subflow_type: Optional[str] = None) -> None:
            self.flow = flow
            self.subflow_type = subflow_type
            super().__init__()

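    # Parent widgets typically receive FlowSelected through Textual's namespaced
    # handler convention (as with DataTable.RowHighlighted below), e.g.:
    #
    #     def on_enhanced_flow_table_flow_selected(self, event) -> None:
    #         self.show_flow_details(event.flow, event.subflow_type)
    #
    # where show_flow_details is a hypothetical method on the parent screen.
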
    def get_selected_flow(self) -> Optional['FlowStats']:
        """Get currently selected flow"""
        table = self.query_one("#flows-data-table", DataTable)
        if table.cursor_row is None or not table.rows:
            return None

        # Get the row key at cursor position
        row_keys = list(table.rows.keys())
        if table.cursor_row >= len(row_keys):
            return None

        row_key = row_keys[table.cursor_row]

        # Look up flow index from our mapping
        flow_idx = self.row_to_flow_map.get(row_key)
        if flow_idx is not None and 0 <= flow_idx < len(self.flows_list):
            return self.flows_list[flow_idx]

        return None

    def get_selected_subflow_type(self) -> Optional[str]:
        """Get currently selected sub-flow type if applicable"""
        table = self.query_one("#flows-data-table", DataTable)
        if table.cursor_row is None or not table.rows:
            return None

        # Get the row key at cursor position
        row_keys = list(table.rows.keys())
        if table.cursor_row >= len(row_keys):
            return None

        row_key = row_keys[table.cursor_row]

        # Check if this is a sub-row
        if row_key in self.row_to_subflow_map:
            _, subflow_type = self.row_to_subflow_map[row_key]
            return subflow_type

        return None

    def on_data_table_row_highlighted(self, event: DataTable.RowHighlighted) -> None:
        """Handle row highlight to update selection"""
        selected_flow = self.get_selected_flow()
        subflow_type = self.get_selected_subflow_type()
        self.post_message(self.FlowSelected(selected_flow, subflow_type))

    # Helper methods from original implementation
    def _get_extended_protocol(self, flow: 'FlowStats') -> str:
        """Get extended protocol"""
        if flow.detected_protocol_types:
            enhanced_protocols = {'CHAPTER10', 'CH10', 'PTP', 'IENA'}
            found = flow.detected_protocol_types & enhanced_protocols
            if found:
                protocol = list(found)[0]
                return 'CH10' if protocol in ['CHAPTER10', 'CH10'] else protocol
        return '-'

    def _get_frame_summary(self, flow: 'FlowStats') -> str:
        """Get frame type summary"""
        if not flow.frame_types:
            return "General"
        elif len(flow.frame_types) == 1:
            return list(flow.frame_types.keys())[0][:11]
        else:
            return f"{len(flow.frame_types)} types"

    def _get_protocol_frame_combinations(self, flow: 'FlowStats'):
        """Get protocol/frame combinations"""
        combinations = []
        total = flow.frame_count

        for frame_type, stats in flow.frame_types.items():
            extended = self._get_extended_protocol(flow)
            percentage = (stats.count / total * 100) if total > 0 else 0
            combinations.append((extended, frame_type, stats.count, percentage))

        return sorted(combinations, key=lambda x: x[2], reverse=True)

    def _format_bytes(self, bytes_count: int) -> str:
        """Format byte count"""
        if bytes_count >= 1_000_000_000:
            return f"{bytes_count / 1_000_000_000:.1f}G"
        elif bytes_count >= 1_000_000:
            return f"{bytes_count / 1_000_000:.1f}M"
        elif bytes_count >= 1_000:
            return f"{bytes_count / 1_000:.1f}K"
        else:
            return f"{bytes_count}B"
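
    # Formatting examples (decimal units): _format_bytes(1_536_000) -> "1.5M",
    # _format_bytes(2_000_000_000) -> "2.0G", _format_bytes(512) -> "512B".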