# StreamLens/analyzer/tui/textual/widgets/flow_table_v2.py
"""
Enhanced Flow Table Widget - TipTop-inspired with inline visualizations
"""
from textual.widgets import DataTable
from textual.containers import Vertical
from textual.reactive import reactive
from textual.message import Message
from typing import TYPE_CHECKING, List, Optional, Dict, Tuple
from rich.text import Text
from rich.box import ROUNDED
if TYPE_CHECKING:
from ....analysis.core import EthernetAnalyzer
    from ....models import FlowStats, FrameTypeStats  # FrameTypeStats assumed to live alongside FlowStats; used only in type hints
class EnhancedFlowTable(Vertical):
"""
Enhanced flow table with TipTop-style inline visualizations
Features:
- Inline sparklines for packet rate
- Bar charts for volume and quality
- Color-coded rows based on status
- Hierarchical sub-rows for protocol breakdown
"""
DEFAULT_CSS = """
EnhancedFlowTable {
height: 1fr;
padding: 0;
margin: 0;
}
EnhancedFlowTable DataTable {
height: 1fr;
scrollbar-gutter: stable;
padding: 0;
margin: 0;
}
"""
selected_flow_index = reactive(0)
sort_key = reactive("flows")
simplified_view = reactive(True) # Default to simplified view without subflows
def __init__(self, analyzer: 'EthernetAnalyzer', **kwargs):
super().__init__(**kwargs)
self.analyzer = analyzer
self.flows_list = []
self.row_to_flow_map = {} # Map row keys to flow indices
        self.flow_metrics = {}  # Store per-flow metrics history
        self.row_to_subflow_map = {}  # Map row keys to (flow_index, subflow_type); rebuilt on each refresh
self.view_mode_changed = False # Track when view mode changes
def compose(self):
"""Create the enhanced flow table"""
        # Single DataTable with a row cursor; sorting is applied in refresh_data()
yield DataTable(
id="flows-data-table",
cursor_type="row",
zebra_stripes=True,
show_header=True,
show_row_labels=False
)
def on_mount(self):
"""Initialize the table"""
self._setup_table_columns()
self.refresh_data()
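        # No refresh timer is started here; keeping rows current presumably falls to
        # the owning screen/app (e.g. calling refresh_data() on a set_interval timer).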
def _setup_table_columns(self):
"""Setup table columns based on current view mode"""
table = self.query_one("#flows-data-table", DataTable)
# Clear existing columns if any
if table.columns:
table.clear(columns=True)
if self.simplified_view:
# Simplified view - only main flows with summary data
table.add_column("#", width=3, key="num")
table.add_column("Source", width=18, key="source")
table.add_column("Destination", width=18, key="dest")
table.add_column("Protocol", width=8, key="protocol")
table.add_column("Packets", width=8, key="packets")
table.add_column("Volume", width=10, key="volume")
table.add_column("Avg ΔT", width=8, key="avg_delta")
table.add_column("Quality", width=8, key="quality")
table.add_column("Status", width=10, key="status")
else:
# Detailed view - original layout with subflows
table.add_column("#", width=2, key="num")
table.add_column("Source", width=18, key="source")
table.add_column("Proto", width=4, key="proto")
table.add_column("Destination", width=18, key="dest")
table.add_column("Extended", width=8, key="extended")
table.add_column("Frame Type", width=10, key="frame_type")
table.add_column("Pkts", width=6, key="packets")
table.add_column("Size", width=8, key="volume")
table.add_column("ΔT(ms)", width=8, key="delta_t")
table.add_column("σ(ms)", width=8, key="sigma")
table.add_column("Out", width=5, key="outliers")
table.add_column("Rate", width=6, key="rate")
def refresh_data(self):
"""Refresh flow table with current view mode"""
# Check if view mode changed and rebuild table structure if needed
if self.view_mode_changed:
self._setup_table_columns()
self.view_mode_changed = False
table = self.query_one("#flows-data-table", DataTable)
# Preserve cursor and scroll positions
cursor_row = table.cursor_row
cursor_column = table.cursor_column
scroll_x = table.scroll_x
scroll_y = table.scroll_y
selected_row_key = None
if table.rows and cursor_row < len(table.rows):
selected_row_key = list(table.rows.keys())[cursor_row]
table.clear()
# Clear row mapping
self.row_to_flow_map.clear()
self.row_to_subflow_map = {} # Map row keys to (flow_index, subflow_type)
# Get and sort flows
self.flows_list = self._get_sorted_flows()
if self.simplified_view:
self._populate_simplified_view()
else:
self._populate_detailed_view()
# Restore cursor position
if selected_row_key and selected_row_key in table.rows:
row_index = list(table.rows.keys()).index(selected_row_key)
table.move_cursor(row=row_index, column=cursor_column, animate=False)
elif table.row_count > 0:
# If original selection not found, try to maintain row position
new_row = min(cursor_row, table.row_count - 1)
table.move_cursor(row=new_row, column=cursor_column, animate=False)
# Restore scroll position
table.scroll_to(x=scroll_x, y=scroll_y, animate=False)
def _populate_simplified_view(self):
"""Populate table with simplified flow summary data"""
table = self.query_one("#flows-data-table", DataTable)
for i, flow in enumerate(self.flows_list):
# Create simplified row data - no subflows shown
row_data = self._create_simplified_row(i + 1, flow)
row_key = table.add_row(*row_data, key=f"flow_{i}")
# Map row key to flow index
self.row_to_flow_map[row_key] = i
def _populate_detailed_view(self):
"""Populate table with detailed flow data including subflows"""
table = self.query_one("#flows-data-table", DataTable)
for i, flow in enumerate(self.flows_list):
# Track metrics for this flow
flow_key = f"{flow.src_ip}:{flow.src_port}-{flow.dst_ip}:{flow.dst_port}"
if flow_key not in self.flow_metrics:
self.flow_metrics[flow_key] = {
'rate_history': [],
'last_packet_count': flow.frame_count,
'last_update': flow.last_seen
}
# Calculate current rate
metrics = self.flow_metrics[flow_key]
time_delta = flow.last_seen - metrics['last_update'] if metrics['last_update'] else 1
packet_delta = flow.frame_count - metrics['last_packet_count']
current_rate = packet_delta / max(time_delta, 0.1)
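            # The 0.1 s floor above guards against division by (near-)zero when a
            # refresh sees little or no elapsed time since the last update.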
# Update metrics
metrics['rate_history'].append(current_rate)
if len(metrics['rate_history']) > 10:
metrics['rate_history'].pop(0)
metrics['last_packet_count'] = flow.frame_count
metrics['last_update'] = flow.last_seen
# Create row with detailed visualizations
row_data = self._create_enhanced_row(i + 1, flow, metrics)
row_key = table.add_row(*row_data, key=f"flow_{i}")
# Map row key to flow index
self.row_to_flow_map[row_key] = i
# Add sub-rows for protocol breakdown (only in detailed view)
if self._should_show_subrows(flow):
sub_rows = self._create_protocol_subrows(flow)
combinations = self._get_protocol_frame_combinations(flow)
for j, sub_row in enumerate(sub_rows):
sub_key = table.add_row(*sub_row, key=f"flow_{i}_sub_{j}")
# Map sub-row to parent flow and subflow type
self.row_to_flow_map[sub_key] = i
if j < len(combinations):
_, frame_type, _, _ = combinations[j]
self.row_to_subflow_map[sub_key] = (i, frame_type)
def _create_enhanced_row(self, num: int, flow: 'FlowStats', metrics: dict) -> List[Text]:
"""Create enhanced row with inline visualizations"""
# Flow number
num_text = Text(str(num), justify="right")
# Source (truncated if needed)
source = f"{flow.src_ip}:{flow.src_port}"
source_text = Text(source[:20] + "..." if len(source) > 22 else source)
# Protocol with color
proto_text = Text(flow.transport_protocol, style="bold cyan")
# Destination
dest = f"{flow.dst_ip}:{flow.dst_port}"
dest_text = Text(dest[:20] + "..." if len(dest) > 22 else dest)
# Extended protocol
extended = self._get_extended_protocol(flow)
extended_text = Text(extended, style="yellow" if extended != "-" else "dim")
# Frame type summary
frame_summary = self._get_frame_summary(flow)
frame_text = Text(frame_summary, style="blue")
# Packet count (separate from rate)
packets_text = Text(str(flow.frame_count), justify="right")
# Rate sparkline (separate column)
rate_spark = self._create_rate_sparkline(metrics['rate_history'])
rate_text = Text(rate_spark, justify="center")
# Size with actual value
size_value = self._format_bytes(flow.total_bytes)
size_text = Text(f"{size_value:>8}")
# Delta T and Sigma - empty for main flows (subflows show the detail)
delta_t_text = Text("", justify="right")
sigma_text = Text("", justify="right")
# Outlier count - sum of frame-type-specific outliers (not flow-level)
frame_type_outlier_count = sum(len(ft_stats.outlier_frames) for ft_stats in flow.frame_types.values())
outlier_text = Text(str(frame_type_outlier_count), justify="right",
style="red" if frame_type_outlier_count > 0 else "green")
return [
num_text, source_text, proto_text, dest_text,
extended_text, frame_text, packets_text, size_text,
delta_t_text, sigma_text, outlier_text, rate_text
]
def _create_simplified_row(self, num: int, flow: 'FlowStats') -> List[Text]:
"""Create simplified row with summary data only"""
# Flow number
num_text = Text(str(num), justify="right")
# Source (IP only for simplified view)
source_text = Text(flow.src_ip)
# Destination (IP only for simplified view)
dest_text = Text(flow.dst_ip)
# Main protocol (transport + extended if available)
extended = self._get_extended_protocol(flow)
if extended != "-":
protocol_str = f"{flow.transport_protocol}/{extended}"
else:
protocol_str = flow.transport_protocol
protocol_text = Text(protocol_str, style="bold cyan")
# Total packet count
packets_text = Text(str(flow.frame_count), justify="right")
# Total volume
volume_text = Text(self._format_bytes(flow.total_bytes), justify="right")
# Average delta T
if flow.avg_inter_arrival > 0:
delta_t_ms = flow.avg_inter_arrival * 1000
if delta_t_ms >= 1000:
avg_delta_str = f"{delta_t_ms/1000:.1f}s"
else:
avg_delta_str = f"{delta_t_ms:.1f}ms"
else:
avg_delta_str = "N/A"
avg_delta_text = Text(avg_delta_str, justify="right")
# Quality score as percentage
quality_score = self._get_quality_score(flow)
quality_text = Text(f"{quality_score}%", justify="right",
style="green" if quality_score >= 90 else
"yellow" if quality_score >= 70 else "red")
# Flow status
status = self._get_flow_status(flow)
status_color = {
"Enhanced": "bold blue",
"Alert": "bold red",
"Warning": "yellow",
"Normal": "green"
}.get(status, "white")
status_text = Text(status, style=status_color)
return [
num_text, source_text, dest_text, protocol_text,
packets_text, volume_text, avg_delta_text,
quality_text, status_text
]
def _create_rate_sparkline(self, history: List[float]) -> str:
"""Create mini sparkline for rate"""
        if not history:
            return " " * 4  # no samples yet: render a blank placeholder cell
spark_chars = " ▁▂▃▄▅▆▇█"
data_min = min(history) if history else 0
data_max = max(history) if history else 1
        if data_max == data_min:
            return "▁" * 4  # flat history: render a uniform low sparkline
result = []
for value in history[-4:]: # Last 4 values
normalized = (value - data_min) / (data_max - data_min)
char_index = int(normalized * 8)
result.append(spark_chars[char_index])
return "".join(result)
def _create_volume_bar(self, bytes_count: int) -> str:
"""Create bar chart for volume"""
# Scale to GB for comparison
gb = bytes_count / 1_000_000_000
# Create bar (max 5 chars)
if gb >= 10:
return "█████"
elif gb >= 1:
filled = int(gb / 2)
return "" * filled + "" * (5 - filled)
else:
# For smaller volumes, show at least one bar
mb = bytes_count / 1_000_000
if mb >= 100:
return "█░░░░"
else:
return "▌░░░░"
def _create_quality_bar(self, flow: 'FlowStats') -> tuple[str, str]:
"""Create quality bar chart with color"""
quality = self._get_quality_score(flow)
# Create bar (5 chars)
filled = int(quality / 20) # 0-100 -> 0-5
bar = "" * filled + "" * (5 - filled)
# Determine color
if quality >= 90:
color = "green"
elif quality >= 70:
color = "yellow"
else:
color = "red"
return bar, color
def _get_quality_score(self, flow: 'FlowStats') -> int:
"""Calculate quality score for flow"""
if flow.enhanced_analysis.decoder_type != "Standard":
return int(flow.enhanced_analysis.avg_frame_quality)
else:
# Base quality on frame-type-specific outlier percentage
frame_type_outlier_count = sum(len(ft_stats.outlier_frames) for ft_stats in flow.frame_types.values())
outlier_pct = frame_type_outlier_count / flow.frame_count * 100 if flow.frame_count > 0 else 0
return max(0, int(100 - outlier_pct * 10))
def _get_flow_status(self, flow: 'FlowStats') -> str:
"""Determine flow status"""
if flow.enhanced_analysis.decoder_type != "Standard":
return "Enhanced"
else:
# Use frame-type-specific outliers for status
frame_type_outlier_count = sum(len(ft_stats.outlier_frames) for ft_stats in flow.frame_types.values())
if frame_type_outlier_count > flow.frame_count * 0.1:
return "Alert"
elif frame_type_outlier_count > 0:
return "Warning"
else:
return "Normal"
def _get_flow_style(self, flow: 'FlowStats') -> Optional[str]:
"""Get styling for flow row"""
status = self._get_flow_status(flow)
if status == "Enhanced":
return "bold"
elif status == "Alert":
return "bold red"
elif status == "Warning":
return "yellow"
return None
def _should_show_subrows(self, flow: 'FlowStats') -> bool:
"""Determine if flow should show protocol breakdown"""
# Only show subrows if there are enhanced frame types
enhanced_frame_types = self._get_enhanced_frame_types(flow)
return len(enhanced_frame_types) > 0
def _get_enhanced_frame_types(self, flow: 'FlowStats') -> Dict[str, 'FrameTypeStats']:
"""Get only frame types that belong to enhanced protocols"""
enhanced_protocols = {'CHAPTER10', 'CH10', 'PTP', 'IENA'}
enhanced_frame_types = {}
for frame_type, stats in flow.frame_types.items():
# Check if this frame type belongs to an enhanced protocol
if any(enhanced_proto in frame_type for enhanced_proto in enhanced_protocols):
enhanced_frame_types[frame_type] = stats
elif frame_type.startswith(('CH10-', 'PTP-', 'IENA-')):
enhanced_frame_types[frame_type] = stats
elif frame_type in ('TMATS', 'TMATS-Data'): # TMATS is part of Chapter 10
enhanced_frame_types[frame_type] = stats
return enhanced_frame_types
def _get_enhanced_protocol_frame_combinations(self, flow: 'FlowStats', enhanced_frame_types: Dict[str, 'FrameTypeStats']) -> List[Tuple[str, str, int, float]]:
"""Get protocol/frame combinations for enhanced protocols only"""
combinations = []
total_packets = flow.frame_count
# Group enhanced frame types by extended protocol
protocol_frames = {}
for frame_type, ft_stats in enhanced_frame_types.items():
# Determine extended protocol for this frame type
extended_proto = self._get_extended_protocol_for_frame(flow, frame_type)
if extended_proto not in protocol_frames:
protocol_frames[extended_proto] = []
protocol_frames[extended_proto].append((frame_type, ft_stats.count))
# Convert to list of tuples with percentages
for extended_proto, frame_list in protocol_frames.items():
for frame_type, count in frame_list:
percentage = (count / total_packets * 100) if total_packets > 0 else 0
combinations.append((extended_proto, frame_type, count, percentage))
# Sort by count (descending)
combinations.sort(key=lambda x: x[2], reverse=True)
return combinations
def _create_protocol_subrows(self, flow: 'FlowStats') -> List[List[Text]]:
"""Create sub-rows for protocol/frame type breakdown - matches details panel logic"""
subrows = []
# For enhanced flows, show ALL frame types (same logic as details panel)
if flow.enhanced_analysis.decoder_type != "Standard":
combinations = self._get_protocol_frame_combinations(flow)
else:
# For standard flows, only show enhanced frame types
enhanced_frame_types = self._get_enhanced_frame_types(flow)
combinations = self._get_enhanced_protocol_frame_combinations(flow, enhanced_frame_types)
for extended_proto, frame_type, count, percentage in combinations:
# Calculate timing for this frame type if available
frame_delta_t = ""
frame_sigma = ""
frame_outliers = ""
if frame_type in flow.frame_types:
ft_stats = flow.frame_types[frame_type]
                # Always compute timing when data is available, even for very small values
if ft_stats.avg_inter_arrival > 0:
dt_ms = ft_stats.avg_inter_arrival * 1000
frame_delta_t = f"{dt_ms:.1f}" if dt_ms < 1000 else f"{dt_ms/1000:.1f}s"
elif len(ft_stats.inter_arrival_times) >= 2:
# If avg is 0 but we have data, recalculate on the fly
import statistics
avg_arrival = statistics.mean(ft_stats.inter_arrival_times)
if avg_arrival > 0:
dt_ms = avg_arrival * 1000
frame_delta_t = f"{dt_ms:.1f}" if dt_ms < 1000 else f"{dt_ms/1000:.1f}s"
if ft_stats.std_inter_arrival > 0:
sig_ms = ft_stats.std_inter_arrival * 1000
frame_sigma = f"{sig_ms:.1f}" if sig_ms < 1000 else f"{sig_ms/1000:.1f}s"
elif len(ft_stats.inter_arrival_times) >= 2:
# If std is 0 but we have data, recalculate on the fly
import statistics
std_arrival = statistics.stdev(ft_stats.inter_arrival_times)
if std_arrival > 0:
sig_ms = std_arrival * 1000
frame_sigma = f"{sig_ms:.1f}" if sig_ms < 1000 else f"{sig_ms/1000:.1f}s"
frame_outliers = str(len(ft_stats.outlier_frames))
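            # The Size cell below is an estimate: frame count x the flow's average
            # packet size (per-frame-type byte totals aren't tracked here).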
subrow = [
Text(""), # Empty flow number
Text(""), # Empty source
Text(""), # Empty protocol
Text(""), # Empty destination
Text(f" {extended_proto}", style="dim yellow"),
Text(frame_type, style="dim blue"),
Text(f"{count}", style="dim", justify="right"),
Text(f"{self._format_bytes(count * (flow.total_bytes // flow.frame_count) if flow.frame_count > 0 else 0):>8}", style="dim"),
Text(frame_delta_t, style="dim", justify="right"),
Text(frame_sigma, style="dim", justify="right"),
Text(frame_outliers, style="dim red" if frame_outliers and int(frame_outliers) > 0 else "dim", justify="right"),
Text("", style="dim") # Empty rate column for subrows
]
subrows.append(subrow)
return subrows
def _get_sorted_flows(self) -> List['FlowStats']:
"""Get flows sorted by current sort key"""
flows = list(self.analyzer.flows.values())
if self.sort_key == "packets":
flows.sort(key=lambda x: x.frame_count, reverse=True)
elif self.sort_key == "volume":
flows.sort(key=lambda x: x.total_bytes, reverse=True)
elif self.sort_key == "quality":
flows.sort(key=lambda x: self._get_quality_score(x), reverse=True)
else: # Default: sort by importance
flows.sort(key=lambda x: (
x.enhanced_analysis.decoder_type != "Standard",
len(x.outlier_frames),
x.frame_count
), reverse=True)
return flows
def sort_by(self, key: str):
"""Change sort order"""
self.sort_key = key
self.refresh_data()
def toggle_view_mode(self):
"""Toggle between simplified and detailed view modes"""
self.simplified_view = not self.simplified_view
self.view_mode_changed = True
self.refresh_data()
def get_current_view_mode(self) -> str:
"""Get current view mode as string"""
return "SIMPLIFIED" if self.simplified_view else "DETAILED"
class FlowSelected(Message):
"""Message sent when a flow is selected"""
def __init__(self, flow: Optional['FlowStats'], subflow_type: Optional[str] = None) -> None:
self.flow = flow
self.subflow_type = subflow_type
super().__init__()
def get_selected_flow(self) -> Optional['FlowStats']:
"""Get currently selected flow"""
table = self.query_one("#flows-data-table", DataTable)
if table.cursor_row is None or not table.rows:
return None
# Get the row key at cursor position
row_keys = list(table.rows.keys())
if table.cursor_row >= len(row_keys):
return None
row_key = row_keys[table.cursor_row]
# Look up flow index from our mapping
flow_idx = self.row_to_flow_map.get(row_key)
if flow_idx is not None and 0 <= flow_idx < len(self.flows_list):
return self.flows_list[flow_idx]
return None
def get_selected_subflow_type(self) -> Optional[str]:
"""Get currently selected sub-flow type if applicable"""
table = self.query_one("#flows-data-table", DataTable)
if table.cursor_row is None or not table.rows:
return None
# Get the row key at cursor position
row_keys = list(table.rows.keys())
if table.cursor_row >= len(row_keys):
return None
row_key = row_keys[table.cursor_row]
# Check if this is a sub-row
if row_key in self.row_to_subflow_map:
_, subflow_type = self.row_to_subflow_map[row_key]
return subflow_type
return None
def on_data_table_row_highlighted(self, event: DataTable.RowHighlighted) -> None:
"""Handle row highlight to update selection"""
selected_flow = self.get_selected_flow()
subflow_type = self.get_selected_subflow_type()
# Debug through app's debug panel
flow_info = f"{selected_flow.src_ip}:{selected_flow.src_port}" if selected_flow else "None"
table = self.query_one("#flows-data-table", DataTable)
current_row = table.cursor_row if table.cursor_row is not None else -1
try:
debug_panel = self.app.query_one("#debug-panel")
debug_panel.add_debug_message(f"TABLE: Row {current_row} - {flow_info}, subflow:{subflow_type}")
        except Exception:
            pass  # Debug panel might not be mounted yet
self.post_message(self.FlowSelected(selected_flow, subflow_type))
# Helper methods from original implementation
def _get_extended_protocol_for_frame(self, flow: 'FlowStats', frame_type: str) -> str:
"""Get extended protocol for a specific frame type"""
if frame_type.startswith('CH10') or frame_type == 'TMATS':
return 'CH10'
elif frame_type.startswith('PTP'):
return 'PTP'
elif frame_type == 'IENA':
return 'IENA'
elif frame_type == 'NTP':
return 'NTP'
else:
return self._get_extended_protocol(flow)
def _get_extended_protocol(self, flow: 'FlowStats') -> str:
"""Get extended protocol"""
if flow.detected_protocol_types:
enhanced_protocols = {'CHAPTER10', 'CH10', 'PTP', 'IENA'}
found = flow.detected_protocol_types & enhanced_protocols
if found:
protocol = list(found)[0]
return 'CH10' if protocol in ['CHAPTER10', 'CH10'] else protocol
return '-'
def _get_frame_summary(self, flow: 'FlowStats') -> str:
"""Get frame type summary"""
if not flow.frame_types:
return "General"
elif len(flow.frame_types) == 1:
return list(flow.frame_types.keys())[0][:11]
else:
return f"{len(flow.frame_types)} types"
def _get_protocol_frame_combinations(self, flow: 'FlowStats'):
"""Get protocol/frame combinations"""
combinations = []
total = flow.frame_count
for frame_type, stats in flow.frame_types.items():
extended = self._get_extended_protocol(flow)
percentage = (stats.count / total * 100) if total > 0 else 0
combinations.append((extended, frame_type, stats.count, percentage))
return sorted(combinations, key=lambda x: x[2], reverse=True)
def _format_bytes(self, bytes_count: int) -> str:
"""Format byte count"""
if bytes_count >= 1_000_000_000:
return f"{bytes_count / 1_000_000_000:.1f}G"
elif bytes_count >= 1_000_000:
return f"{bytes_count / 1_000_000:.1f}M"
elif bytes_count >= 1_000:
return f"{bytes_count / 1_000:.1f}K"
else:
return f"{bytes_count}B"
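

# --- Illustrative usage (comment only, not part of the widget) ---
# A minimal sketch of how an app might host this table, assuming the app already
# owns an EthernetAnalyzer instance; "AnalyzerApp" and "update_details" are
# hypothetical names, while EnhancedFlowTable and its FlowSelected message come
# from this module.
#
#     from textual.app import App, ComposeResult
#
#     class AnalyzerApp(App):
#         def compose(self) -> ComposeResult:
#             yield EnhancedFlowTable(self.analyzer, id="flow-table")
#
#         def on_enhanced_flow_table_flow_selected(
#             self, message: EnhancedFlowTable.FlowSelected
#         ) -> None:
#             # Row highlights post FlowSelected; update a details view here.
#             self.update_details(message.flow, message.subflow_type)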