"""
StreamLens Textual Application V2 - TipTop-Inspired Design
Modern TUI with real-time metrics, sparklines, and professional monitoring aesthetic
"""
from textual.app import App, ComposeResult
from textual.containers import Container, Horizontal, Vertical, ScrollableContainer
from textual.widgets import Header, Footer, Static, DataTable, Label, TabPane
from textual.reactive import reactive
from textual.timer import Timer
from textual.events import MouseDown, MouseMove
from textual.binding import Binding
from typing import TYPE_CHECKING
from rich.text import Text
from rich.console import Group
from rich.panel import Panel
from rich.table import Table
import time
import signal
import sys
import datetime
from pathlib import Path
import subprocess
import platform
from .widgets.sparkline import SparklineWidget
from .widgets.metric_card import MetricCard
from .widgets.flow_table_v2 import EnhancedFlowTable
from .widgets.filtered_flow_view import FilteredFlowView
from ...reporting import FlowReportGenerator
from .widgets.split_flow_details import FlowMainDetailsPanel, SubFlowDetailsPanel
from .widgets.debug_panel import DebugPanel
from .widgets.progress_bar import ParsingProgressBar
from ...analysis.background_analyzer import BackgroundAnalyzer
# Debugging imports
try:
from textual_state_visualizer import TextualStateMonitor, TextualStateWebServer
from textual_inspector import inspect_textual_app, print_widget_tree
DEBUGGING_AVAILABLE = True
except ImportError:
DEBUGGING_AVAILABLE = False
if TYPE_CHECKING:
from ...analysis.core import EthernetAnalyzer
class StreamLensAppV2(App):
    """
    StreamLens TipTop-Inspired Interface
    Features:
    - Real-time metrics with sparklines
    - Color-coded quality indicators
    - Compact information display
    - Multi-column layout
    - Smooth live updates
    """
    # External stylesheet carrying the TipTop-inspired theme.
    CSS_PATH = "styles/streamlens_v2.tcss"
    # Command palette and auto-focus are disabled; focus is set explicitly
    # in on_mount via _set_initial_focus.
    ENABLE_COMMAND_PALETTE = False
    AUTO_FOCUS = None
    BINDINGS = [
        ("q", "quit", "Quit"),
        # Number keys 1-0 select a frame-type filter ("1" is the overview).
        ("1", "select_filter('1')", "Overview"),
        ("2", "select_filter('2')", "Frame Type 2"),
        ("3", "select_filter('3')", "Frame Type 3"),
        ("4", "select_filter('4')", "Frame Type 4"),
        ("5", "select_filter('5')", "Frame Type 5"),
        ("6", "select_filter('6')", "Frame Type 6"),
        ("7", "select_filter('7')", "Frame Type 7"),
        ("8", "select_filter('8')", "Frame Type 8"),
        ("9", "select_filter('9')", "Frame Type 9"),
        ("0", "select_filter('0')", "Frame Type 10"),
        # alt+<n> sorts the flow table by the corresponding (0-indexed) column.
        ("alt+1", "sort_table_column(0)", "Sort by column 1"),
        ("alt+2", "sort_table_column(1)", "Sort by column 2"),
        ("alt+3", "sort_table_column(2)", "Sort by column 3"),
        ("alt+4", "sort_table_column(3)", "Sort by column 4"),
        ("alt+5", "sort_table_column(4)", "Sort by column 5"),
        ("alt+6", "sort_table_column(5)", "Sort by column 6"),
        ("alt+7", "sort_table_column(6)", "Sort by column 7"),
        ("alt+8", "sort_table_column(7)", "Sort by column 8"),
        ("alt+9", "sort_table_column(8)", "Sort by column 9"),
        ("alt+0", "sort_table_column(9)", "Sort by column 10"),
        ("p", "toggle_pause", "Pause"),
        ("d", "show_details", "Details"),
        ("v", "toggle_view_mode", "Toggle View"),
        ("r", "generate_report", "Generate Report"),
        ("o", "copy_outliers", "Copy Outliers"),
        ("?", "toggle_help", "Help"),
        # Hidden debug chords: ctrl+d followed by a letter.
        Binding("ctrl+d,t", "debug_tree", "Debug: Widget Tree", show=False),
        Binding("ctrl+d,f", "debug_focus", "Debug: Focused Widget", show=False),
        Binding("ctrl+d,w", "start_web_debug", "Debug: Web Interface", show=False),
    ]
    # Reactive attributes — changing these triggers Textual's reactive updates.
    total_flows = reactive(0)
    total_packets = reactive(0)
    packets_per_sec = reactive(0.0)
    bytes_per_sec = reactive(0.0)
    enhanced_flows = reactive(0)
    outlier_count = reactive(0)
    debug_visible = reactive(False)  # Hide debug panel for now
    # Update timers (created in on_mount, cancelled in cleanup)
    metric_timer: Timer = None
    flow_timer: Timer = None
def __init__(self, analyzer: 'EthernetAnalyzer'):
super().__init__()
self.analyzer = analyzer
self.title = "StreamLens"
self.sub_title = "Network Flow Analysis"
self.paused = False
2025-07-30 23:48:32 -04:00
# Background parsing support - Use single thread to avoid race conditions in frame reference tracking
2025-07-28 18:28:26 -04:00
self.background_analyzer = BackgroundAnalyzer(
analyzer=analyzer,
2025-07-30 23:48:32 -04:00
num_threads=1, # Single-threaded to prevent race conditions in outlier frame references
2025-07-28 18:28:26 -04:00
batch_size=1000,
2025-07-30 23:48:32 -04:00
progress_callback=self._on_progress_update,
2025-07-28 18:28:26 -04:00
flow_update_callback=self._on_flow_update
)
self.pcap_file = None
# Metrics history for sparklines
self.packets_history = []
self.bytes_history = []
self.flows_history = []
self.max_history = 60 # 60 seconds of history
    def compose(self) -> ComposeResult:
        """Create TipTop-inspired layout"""
        yield Header()
        with Container(id="main-container"):
            # Progress bar for PCAP loading (initially hidden)
            yield ParsingProgressBar(id="progress-bar")
            # Ultra-compact metrics bar
            with Horizontal(id="metrics-bar"):
                yield MetricCard("Flows", f"{self.total_flows}", id="flows-metric")
                yield MetricCard("Pkts/s", f"{self.packets_per_sec:.0f}", id="packets-metric")
                yield MetricCard("Vol/s", self._format_bytes_per_sec(self.bytes_per_sec), id="volume-metric")
                yield MetricCard("Enhanced", f"{self.enhanced_flows}", color="success", id="enhanced-metric")
                yield MetricCard("Outliers", f"{self.outlier_count}", color="warning" if self.outlier_count > 0 else "normal", id="outliers-metric")
            # Main content area with conditional debug panel
            with Horizontal(id="content-area"):
                # Left - Filtered flow view with frame type buttons
                yield FilteredFlowView(
                    self.analyzer,
                    id="filtered-flow-view",
                    classes="panel-wide"
                )
                # Middle - Flow details
                with Vertical(id="flow-panels"):
                    yield FlowMainDetailsPanel(id="main-flow-details")
                    yield SubFlowDetailsPanel(id="sub-flow-details")
                # Right - Debug panel (conditionally visible)
                # NOTE: debug_visible is read once here at compose time, so
                # toggling the reactive later does not add/remove the panel
                # by itself.
                if self.debug_visible:
                    yield DebugPanel(id="debug-panel")
        yield Footer()
def on_mount(self) -> None:
2025-07-28 08:14:15 -04:00
"""Initialize the application with TipTop-style updates"""
2025-07-28 11:06:10 -04:00
try:
debug_panel = self.query_one("#debug-panel", DebugPanel)
debug_panel.add_debug_message("APP: Application mounted, checking panels...")
try:
main_panel = self.query_one("#main-flow-details", FlowMainDetailsPanel)
sub_panel = self.query_one("#sub-flow-details", SubFlowDetailsPanel)
debug_panel.add_debug_message("APP: Both panels found successfully")
except Exception as e:
debug_panel.add_debug_message(f"APP: Panel query failed: {e}")
except:
pass # Debug panel not visible
2025-07-28 18:28:26 -04:00
# Set initial subtitle with view mode
try:
flow_table = self.query_one("#flow-table", EnhancedFlowTable)
view_mode = flow_table.get_current_view_mode()
status = "PAUSED" if self.paused else "LIVE"
self.sub_title = f"Network Flow Analysis - {status} - {view_mode} VIEW"
except:
pass
self.update_metrics()
2025-07-30 23:48:32 -04:00
# Set up update intervals (slower during parsing to reduce CPU usage)
self.metric_timer = self.set_interval(5.0, self.update_metrics) # 0.2Hz for slower background updates
self.flow_timer = self.set_interval(10.0, self.update_flows) # 0.1Hz for slower fallback flow updates
# Initialize sparkline history
self._initialize_history()
2025-07-28 08:14:15 -04:00
# Set initial focus to the flow table for immediate keyboard navigation
self.call_after_refresh(self._set_initial_focus)
2025-07-28 08:14:15 -04:00
def _set_initial_focus(self):
2025-07-30 23:48:32 -04:00
"""Set initial focus to the filtered flow view after widgets are ready"""
2025-07-28 08:14:15 -04:00
try:
2025-07-30 23:48:32 -04:00
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
flow_view.flow_table.focus()
2025-07-28 08:14:15 -04:00
except Exception:
2025-07-30 23:48:32 -04:00
# If flow view isn't ready yet, try again after a short delay
2025-07-28 08:14:15 -04:00
self.set_timer(0.1, self._set_initial_focus)
def _initialize_history(self):
"""Initialize metrics history arrays"""
current_time = time.time()
for _ in range(self.max_history):
self.packets_history.append(0)
self.bytes_history.append(0)
self.flows_history.append(0)
def update_metrics(self) -> None:
"""Update real-time metrics and sparklines"""
if self.paused:
return
# Get current metrics
summary = self.analyzer.get_summary()
self.total_flows = summary.get('unique_flows', 0)
self.total_packets = summary.get('total_packets', 0)
# Calculate rates (simplified for now)
# In real implementation, track deltas over time
current_time = time.time()
if not hasattr(self, '_start_time'):
self._start_time = current_time
elapsed = max(1, current_time - self._start_time)
self.packets_per_sec = self.total_packets / elapsed
self.bytes_per_sec = summary.get('total_bytes', 0) / elapsed
2025-07-28 18:28:26 -04:00
# Count enhanced and outliers (thread-safe access)
enhanced = 0
outliers = 0
2025-07-28 18:28:26 -04:00
try:
# Use background analyzer's thread-safe flow access
flows = self.background_analyzer.get_current_flows()
for flow in flows.values():
if flow.enhanced_analysis.decoder_type != "Standard":
enhanced += 1
2025-07-30 23:48:32 -04:00
# Use frame-type-specific outliers instead of flow-level outliers
outliers += sum(len(ft_stats.outlier_frames) for ft_stats in flow.frame_types.values())
2025-07-28 18:28:26 -04:00
except Exception:
# Fallback to direct access if background analyzer not available
for flow in self.analyzer.flows.values():
if flow.enhanced_analysis.decoder_type != "Standard":
enhanced += 1
2025-07-30 23:48:32 -04:00
# Use frame-type-specific outliers instead of flow-level outliers
outliers += sum(len(ft_stats.outlier_frames) for ft_stats in flow.frame_types.values())
2025-07-28 18:28:26 -04:00
self.enhanced_flows = enhanced
self.outlier_count = outliers
# Update metric cards
self._update_metric_cards()
# Update sparklines (removed - no longer in left panel)
# self._update_sparklines()
def _update_metric_cards(self):
"""Update the metric card displays"""
# Update flows metric
flows_card = self.query_one("#flows-metric", MetricCard)
flows_card.update_value(f"{self.total_flows}")
# Update packets/s with color coding
packets_card = self.query_one("#packets-metric", MetricCard)
packets_card.update_value(f"{self.packets_per_sec:.1f}")
if self.packets_per_sec > 10000:
packets_card.color = "warning"
elif self.packets_per_sec > 50000:
packets_card.color = "error"
else:
packets_card.color = "success"
# Update volume/s
volume_card = self.query_one("#volume-metric", MetricCard)
volume_card.update_value(self._format_bytes_per_sec(self.bytes_per_sec))
# Update enhanced flows
enhanced_card = self.query_one("#enhanced-metric", MetricCard)
enhanced_card.update_value(f"{self.enhanced_flows}")
# Update outliers with color
outliers_card = self.query_one("#outliers-metric", MetricCard)
outliers_card.update_value(f"{self.outlier_count}")
if self.outlier_count > 100:
outliers_card.color = "error"
elif self.outlier_count > 10:
outliers_card.color = "warning"
else:
outliers_card.color = "normal"
def _update_sparklines(self):
"""Update sparkline charts with latest data"""
# Add new data points
self.packets_history.append(self.packets_per_sec)
self.bytes_history.append(self.bytes_per_sec)
self.flows_history.append(self.total_flows)
# Keep only recent history
if len(self.packets_history) > self.max_history:
self.packets_history.pop(0)
self.bytes_history.pop(0)
self.flows_history.pop(0)
# Update sparkline widgets
flow_spark = self.query_one("#flow-rate-spark", SparklineWidget)
flow_spark.update_data(self.flows_history)
packet_spark = self.query_one("#packet-rate-spark", SparklineWidget)
packet_spark.update_data(self.packets_history)
def update_flows(self) -> None:
"""Update flow table data"""
if self.paused:
return
2025-07-30 23:48:32 -04:00
# Update filtered flow view
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
flow_view.refresh_frame_types()
flow_view.refresh_flow_data()
def _on_progress_update(self, progress):
"""Handle progress updates from background parser"""
try:
# Use call_from_thread to safely update UI from background thread
self.call_from_thread(self._update_progress_ui, progress)
except Exception:
# Ignore errors during shutdown
pass
2025-07-30 23:48:32 -04:00
def _update_progress_ui(self, progress):
"""Update progress UI (called from main thread)"""
try:
progress_bar = self.query_one("#progress-bar", ParsingProgressBar)
if progress.error:
progress_bar.show_error(progress.error)
elif progress.is_complete:
progress_bar.complete_parsing()
# Trigger frame type button creation now that parsing is complete
self._create_frame_type_buttons()
else:
# Start progress if this is the first update
if not progress_bar.is_visible and progress.total_packets > 0:
progress_bar.start_parsing(progress.total_packets)
# Update progress
progress_bar.update_progress(
progress.processed_packets,
progress.total_packets,
progress.packets_per_second,
progress.estimated_time_remaining
)
except Exception as e:
# Progress bar widget may not be available yet
pass
2025-07-28 18:28:26 -04:00
def _on_flow_update(self):
"""Handle flow data updates from background parser"""
try:
# Use call_from_thread to safely update UI from background thread
self.call_from_thread(self._update_flow_ui)
except Exception:
# Ignore errors during shutdown
pass
def _update_flow_ui(self):
"""Update flow UI (called from main thread)"""
try:
2025-07-30 23:48:32 -04:00
# Update filtered flow view - frame types first for dynamic button creation
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
flow_view.refresh_frame_types() # This will create buttons as frame types are detected
flow_view.refresh_flow_data()
2025-07-28 18:28:26 -04:00
2025-07-30 23:48:32 -04:00
# Also trigger button creation if parsing is complete but buttons haven't been created yet
if not self.analyzer.is_parsing and not getattr(flow_view, '_buttons_created', False):
self._create_frame_type_buttons()
2025-07-28 18:28:26 -04:00
# Also update metrics in real-time
self.update_metrics()
except Exception:
2025-07-30 23:48:32 -04:00
# Flow view widget may not be available yet
pass
def _create_frame_type_buttons(self):
"""Create frame type buttons now that parsing is complete"""
try:
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
# Force refresh of frame types now that parsing is complete
flow_view.refresh_frame_types()
flow_view.refresh_flow_data()
except Exception as e:
# Flow view widget may not be available yet
2025-07-28 18:28:26 -04:00
pass
    def start_background_parsing(self, pcap_file: str):
        """Start parsing `pcap_file` on the background analyzer's worker thread."""
        # Remember the path (used for display/reporting context).
        self.pcap_file = pcap_file
        # Start background parsing
        self.background_analyzer.start_parsing(pcap_file)

    def stop_background_parsing(self):
        """Ask the background analyzer to stop parsing."""
        self.background_analyzer.stop_parsing()
def cleanup(self):
"""Cleanup resources when app shuts down"""
try:
self.background_analyzer.cleanup()
# Cancel any pending timers
if self.metric_timer:
self.metric_timer.stop()
if self.flow_timer:
self.flow_timer.stop()
except Exception as e:
# Don't let cleanup errors prevent shutdown
pass
def on_enhanced_flow_table_flow_selected(self, event: EnhancedFlowTable.FlowSelected) -> None:
"""Handle flow selection events"""
2025-07-28 11:06:10 -04:00
try:
debug_panel = self.query_one("#debug-panel", DebugPanel)
flow_info = f"{event.flow.src_ip}:{event.flow.src_port}" if event.flow else "None"
debug_panel.add_debug_message(f"APP: Flow selected - {flow_info}, subflow={event.subflow_type}")
except:
pass # Debug panel not visible
if event.flow:
# Update main flow details panel
main_panel = self.query_one("#main-flow-details", FlowMainDetailsPanel)
main_panel.update_flow(event.flow)
# Update sub-flow details panel
sub_panel = self.query_one("#sub-flow-details", SubFlowDetailsPanel)
sub_panel.update_flow(event.flow, event.subflow_type)
def _format_bytes_per_sec(self, bps: float) -> str:
"""Format bytes per second with appropriate units"""
if bps >= 1_000_000_000:
return f"{bps / 1_000_000_000:.1f} GB/s"
elif bps >= 1_000_000:
return f"{bps / 1_000_000:.1f} MB/s"
elif bps >= 1_000:
return f"{bps / 1_000:.1f} KB/s"
else:
return f"{bps:.0f} B/s"
def action_toggle_pause(self) -> None:
"""Toggle pause state"""
self.paused = not self.paused
status = "PAUSED" if self.paused else "LIVE"
2025-07-28 18:28:26 -04:00
2025-07-30 23:48:32 -04:00
# Update subtitle
self.sub_title = f"Network Flow Analysis - {status}"
def action_select_filter(self, number: str) -> None:
"""Select frame type filter by number key"""
2025-07-28 18:28:26 -04:00
try:
2025-07-30 23:48:32 -04:00
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
flow_view.action_select_filter(number)
except Exception:
pass
2025-07-30 23:48:32 -04:00
def action_sort_table_column(self, column_index: int) -> None:
"""Sort table by column index"""
try:
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
flow_view.action_sort_column(column_index)
except Exception:
pass
    def action_show_details(self) -> None:
        """Show detailed view for selected flow (not yet implemented)."""
        # TODO: Implement detailed flow modal
        pass

    def action_toggle_view_mode(self) -> None:
        """Toggle between different display modes (not yet implemented)."""
        # For now, this could cycle through different column layouts
        # or show more/less detail in the frame type views
        pass
    def on_mouse_down(self, event: MouseDown) -> None:
        """Prevent default mouse down behavior to disable mouse interaction."""
        event.prevent_default()

    def on_mouse_move(self, event: MouseMove) -> None:
        """Prevent default mouse move behavior to disable mouse interaction."""
        event.prevent_default()
2025-07-30 23:48:32 -04:00
def action_generate_report(self) -> None:
"""Generate comprehensive flow analysis report"""
try:
# Generate timestamp-based filename
timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
output_file = f"streamlens_flow_report_{timestamp}.md"
# Create report generator
report_generator = FlowReportGenerator(self.analyzer)
# Generate report (markdown format)
report_content = report_generator.generate_report(output_file, "markdown")
# Show success notification in the footer
self.sub_title = f"✅ Report generated: {output_file}"
# Set a timer to restore the original subtitle
self.set_timer(3.0, self._restore_subtitle)
except Exception as e:
# Show error notification
self.sub_title = f"❌ Report generation failed: {str(e)}"
self.set_timer(3.0, self._restore_subtitle)
def _restore_subtitle(self) -> None:
"""Restore the original subtitle"""
status = "PAUSED" if self.paused else "LIVE"
self.sub_title = f"Network Flow Analysis - {status}"
def action_copy_outliers(self) -> None:
"""Copy outlier frame information to clipboard"""
try:
# Get selected flow from the filtered view
flow_view = self.query_one("#filtered-flow-view", FilteredFlowView)
# For now, get the first flow (could be improved to use actual selection)
flows = list(self.analyzer.flows.values())
selected_flow = flows[0] if flows else None
if not selected_flow:
self.sub_title = "⚠️ No flow selected"
self.set_timer(2.0, self._restore_subtitle)
return
# Build frame-type-specific outlier information
outlier_info = []
outlier_info.append(f"Flow: {selected_flow.src_ip}:{selected_flow.src_port}{selected_flow.dst_ip}:{selected_flow.dst_port}")
outlier_info.append(f"Protocol: {selected_flow.transport_protocol}")
outlier_info.append(f"Total Packets: {selected_flow.frame_count}")
# Calculate total frame-type-specific outliers
total_frame_type_outliers = sum(len(ft_stats.outlier_frames) for ft_stats in selected_flow.frame_types.values())
outlier_info.append(f"Total Frame-Type Outliers: {total_frame_type_outliers}")
if total_frame_type_outliers > 0:
outlier_info.append(f"\n=== Frame Type Outlier Analysis ===")
# Show outliers per frame type
for frame_type, ft_stats in sorted(selected_flow.frame_types.items(), key=lambda x: len(x[1].outlier_frames), reverse=True):
if ft_stats.outlier_frames:
outlier_info.append(f"\n{frame_type}: {len(ft_stats.outlier_frames)} outliers")
outlier_info.append(f" Frames: {', '.join(map(str, sorted(ft_stats.outlier_frames)))}")
outlier_info.append(f" Avg ΔT: {ft_stats.avg_inter_arrival * 1000:.3f} ms")
outlier_info.append(f" Std σ: {ft_stats.std_inter_arrival * 1000:.3f} ms")
outlier_info.append(f" 3σ Threshold: {(ft_stats.avg_inter_arrival + 3 * ft_stats.std_inter_arrival) * 1000:.3f} ms")
# Show enhanced outlier information for this frame type
if hasattr(ft_stats, 'enhanced_outlier_details') and ft_stats.enhanced_outlier_details:
outlier_info.append(f" Enhanced Outlier Details:")
for frame_num, prev_frame_num, inter_time in sorted(ft_stats.enhanced_outlier_details[:5]):
deviation = (inter_time - ft_stats.avg_inter_arrival) / ft_stats.std_inter_arrival if ft_stats.std_inter_arrival > 0 else 0
outlier_info.append(f" Frame {frame_num} (from {prev_frame_num}): {inter_time * 1000:.3f} ms ({deviation:.1f}σ)")
if len(ft_stats.enhanced_outlier_details) > 5:
outlier_info.append(f" ... and {len(ft_stats.enhanced_outlier_details) - 5} more")
elif ft_stats.outlier_details:
outlier_info.append(f" Outlier Details:")
for frame_num, inter_time in sorted(ft_stats.outlier_details[:5]):
deviation = (inter_time - ft_stats.avg_inter_arrival) / ft_stats.std_inter_arrival if ft_stats.std_inter_arrival > 0 else 0
outlier_info.append(f" Frame {frame_num}: {inter_time * 1000:.3f} ms ({deviation:.1f}σ)")
if len(ft_stats.outlier_details) > 5:
outlier_info.append(f" ... and {len(ft_stats.outlier_details) - 5} more")
else:
outlier_info.append("\nNo frame-type-specific timing outliers detected.")
# Copy to clipboard
clipboard_text = "\n".join(outlier_info)
self._copy_to_clipboard(clipboard_text)
# Show success notification
total_frame_type_outliers = sum(len(ft_stats.outlier_frames) for ft_stats in selected_flow.frame_types.values())
self.sub_title = f"✅ Copied {total_frame_type_outliers} frame-type outliers to clipboard"
self.set_timer(2.0, self._restore_subtitle)
except Exception as e:
self.sub_title = f"❌ Failed to copy: {str(e)}"
self.set_timer(2.0, self._restore_subtitle)
def _copy_to_clipboard(self, text: str) -> None:
"""Copy text to system clipboard"""
system = platform.system()
if system == "Darwin": # macOS
process = subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE)
process.communicate(text.encode('utf-8'))
elif system == "Linux":
# Try xclip first, then xsel
try:
process = subprocess.Popen(['xclip', '-selection', 'clipboard'], stdin=subprocess.PIPE)
process.communicate(text.encode('utf-8'))
except FileNotFoundError:
try:
process = subprocess.Popen(['xsel', '--clipboard', '--input'], stdin=subprocess.PIPE)
process.communicate(text.encode('utf-8'))
except FileNotFoundError:
raise Exception("Neither xclip nor xsel found. Please install one.")
elif system == "Windows":
process = subprocess.Popen(['clip'], stdin=subprocess.PIPE, shell=True)
process.communicate(text.encode('utf-8'))
else:
raise Exception(f"Unsupported platform: {system}")
    def action_quit(self) -> None:
        """Quit the application with proper cleanup."""
        self.cleanup()
        self.exit()

    def on_unmount(self) -> None:
        """Called when app is being unmounted - ensure cleanup.

        NOTE: cleanup() may run on both the quit and unmount paths; it
        swallows its own errors, so the double call is harmless.
        """
        self.cleanup()
# Debugging methods
def start_debugging(self, web_interface: bool = True, port: int = 8080):
"""Start debugging tools"""
if not DEBUGGING_AVAILABLE:
print("❌ Debugging tools not available. Run: pip install watchdog")
return
self._debug_monitor = TextualStateMonitor(self)
self._debug_monitor.start_monitoring()
if web_interface:
self._debug_server = TextualStateWebServer(self._debug_monitor, port)
self._debug_server.start()
print(f"🔍 Debug monitoring started!")
if web_interface:
print(f"🌐 Web interface: http://localhost:{port}")
def stop_debugging(self):
"""Stop debugging tools"""
if hasattr(self, '_debug_monitor') and self._debug_monitor:
self._debug_monitor.stop_monitoring()
if hasattr(self, '_debug_server') and self._debug_server:
self._debug_server.stop()
def debug_widget_tree(self):
"""Print current widget tree to console"""
if not DEBUGGING_AVAILABLE:
print("❌ Debugging tools not available")
return
data = inspect_textual_app(self)
print("🔍 TEXTUAL APP INSPECTION")
print("=" * 50)
print_widget_tree(data.get('current_screen', {}))
def debug_focused_widget(self):
"""Print info about currently focused widget"""
focused = self.focused
if focused:
print(f"🎯 Focused widget: {focused.__class__.__name__}")
if hasattr(focused, 'id'):
print(f" ID: {focused.id}")
if hasattr(focused, 'classes'):
print(f" Classes: {list(focused.classes)}")
if hasattr(focused, 'label'):
print(f" Label: {focused.label}")
else:
print("🎯 No widget has focus")
# Debugging key bindings
    def action_debug_tree(self):
        """Debug action: print the widget tree (bound to ctrl+d,t)."""
        self.debug_widget_tree()

    def action_debug_focus(self):
        """Debug action: print the focused widget (bound to ctrl+d,f)."""
        self.debug_focused_widget()

    def action_start_web_debug(self):
        """Debug action: start the web debugging interface (bound to ctrl+d,w)."""
        self.start_debugging()