pretty good
This commit is contained in:
162
analyzer/protocols/decoders/base.py
Normal file
162
analyzer/protocols/decoders/base.py
Normal file
@@ -0,0 +1,162 @@
|
||||
"""
|
||||
Base classes for Chapter 10 data type decoders
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from typing import Dict, Any, Optional, List, Union
import struct
|
||||
|
||||
|
||||
@dataclass
class DecodedPayload:
    """Container for decoded Chapter 10 payload data.

    Produced by DataTypeDecoder implementations; bundles the parsed fields
    together with the raw bytes and any non-fatal errors hit while decoding.
    """
    data_type: int                 # raw CH-10 data type code from the packet header
    data_type_name: str            # human-readable name for the data type
    format_version: int            # format number (low nibble of the data type)
    decoded_data: Dict[str, Any]   # decoder-specific parsed fields
    raw_payload: bytes             # original, undecoded payload bytes
    # Defaults make the two bookkeeping fields optional at construction time;
    # callers that passed explicit values (or None) keep working unchanged.
    errors: List[str] = field(default_factory=list)         # non-fatal decode errors
    metadata: Dict[str, Any] = field(default_factory=dict)  # extra decoder info

    def __post_init__(self):
        # Callers may still pass None explicitly; normalize to empty
        # containers so downstream code can append/update without None checks.
        if self.errors is None:
            self.errors = []
        if self.metadata is None:
            self.metadata = {}
|
||||
|
||||
|
||||
class DataTypeDecoder(ABC):
    """Abstract base class for Chapter 10 data type decoders.

    Concrete decoders implement can_decode()/decode() and set the
    identification attributes in __init__.
    """

    def __init__(self):
        # Format numbers (low nibble of the data type) this decoder accepts.
        self.supported_formats: List[int] = []
        # Base data-type code of the family handled by this decoder.
        self.data_type_base: int = 0x00
        # Human-readable family name, used when labelling decoded packets.
        self.data_type_name: str = "Unknown"

    @abstractmethod
    def can_decode(self, data_type: int) -> bool:
        """Check if this decoder can handle the given data type"""
        pass

    @abstractmethod
    def decode(self, payload: bytes, ch10_header: Dict[str, Any]) -> Optional[DecodedPayload]:
        """Decode the payload data"""
        pass

    def get_data_type_name(self, data_type: int) -> str:
        """Get human-readable name for data type"""
        return f"{self.data_type_name} Format {data_type & 0x0F}"

    def _parse_intra_packet_header(self, payload: bytes, offset: int = 0) -> Optional[Dict[str, Any]]:
        """Parse common intra-packet header (IPH).

        Returns a dict with the IPH fields plus 'data_start' (the offset of
        the first byte after the 8-byte header), or None if the buffer is
        too short.
        """
        if len(payload) < offset + 8:
            return None

        try:
            # Standard IPH layout: 32-bit time, 16-bit reserved, 16-bit length,
            # all little-endian and contiguous.
            time_word, reserved_word, length_word = struct.unpack_from('<IHH', payload, offset)
        except struct.error:
            return None

        return {
            'iph_time': time_word,
            'reserved': reserved_word,
            'iph_length': length_word,
            'data_start': offset + 8,
        }

    def _safe_unpack(self, format_str: str, data: bytes, offset: int = 0) -> Optional[tuple]:
        """Safely unpack binary data with error handling.

        Returns the unpacked tuple, or None when the buffer is too short or
        the format string is invalid.
        """
        try:
            needed = struct.calcsize(format_str)
        except struct.error:
            # Malformed format string.
            return None
        if len(data) < offset + needed:
            return None
        try:
            return struct.unpack_from(format_str, data, offset)
        except struct.error:
            return None
|
||||
|
||||
|
||||
class ContainerDecoder(DataTypeDecoder):
    """Decoder for containerized data formats"""

    def decode_container(self, payload: bytes, ch10_header: Dict[str, Any]) -> List[DecodedPayload]:
        """Decode multiple embedded packets from container.

        Scans the payload for embedded CH-10 sync patterns, parses each
        embedded 24-byte header, and recursively decodes every embedded
        payload via self.decode(). Returns the list of successfully
        decoded packets (possibly empty).
        """
        decoded_packets: List[DecodedPayload] = []
        offset = 0

        while offset < len(payload):
            # Look for embedded CH-10 sync pattern
            sync_offset = self._find_sync_pattern(payload, offset)
            if sync_offset is None:
                break

            # Parse embedded header
            embedded_header = self._parse_embedded_header(payload, sync_offset)
            if not embedded_header:
                break

            # Extract embedded payload (None if it would run past the buffer)
            embedded_payload = self._extract_embedded_payload(payload, sync_offset, embedded_header)
            if embedded_payload:
                # Recursively decode embedded packet
                decoded = self.decode(embedded_payload, embedded_header)
                if decoded:
                    decoded_packets.append(decoded)

            # Move to next packet. BUGFIX: a corrupt header can report
            # packet_length == 0, which previously re-found the same sync
            # word forever (infinite loop). Always advance at least 1 byte.
            advance = embedded_header.get('packet_length', 24)
            offset = sync_offset + max(advance, 1)

        return decoded_packets

    def _find_sync_pattern(self, data: bytes, start_offset: int = 0) -> Optional[int]:
        """Find the CH-10 sync pattern (0xEB25, little-endian) in data.

        Returns the byte offset of the first match at or after start_offset,
        or None if no match exists.
        """
        sync_pattern = 0xEB25
        # range bound guarantees two bytes remain at every offset, so the
        # per-iteration length check the original carried is unnecessary.
        for offset in range(start_offset, len(data) - 1):
            word = struct.unpack_from('<H', data, offset)[0]
            if word == sync_pattern:
                return offset
        return None

    def _parse_embedded_header(self, data: bytes, offset: int) -> Optional[Dict[str, Any]]:
        """Parse embedded CH-10 header (24 bytes).

        Returns the header fields as a dict, or None if fewer than 24 bytes
        remain at offset.
        """
        if len(data) < offset + 24:
            return None

        try:
            # First 16 bytes: sync, channel id, packet length, data length,
            # data type, flags — all little-endian, no padding under '<'.
            (sync_pattern, channel_id, packet_length, data_length,
             data_type, flags) = struct.unpack_from('<HHIIHH', data, offset)
            # 48-bit relative time counter has no struct code; read it raw.
            time_counter = int.from_bytes(data[offset + 16:offset + 22], 'little')
            sequence = struct.unpack_from('<H', data, offset + 22)[0]
        except struct.error:
            return None

        return {
            'sync_pattern': sync_pattern,
            'channel_id': channel_id,
            'packet_length': packet_length,
            'data_length': data_length,
            'data_type': data_type,
            'packet_flags': flags,
            'relative_time_counter': time_counter,
            'sequence_number': sequence
        }

    def _extract_embedded_payload(self, data: bytes, header_offset: int,
                                  header: Dict[str, Any]) -> Optional[bytes]:
        """Extract payload from embedded packet.

        Returns the data_length bytes following the 24-byte embedded header,
        or None when the declared length overruns the buffer.
        """
        payload_start = header_offset + 24
        payload_length = header.get('data_length', 0)

        if payload_start + payload_length > len(data):
            return None

        return data[payload_start:payload_start + payload_length]
|
||||
Reference in New Issue
Block a user