From 4ff3f92f1345e50c081176403d3016b4965c4131 Mon Sep 17 00:00:00 2001
From: Ernegien
Date: Sun, 15 Oct 2023 20:42:15 -0500
Subject: [PATCH] Add data validation

---
 xbe/__init__.py | 52 +++++++++++++++++++++++++++++++++++++++++++++++--
 1 file changed, 50 insertions(+), 2 deletions(-)

diff --git a/xbe/__init__.py b/xbe/__init__.py
index 7971bad..d9c222f 100644
--- a/xbe/__init__.py
+++ b/xbe/__init__.py
@@ -29,6 +29,7 @@
 import logging
 import struct
 import time
+import hashlib
 
 from enum import IntFlag
 from typing import (
@@ -543,7 +544,6 @@ class InitFlags(IntFlag):
     ]
     _enummap_ = {"init_flags": InitFlags}
 
-
 class XbeImageHeaderExtendedType1(XbeImageHeader):
     """
     XBE Extended Image Header structure (Type 1)
@@ -781,10 +781,38 @@ def _init_from_data(self, data: bytes) -> None:
         assert self.header.image_header_size > ctypes.sizeof(XbeImageHeader)
 
         # FIXME: Validate magic
-        # FIXME: Validate signature/integrity
 
         log.debug("Image Header:\n%s", self.header.dumps(indent=2))
+        # FIXME: alternate modulus values for debug/chihiro
+        retail_modulus = bytes([
+            0xd3,0xd7,0x4e,0xe5, 0x66,0x3d,0xd7,0xe6, 0xc2,0xd4,0xa3,0xa1, 0xf2,0x17,0x36,0xd4,
+            0x2e,0x52,0xf6,0xd2, 0x02,0x10,0xf5,0x64, 0x9c,0x34,0x7b,0xff, 0xef,0x7f,0xc2,0xee,
+            0xbd,0x05,0x8b,0xde, 0x79,0xb4,0x77,0x8e, 0x5b,0x8c,0x14,0x99, 0xe3,0xae,0xc6,0x73,
+            0x72,0x73,0xb5,0xfb, 0x01,0x5b,0x58,0x46, 0x6d,0xfc,0x8a,0xd6, 0x95,0xda,0xed,0x1b,
+            0x2e,0x2f,0xa2,0x29, 0xe1,0x3f,0xf1,0xb9, 0x5b,0x64,0x51,0x2e, 0xa2,0xc0,0xf7,0xba,
+            0xb3,0x3e,0x8a,0x75, 0xff,0x06,0x92,0x5c, 0x07,0x26,0x75,0x79, 0x10,0x5d,0x47,0xbe,
+            0xd1,0x6a,0x52,0x90, 0x0b,0xae,0x6a,0x0b, 0x33,0x44,0x93,0x5e, 0xf9,0x9d,0xfb,0x15,
+            0xd9,0xa4,0x1c,0xcf, 0x6f,0xe4,0x71,0x94, 0xbe,0x13,0x00,0xa8, 0x52,0xca,0x07,0xbd,
+            0x27,0x98,0x01,0xa1, 0x9e,0x4f,0xa3,0xed, 0x9f,0xa0,0xaa,0x73, 0xc4,0x71,0xf3,0xe9,
+            0x4e,0x72,0x42,0x9c, 0xf0,0x39,0xce,0xbe, 0x03,0x76,0xfa,0x2b, 0x89,0x14,0x9a,0x81,
+            0x16,0xc1,0x80,0x8c, 0x3e,0x6b,0xaa,0x05, 0xec,0x67,0x5a,0xcf, 0xa5,0x70,0xbd,0x60,
+            0x0c,0xe8,0x37,0x9d, 0xeb,0xf4,0x52,0xea, 0x4e,0x60,0x9f,0xe4, 0x69,0xcf,0x52,0xdb,
+            0x68,0xf5,0x11,0xcb, 0x57,0x8f,0x9d,0xa1, 0x38,0x0a,0x0c,0x47, 0x1b,0xb4,0x6c,0x5a,
+            0x53,0x6e,0x26,0x98, 0xf1,0x88,0xae,0x7c, 0x96,0xbc,0xf6,0xbf, 0xb0,0x47,0x9a,0x8d,
+            0xe4,0xb3,0xe2,0x98, 0x85,0x61,0xb1,0xca, 0x5f,0xf7,0x98,0x51, 0x2d,0x83,0x81,0x76,
+            0x0c,0x88,0xba,0xd4, 0xc2,0xd5,0x3c,0x14, 0xc7,0x72,0xda,0x7e, 0xbd,0x1b,0x4b,0xa4
+        ])
+
+        # Validate header signature
         self.header_data = data[0 : self.header.headers_size]
+        header_data_digest = calc_digest(self.header_data, 260, self.header.headers_size - 260)
+        key_val = int.from_bytes(retail_modulus, byteorder='little', signed=False)
+        sig_val = int.from_bytes(bytes(self.header.signature), byteorder='little', signed=False)
+        decrypted_sig_val = pow(sig_val, 65537, key_val)
+        decrypted_sig_bytes = decrypted_sig_val.to_bytes((decrypted_sig_val.bit_length() + 7) // 8, byteorder='little', signed=False)
+        self.header.signature_valid = decrypted_sig_bytes[:len(header_data_digest)] == header_data_digest[::-1]
+        if not self.header.signature_valid:
+            log.warning("Header signature invalid.")
 
         # Load debug pathname, filename
         self.pathname = self.get_cstring_from_offset(
@@ -862,6 +890,11 @@ def _init_from_data(self, data: bytes) -> None:
             log.debug("Section %d: %s\n%s", i, sec_name, sec_hdr.dumps(indent=2))
             sec_hdr_offset += ctypes.sizeof(XbeSectionHeader)
 
+            # Validate section data
+            sec_hdr.digest_valid = bytes(sec_hdr.digest) == calc_digest(sec_data, 0, len(sec_data))
+            if not sec_hdr.digest_valid:
+                log.warning("Section digest invalid.")
+
         # Load certificate
         cert_offset = self.vaddr_to_file_offset(self.header.certificate_addr)
         log.debug("Parsing image certificate at offset %#x", cert_offset)
@@ -1508,3 +1541,18 @@ def encode_bmp(w: int, h: int, pixels: List["RGBA"]) -> bytes:
     )
 
     return hdr + enc
+
+def calc_digest(data: bytes, offset: int, size: int) -> bytes:
+    sha1 = hashlib.sha1()
+
+    # Convert the size to bytes ('<I' = fixed 4-byte little-endian, per the XBE convention; native 'I' would break on big-endian hosts)
+    size_bytes = struct.pack('<I', size)
+
+    # Update the hash with the size bytes
+    sha1.update(size_bytes)
+
+    # Update the hash with the actual data
+    sha1.update(data[offset:offset+size])
+
+    # Get the final SHA-1 hash
+    return sha1.digest()