#!/bin/python3
# .geo model file reader/writer.
# NOTE(review): in the corrupted copy the shebang comment had swallowed the
# import statements (newlines were collapsed), so the module never imported
# anything.  Restored here: one import per original line, stdlib first.
import struct
import zlib
import sys
import traceback
import math

from bones import *

#Ver 0 .geo pre-header:
#Offset Size Description
#0 4 'ziplen' ? Compressed header data size + 4
#4 4 Header size (Must be non-zero for old formats.)
#8 variable zlib Compressed header data
#variable 4 Dummy data?
#Version 2+ .geo pre-header:
#Offset Size Description
#0 4 'ziplen' ? Compressed header data size + 12
#4 4 Legacy header size (Must be 0 for new file formats.)
#8 4 Version number 2 to 5, and 7 to 8
#12 4 Header size
#16 variable zlib Compressed header data
#.geo header
#Offset Size Description
#0 4 i32: gld->datasize
#4 4 i32: texname_blocksize
#8 4 i32: objname_blocksize
#12 4 i32: texidx_blocksize
#16 4 i32: lodinfo_blocksize (only present in version 2 to 6)
#? texname_blocksize PackNames block of texture names
#? objname_blocksize objnames
#? texidx_blocksize texidx
#? lodinfo_blocksize lodinfo (only present in version 2 to 6)
#? ModelHeader ???
#PackNames block:
#Size Description
#4 name_count
#4*name_count offset of the name, relative to the start of name_block
#variable name_block (ASCIIZ?)
#ModelHeader:
#Size Description
#124 ASCIIZ name.
#4 Model pointer (0 in file?)
#4 Float track length (technically a fallback if not specified elsewhere?)
#4 Pointer to an array of Model pointers (-1 in file?)
#*note: pointers aren't really needed.
#Model:
#version 0 to 2: (struct ModelFormatOnDisk_v2: "Common/seq/anim.c" )
#Offset Size Description
#0 4 u32: Flags
#4 4 f32: radius
#8 4 ptr: VBO?
#12 4 i32: tex_count
#16 2 i16: id ("I am this bone" ?)
#18 1 u8: blend_mode
#19 1 u8: loadstate
#20 4 ptr(BoneInfo)
#24 4 ptr(TrickNode)
#28 4 i32: vert_count
#32 4 i32: tri_count
#36 4 ptr: TexID
#40 32 PolyGrid: polygrid
#72 4 ptr(CTri): ctris ?
#76 4 ptr(i32): tags
#80 4 ptr(char): name
#84 4 ptr(AltPivotInfo): api
#88 4 ptr(ModelExtra): extra
#92 12 Vec3: scale
#104 12 Vec3: min
#116 12 Vec3: max
#120 4 ptr(GeoLoadData): gld
#124 84 PackBlockOnDisk
#208
#version 3+: "Common/seq/anim.c"
# Offset Size Description
# 0 4 i32: size
# 4 4 f32: radius
# 8 4 i32: tex_count
# 12 4 ptr(BoneInfo): boneinfo ?
# 16 4 i32: vert_count
# 20 4 i32: tri_count
#ver 8+ 24 4 i32: reflection_quad_count
# +4 4 i32: tex_idx
# +4 32 PolyGrid: grid
# +32 4 ptr(char): name
# +4 4 ptr(AltPivotInfo):
# +4 12 Vec3: scale
# +12 12 Vec3: min
# +12 12 Vec3: max
# +12 12 PackData: pack.tris
# +12 12 PackData: pack.verts
# +12 12 PackData: pack.norms
# +12 12 PackData: pack.sts
# +12 12 PackData: pack.sts3
# +12 12 PackData: pack.weights
# +12 12 PackData: pack.matidxs
# +12 12 PackData: pack.grid
#Ver 4 +12 12 PackData: pack.lmap_utransforms
#Ver 4 +12 12 PackData: pack.lmap_vtransforms
#Ver 7+ +12 12 PackData: pack.reductions
#Ver 8+ +12 12 PackData: pack.reflection_quads
#Ver 7+ +12 12 f32[3]: autolod_dists
# +12 2 i16: id
#PackBlockOnDisk: "Common/seq/anim.c"
#Offset Size Description
#0 12 PackData: tris
#12 12 PackData: verts
#24 12 PackData: norms
#36 12 PackData: sts
#48 12 PackData: weights
#60 12 PackData: matidxs
#72 12 PackData: grid
#84
#PackData:
#Offset Size Description
#0 4 i32: packsize, The compressed size of this data block. 0 if this is uncompressed.
#4 4 u32: unpacksize, The size of this data block when uncompressed.
#8 4 ptr(u8): data, The offset of this block of data inside the .geo's main data block.
#12
#struct PolyGrid: "libs/UtilitiesLib/components/gridpoly.h"
#Offset Size Description
#0 4 ptr(PolyCell): cell
#4 12 Vec3: pos
#16 4 f32: size
#20 4 f32: inv_size
#24 4 i32: tag
#28 4 i32: num_bits
#32
#PolyCell:
#Offset Size Description
#0 4 ptr(ptr(PolyCell)): children
#4 4 ptr(u16): tri_idxs, Triangle indexes.
# NOTE(review): CORRUPTED REGION -- this file has been damaged by HTML-tag
# stripping: every span from a literal '<' to the next '>' was deleted, which
# destroyed all little-endian struct format strings ("<i", "<fff", ...) AND all
# code between two such strings; original newlines were also collapsed.  The
# two long lines below are preserved byte-for-byte.  Restore from version
# control rather than editing by hand.
# Visible contents of this region:
#  - tail of the BoneInfo/Reductions format-spec comments
#  - ZERO_BYTE constant and the Python-2/3 byte()/unbyte() compatibility shims
#    (these appear intact)
#  - unpackNames(): truncated at its first struct.unpack; the whole of a
#    storeString() "def" line was lost in the stripped span -- only its
#    "if len(string) >= length" body survives
#  - uncompressDeltas(): docstring intact; body truncated at the code==3
#    struct.unpack.  The statements that follow (building "(0, 0, o)" tuples
#    from "starts") belong to one or more lost helper functions, presumably a
#    PackData-offset builder -- TODO confirm against the original.
#  - class PolyCell: __init__ intact; decode() truncated mid-struct.unpack.
#  - an unnamed data-buffer class fragment: its write/extend logic survives;
#    note that truncate() computes "self.data[0 : offset]" and DISCARDS the
#    result (no-op) -- likely meant "self.data = self.data[0 : offset]".
#  - class Reductions: __init__ intact; decode() truncated ("data.decode(" is
#    almost certainly a corrupted getElement/unpack call).
#8 4 i32: tri_count #BoneInfo #Offset Size Description #0 4 i32: numbones #4 4*15 i32[15]: bone_ID, Bones used by this geometry. #64 #Reductions #Size Description #4 int: num_reductions #num_reductions*4 int[]:num_tris_left #num_reductions*4 int[]:error_values #num_reductions*4 int[]:remap_counts #num_reductions*4 int[]:changes_counts #4 int: total_remaps #total_remaps*4 int[]:remaps #4 int: total_remap_tris #total_remap_tris*4 int[]:remap_tris #4 int: total_changes #total_changes*4 int[]:changes #4 int: positions_delta_length #positions_delta_length byte[]: compressDelta(positions, 3, total_changes, "f", 0x8000) #4 int: total_changes_delta_length #total_changes_delta_length byte[]: compressDelta(changes, 2, total_changes, "f", 0x1000) ZERO_BYTE = struct.pack("B", 0) #def unbyte(v): # return struct.unpack("B", v)[0] if sys.version_info[0] < 3: byte = chr unbyte = ord else: def byte(v): #return struct.pack("B", v) return bytes((v,)) def unbyte(v): return v def unpackNames(data): """Extract strings from data, as layed out as a PackNames structure.""" (length, ) = struct.unpack("= length: return string[0 : length - 1] + ZERO_BYTE else: return string + ZERO_BYTE * (length - len(string)) def uncompressDeltas(src, stride, count, pack_type): """Expand a list a delta compressed with deltas src: Source data containg 3 parts: - delta codes: Contain 'count' * 'stride' 2 bit size code fields. The end of the block is padded to the byte boundary. - float scale: A byte with the exponent for scaling floating point deltas. This part is present with integer types, but unused. - byte data: This is read and processed according to the codes specified in the delta codes block. stride: the number of channels in the data (1 to 3) count: the number off elements in the data pack_type: Is a character indicating the type of packing. Valid values are "f" (float32), "H" (unsigned16), "I" (unsigned32) (these match the struct modules types). 
The returned data is an array of arrays, with the inner most arrays being 'stride' long, and the outer array being 'count' in length. """ if len(src) <= 0: return None #Compute offset to the byte data after all the bit fields. byte_offset = int((2 * count * stride + 7) / 8) #print("stride: %d count: %d pack_type: %s" % (stride, count, pack_type)) #print("src:%s" % ([src],)) float_scale = float(1 << unbyte(src[byte_offset])) float_scale_inv = 1.0 / float_scale byte_offset += 1 current_bit = 0 if pack_type in "f": fLast = [0.0] * stride else: iLast = [0] * stride out_data = [] for i in range(count): row_data = [] for j in range(stride): code = (unbyte(src[current_bit >> 3]) >> (current_bit & 0x7)) & 0x3 current_bit += 2 if code == 0: iDelta = 0 elif code == 1: iDelta = unbyte(src[byte_offset]) - 0x7f byte_offset += 1 elif code == 2: iDelta = ( unbyte(src[byte_offset]) | (unbyte(src[byte_offset + 1]) << 8) ) - 0x7fff byte_offset += 2 elif code == 3: (iDelta, ) = struct.unpack("= len(starts): output.append( (0, 0, o) ) continue output.append( (0, starts[i] - o, o) ) return tuple(output) class PolyCell: def __init__(self, model): self.model = model self.children = None self.tri_idxs = [] self.tri_count = 0 def decode(self, data, offset): (children_offset, tri_idxs_offset, self.tri_count) = struct.unpack(" len(self.data): self.data += ZERO_BYTE * (self.offset - len(self.data)) self.data += data self.offset = len(self.data) elif self.offset + len(data) >= len(self.data): self.data = self.data[0 : self.offset] + data else: self.data = self.data[0 : self.offset] + data + self.data [self.offset + len(data) : ] def truncate(self, offset = None): if offset is None: offset = self.offset self.data[0 : offset] class Reductions: def __init__(self, model): self.model = model def decode(self, data): (self.num_reductions, ) = data.decode("= 8: (self.reflection_quad_count, ) = self.geo.getHeaderElement("= 7: self.pack_reductions = self.geo.getHeaderElement("= 8: 
# NOTE(review): CORRUPTED REGION (HTML-tag stripping deleted every '<'..'>'
# span, including struct format strings and the code between them; newlines
# collapsed).  Preserved byte-for-byte below.  Visible contents:
#  - tail of Model.decode: getHeaderElement reads for reflection_quad_count,
#    pack_reductions, pack_reflection_quads, autolod_dists (format strings lost)
#  - Model.rebuildWeightsAndBones: largely intact.  Grounded behavior: clamps
#    to at most 2 weights per vertex; a single weight or duplicate bone pair is
#    stored as byte 255 + (bone*3, 0); a zero weight pair is forced to
#    0.5/0.5 before normalizing; w[0] is normalized and clamped to [0,1] and
#    quantized with floor(255*w + 0.5); bone_ids is padded to 15 entries.
#  - Model.encode: packs tris/verts/norms/sts/sts3 via
#    encodeMainDataPackedDeltas (note sts/sts3 use len(self.verts) as count),
#    then weights/matidxs/grid/reductions/reflection_quads via
#    encodeMainDataPacked, then BoneInfo as "<" + "i"*16.
#  - texidx_ptr and api_ptr are explicitly unhandled (printed warnings) and
#    written as 0.
#  - BUG(review): "if self.reflection_quads is None or len(self.reflection_quads):"
#    takes the count=0 branch for a NON-empty list, while the else branch
#    (reachable only when the list exists and is empty) assigns len(...)==0 --
#    so reflection_quads_count is ALWAYS 0.  The condition was presumably
#    meant to be "len(self.reflection_quads) == 0".  TODO confirm upstream
#    before fixing; this region cannot be safely edited while corrupted.
self.pack_reflection_quads = self.geo.getHeaderElement("= 7: self.autolod_dists = list(self.geo.getHeaderElement(" 2: #.geos only support 2 weights per vertex. #todo: get only largest weights? w = w[0 : 2] wb = wb[0 : 2] if len(w) == 1 or wb[0] == wb[1]: self.weights_data += byte(255) self.matidxs_data += byte(bone_lookup[wb[0]] * 3) + ZERO_BYTE continue if w[0] + w[1] == 0: w[0] = 0.5 w[1] = 0.5 w[0] = w[0] / float(w[0] + w[1]) if w[0] < 0: w[0] = 0.0 elif w[0] > 1: w[0] = 1.0 #print("weights: %s -> %s" % (self.weights[i], w)) self.weights_data += byte(int(math.floor(255 * w[0] + 0.5))) self.matidxs_data += byte(bone_lookup[wb[0]] * 3) +byte(bone_lookup[wb[1]] * 3) self.bone_count = len(self.bone_ids) self.bone_ids += [0] * (15 - self.bone_count) def encode(self): #Regenerate dynamic data #todo: build PolyGrid if self.geo.version >= 7: #todo: build reductions if self.reductions is not None: self.reductions_data = self.reductions.encode() else: self.reductions_data = b"" pass self.rebuildWeightsAndBones() #Encode data into the main block. #note: PackData should go first, otherwise other ptr types might point to the start of the data block, which would result in a 0 point, which is treated as not present. 
self.pack_tris = self.geo.encodeMainDataPackedDeltas(self.tris, 3, len(self.tris), "I", 1) self.pack_verts = self.geo.encodeMainDataPackedDeltas(self.verts, 3, len(self.verts), "f", 0x8000) self.pack_norms = self.geo.encodeMainDataPackedDeltas(self.norms, 3, len(self.norms), "f", 0x100) self.pack_sts = self.geo.encodeMainDataPackedDeltas(self.sts, 2, len(self.verts), "f", 0x1000) self.pack_sts3 = self.geo.encodeMainDataPackedDeltas(self.sts3, 2, len(self.verts), "f", 0x8000) self.pack_weights = self.geo.encodeMainDataPacked(self.weights_data) self.pack_matidxs = self.geo.encodeMainDataPacked(self.matidxs_data) self.pack_grid = self.geo.encodeMainDataPacked(self.grid_data) self.pack_reductions = self.geo.encodeMainDataPacked(self.reductions_data) self.pack_reflection_quads = self.geo.encodeMainDataPacked(self.reflection_quads_data) bone_data = struct.pack("<" + "i" * (1 + 15), self.bone_count, *self.bone_ids) self.boneinfo_ptr = self.geo.encodeMainData(bone_data) #Encode shared header data print("unhandled: model.texidx_ptr !") self.texidx_ptr = 0 self.name_ptr = len(self.geo.header_objname_data) self.geo.header_objname_data += self.name + ZERO_BYTE self.geo.header_objnames.append(self.name) print("unhandled: model.api_ptr !") self.api_ptr = 0 #Encode the header self.header_data = b"" self.header_data += struct.pack("= 8: if self.reflection_quads is None or len(self.reflection_quads): self.reflection_quads_count = 0 else: self.reflection_quads_count = len(self.reflection_quads) self.header_data += struct.pack("= 7: self.header_data += struct.pack("= 8: self.header_data += struct.pack("= 2 and self.version <= 6: (self.lodinfo_blocksize, ) = struct.unpack("= 2 and self.version <= 6: self.header_data += struct.pack("= 2 and self.version <= 6: self.header_data += self.lodinfo_data #Encode the main model header. 
# NOTE(review): CORRUPTED REGION (same tag-stripping damage as above);
# preserved byte-for-byte.  Visible contents:
#  - tail of Geo's header-encoding method: packs the 124-byte ModelHeader name
#    via storeString plus tracklength/pointer fields (format strings lost).
#  - Geo.encodeMainData / encodeMainDataPacked fragment: returns PackData-style
#    tuples (packsize, unpacksize, offset); a data=None input yields (0,0,0).
#  - Geo.encodeMainDataPackedDeltas: compressDeltas (definition lost in the
#    stripped span) then encodeMainDataPacked; intact apart from that
#    dependency.
#  - Geo.dump / seekMainData / skipMainData / getMainElement / getElement:
#    appear fully intact.  skipMainData accepts either an int byte count or a
#    struct format string (struct.calcsize); getElement reads at an absolute
#    offset without advancing main_data_offset, getMainElement advances it.
#  - __main__ driver: arg1 = input .geo (read+dump), optional arg2 = output
#    (re-encode via saveToFile).  The usage line lost its placeholders to tag
#    stripping: print(" %s []") was presumably
#    print("  %s <input.geo> [<output.geo>]" % sys.argv[0]) -- as written the
#    %s is literal and never substituted.  TODO restore from upstream.
self.header_data += storeString(self.header_modelheader_name, 124) self.header_data += struct.pack("= len(data): self.main_data += data return (0, len(data), o) else: self.main_data += d return (len(d), len(data), o) def encodeMainDataPackedDeltas(self, data, stride, count, pack_type, float_scale): if data is None: return (0, 0, 0) deltas = compressDeltas(data, stride, count, pack_type, float_scale) pack = self.encodeMainDataPacked(deltas) return pack def dump(self): print("version: %d" % self.version) print("header_size: expected: %d actual: %d" % (self.header_size, len(self.header_data))) #print("header_data: %s" % [self.header_data]) print("main_data_size: %d" % self.main_data_size) print("header_data sizes: texname: %d objname: %d texidx: %d lodinfo: %d" % (self.texname_blocksize, self.objname_blocksize, self.texidx_blocksize, self.lodinfo_blocksize)) print("header_texname_data: %s" % [self.header_texname_data]) print("header_texnames: %s" % [self.header_texnames]) print("header_objname_data: %s" % [self.header_objname_data]) print("header_objnames: %s" % [self.header_objnames]) print("header_texidx_data: %s" % [self.header_texidx_data]) print("header_lodinfo_data: %s" % [self.header_lodinfo_data]) print("header_modelheader_name: %s" % [self.header_modelheader_name]) print("header_modelheader_tracklength: %s" % self.header_modelheader_tracklength) print("header_modelheader_modelcount: %s" % self.header_modelheader_modelcount) print("header_modelheader_data: %s" % [self.header_modelheader_data[124:]]) print("header remaining: %d" % ((len(self.header_data) - self.header_offset), )) print("header remaining: %s" % ([self.header_data[self.header_offset:]], )) #%d objname: %d texidx: %d lodinfo: %d" % (self.texname_blocksize, self.objname_blocksize, self.texidx_blocksize, self.lodinfo_blocksize) for i in range(len(self.models)): print("Model %d:" % (i, )) self.models[i].dump() def seekMainData(self, offset): self.main_data_offset = offset def skipMainData(self, 
skip): if type(skip) is int: self.main_data_offset += skip else: self.main_data_offset += struct.calcsize(skip) def getMainElement(self, fmt): size = struct.calcsize(fmt) data = struct.unpack(fmt, self.main_data[self.main_data_offset : self.main_data_offset + size]) self.main_data_offset += size return data def getElement(self, offset, fmt): size = struct.calcsize(fmt) data = struct.unpack(fmt, self.main_data[offset : offset + size]) return data if __name__ == "__main__": if len(sys.argv) <= 1: print("Usage:") print(" %s []") print("Test loads a .geo file, dumps its content, and optionally writes its content out.") exit(0) fh = open(sys.argv[1], "rb") print(sys.argv) if len(sys.argv) <= 2: fho = None else: fho = open(sys.argv[2], "wb") geo = Geo() geo.loadFromFile(fh) if fho is not None: #geo.dump() geo.saveToFile(fho) else: geo.dump() #print("%s" % [geo.header_data])