diff --git a/.gitignore b/.gitignore
index 2ae7d1c..701ed28 100644
--- a/.gitignore
+++ b/.gitignore
@@ -59,8 +59,8 @@ textures/*
 # vscode files
 .vscode/
 
-# We only want to store .mbin files, any exml files generated during testing should be ignored...
-*.EXML
+# We only want to store .mbin files, any mxml files generated during testing should be ignored...
+*.MXML
 
 # pytest cache files
 .pytest_cache/
diff --git a/ModelExporter/Descriptor.py b/ModelExporter/Descriptor.py
index 747c13e..49b183c 100644
--- a/ModelExporter/Descriptor.py
+++ b/ModelExporter/Descriptor.py
@@ -38,10 +38,10 @@ def __str__(self):
     def __len__(self):
         return len(self.children)
 
-    def to_exml(self):
+    def to_mxml(self):
         main_data = List()
         for child in self.children:
-            main_data.append(child.to_exml())
+            main_data.append(child.to_mxml())
         return TkModelDescriptorList(List=main_data)
 
@@ -62,11 +62,11 @@ def __str__(self):
             out += str(child)
         return out
 
-    def to_exml(self):
+    def to_mxml(self):
         # converts this to a TkResourceDescriptorList object
         descriptors = List()
         for child in self.children:
-            descriptors.append(child.to_exml())
+            descriptors.append(child.to_mxml())
         return TkResourceDescriptorList(TypeId=self.TypeId,
                                         Descriptors=descriptors)
 
@@ -96,7 +96,7 @@ def __str__(self):
             out += str(child)
         return out
 
-    def to_exml(self):
+    def to_mxml(self):
         # first, we need to do some processing on the name.
         # if the name starts with the prefix, then we need to sort it out so
         # that it is correct
@@ -132,7 +132,7 @@ def not_proc(ob):
         refs = List()
         children = List()
         for child in self.children:
-            children.append(child.to_exml())
+            children.append(child.to_mxml())
         # Check how many children there are so we aren't making empty
         # TkModelDescriptorList's for nothing.
         if len(children) == 0:
diff --git a/ModelExporter/addon_script.py b/ModelExporter/addon_script.py
index d34e0b1..65c8aab 100644
--- a/ModelExporter/addon_script.py
+++ b/ModelExporter/addon_script.py
@@ -11,7 +11,7 @@ from idprop.types import IDPropertyGroup
 from mathutils import Matrix, Vector
 
 # Internal imports
-from ModelExporter.utils import get_surr, calc_tangents
+from ModelExporter.utils import calc_tangents
 from utils.misc import CompareMatrices, get_obj_name
 from utils.image_convert import convert_image
 from ModelExporter.animations import process_anims
@@ -19,20 +19,37 @@ from ModelExporter.Descriptor import Descriptor
 from NMS.classes import (TkMaterialData, TkMaterialFlags,
                          TkVolumeTriggerType, TkMaterialSampler,
-                         TkTransformData, TkMaterialUniform,
-                         TkRotationComponentData, TkPhysicsComponentData)
+                         TkMaterialUniform, TkRotationComponentData, TkPhysicsComponentData)
 from NMS.classes import TkAnimationComponentData, TkAnimationData
 from NMS.classes import List, Vector4f
 from NMS.classes import TkAttachmentData
-from NMS.classes import (Model, Mesh, Locator, Reference, Collision, Light,
-                         Joint)
+from NMS.classes.Object import Object, Model, Mesh, Locator, Reference, Collision, Light, Joint
 from NMS.LOOKUPS import MATERIALFLAGS
 from ModelExporter.ActionTriggerParser import ParseNodes
+from serialization.NMS_Structures.Structures import TkTransformData
+
+import numpy as np
 
 ROT_X_MAT = Matrix.Rotation(radians(-90), 4, 'X')
 
 
+def triangulate_mesh_new(mesh):
+    """ Triangulate the provided mesh.
+
+    Note
+    ----
+    This will modify the original mesh. To avoid this you should ALWAYS pass in
+    a temporary mesh object and do manipulations on this.
+ """ + bm = bmesh.new() + bm.from_mesh(mesh) + bmesh.ops.triangulate(bm, faces=bm.faces, ngon_method='EAR_CLIP') + bm.to_mesh(mesh) + bm.free() + del bm + + def triangulate_mesh(mesh): """ Triangule the provided mesh. @@ -44,7 +61,6 @@ def triangulate_mesh(mesh): bm = bmesh.new() bm.from_mesh(mesh) data = bmesh.ops.triangulate(bm, faces=bm.faces, ngon_method='EAR_CLIP') - faces = data['faces'] face_mapping = data['face_map'] # This face mapping should be able to be used to map the new triangles back # to the original polygons so that we can group them. When grouping we need @@ -236,7 +252,7 @@ def __init__(self, output_directory, export_directory='CUSTOMMODELS', entitydata = ParseNodes() entity = TkAttachmentData(Components=List(entitydata)) entity.make_elements(main=True) - entity.tree.write('{}.ENTITY.exml'.format( + entity.tree.write('{}.ENTITY.mxml'.format( op.join(self.output_directory, self.export_dir, group_name, scene_name))) self.state = {'FINISHED'} @@ -555,6 +571,32 @@ def mesh_parser(self, ob, is_coll_mesh: bool = False): if uvcount < 1: raise Exception(f"Object {ob.name} missing UV map") + # is_triangulated = all([len(poly.vertices) == 3 for poly in data.polygons]) + # if not is_triangulated: + # triangulate_mesh_new(data) + + # _num_verts = len(data.vertices) + _num_indexes = len(data.loops) + + # np_verts = np.empty((3 * _num_verts, 1)) + # data.vertices.foreach_get("co", np_verts) + np_indexes = np.empty((_num_indexes, ), dtype=np.uint32) + data.loops.foreach_get("vertex_index", np_indexes) + # if not is_coll_mesh: + # np_uvs = np.empty((2 * _num_indexes, 1)) + # data.uv_layers.active.data.foreach_get("uv", np_uvs) + # np_uvs = np_uvs.reshape((_num_indexes, 2)) + # np_uvs[..., 1] = 1 - np_uvs[..., 1] + # np_uvs = np.hstack((np_uvs, np.zeros((_num_indexes, 1)), np.ones((_num_indexes, 1)))) + + # np_verts.reshape((_num_verts, 3)) + # np_verts = np.hstack((np_verts, np.ones((_num_verts, 1)))) + + # print(np_verts) + # print(np_indexes) + # if not is_coll_mesh: + # print(np_uvs) + # Lists _num_verts = len(data.vertices) indexes = [] @@ -753,7 +795,7 @@ def mesh_parser(self, ob, is_coll_mesh: bool = False): print(f'Exported collisions with {len(verts)} verts, ' f'{len(indexes)} indexes') - return verts, normals, tangents, uvs, indexes, chverts, colours + return verts, normals, tangents, uvs, indexes, chverts, colours, np_indexes def recurce_entity(self, parent, obj, list_element=None, index=0): # this will return the class object of the property recursively @@ -792,7 +834,7 @@ def recurce_entity(self, parent, obj, list_element=None, index=0): properties[prop] = self.recurce_entity(prop_group, prop) return cls(**properties) - def parse_object(self, ob, parent): + def parse_object(self, ob, parent: Object): newob = None # Some objects (eg. imported bounded hulls) shouldn't be exported. # If the object has this custom property then ignore it. @@ -823,7 +865,6 @@ def parse_object(self, ob, parent): # let's first sort out any entity data that is specified: if ob.NMSMesh_props.has_entity or ob.NMSLocator_props.has_entity: - print('this has an entity:', ob) # we need to pull information from two places: # ob.NMSEntity_props # check to see if the mesh's entity will get the action trigger @@ -881,8 +922,10 @@ def parse_object(self, ob, parent): # The object will have its name in the scene so that any data # required can be linked up. This name will be overwritten by the # exporter to be the path name of the scene. 
@@ -881,8 +922,10 @@ def parse_object(self, ob, parent):
         # The object will have its name in the scene so that any data
         # required can be linked up. This name will be overwritten by the
         # exporter to be the path name of the scene.
-        optdict = {'Name': get_obj_name(ob, None),
-                   'orig_node_data': orig_node_data}
+        optdict = {
+            'Name': get_obj_name(ob, None),
+            'orig_node_data': orig_node_data,
+        }
 
         # Let's do a check on the values of the scale and the dimensions.
         # We can have it so that the user can apply scale, even if by
@@ -920,7 +963,7 @@ def parse_object(self, ob, parent):
                 # We'll give them some "fake" vertex data which consists of
                 # no actual vertex data, but an index that doesn't point to
                 # anything.
-                verts, norms, tangs, luvs, indexes, chverts, _ = self.mesh_parser(ob, True)  # noqa
+                verts, norms, tangs, luvs, indexes, chverts, _, np_indexes = self.mesh_parser(ob, True)
 
                 # Reset Transforms on meshes
 
@@ -929,6 +972,7 @@ def parse_object(self, ob, parent):
                 optdict['Normals'] = norms
                 optdict['Tangents'] = tangs
                 optdict['CHVerts'] = chverts
+                optdict['np_indexes'] = np_indexes
             # Handle Primitives
             elif colType == "Box":
                 optdict['Width'] = dims[0] / factor[0]
@@ -954,7 +998,7 @@ def parse_object(self, ob, parent):
         elif ob.NMSNode_props.node_types == 'Mesh':
             # ACTUAL MESH
             # Parse object Geometry
-            verts, norms, tangs, luvs, indexes, chverts, colours = self.mesh_parser(ob)  # noqa
+            verts, norms, tangs, luvs, indexes, chverts, colours, np_indexes = self.mesh_parser(ob)
 
             # check whether the mesh has any child nodes we care about (such as
             # a rotation vector)
@@ -971,8 +1015,7 @@ def parse_object(self, ob, parent):
                 entitydata.append(rotation_data)
 
             # Create Mesh Object
-            actualname = get_obj_name(ob, None)
-            newob = Mesh(Name=actualname,
+            newob = Mesh(Name=get_obj_name(ob, None),
                          Transform=transform,
                          Vertices=verts,
                          UVs=luvs,
@@ -983,7 +1026,8 @@ def parse_object(self, ob, parent):
                          Colours=colours,
                          ExtraEntityData=entitydata,
                          HasAttachment=ob.NMSMesh_props.has_entity,
-                         orig_node_data=orig_node_data)
+                         orig_node_data=orig_node_data,
+                         np_indexes=np_indexes)
 
             # Check to see if the mesh's entity will be animation controller,
             # if so assign to the anim_controller_obj variable.
@@ -991,8 +1035,7 @@ def parse_object(self, ob, parent):
                     ob.NMSMesh_props.has_entity):
                 # tuple, first entry is the name of the entity, the second is
                 # the actual mesh object...
-                self.anim_controller_obj = (ob.NMSEntity_props.name_or_path,
-                                            newob)
+                self.anim_controller_obj = (ob.NMSEntity_props.name_or_path, newob)
 
         # Try to parse material
         if ob.NMSMesh_props.material_path != "":
diff --git a/ModelExporter/export.py b/ModelExporter/export.py
index 2ee42b7..5024848 100644
--- a/ModelExporter/export.py
+++ b/ModelExporter/export.py
@@ -2,7 +2,7 @@
 """Process the 3d model data and create required files for NMS.
 
 This function will take all the data provided by the blender script and create
-a number of .exml files that contain all the data required by the game to view
+a number of .mxml files that contain all the data required by the game to view
 the 3d model created.
""" @@ -12,17 +12,28 @@ # Blender imports import bpy +import numpy as np + # stdlib imports import os import subprocess from collections import OrderedDict as odict from array import array +import struct +from itertools import accumulate # Internal imports -from NMS.classes import (TkAttachmentData, TkGeometryData, List, - TkVertexElement, TkVertexLayout, Vector4f) +from NMS.classes import TkAttachmentData, TkGeometryData from NMS.LOOKUPS import SEMANTICS, REV_SEMANTICS, STRIDES +from NMS.classes.Object import Model +from serialization.NMS_Structures import MBINHeader +from serialization.NMS_Structures.Structures import ( + TkMeshData, TkGeometryStreamData, TkVertexLayout, TkVertexElement, TkMeshMetaData +) +from serialization.NMS_Structures.Structures import ( + TkGeometryData as TkGeometryData_new, +) from serialization.mbincompiler import mbinCompiler -from serialization.StreamCompiler import StreamData, TkMeshMetaData +from serialization.StreamCompiler import StreamData from serialization.serializers import (serialize_index_stream, serialize_vertex_stream) from ModelExporter.utils import nmsHash, traverse @@ -54,7 +65,7 @@ class Export(): A dictionaty containing various export settings. These will generally be set by the blender export helper. """ - def __init__(self, export_directory, scene_directory, scene_name, model, + def __init__(self, export_directory, scene_directory, scene_name, model: Model, anim_data=dict(), descriptor=None, settings=dict()): self.export_directory = export_directory self.scene_directory = scene_directory.upper() @@ -89,6 +100,8 @@ def __init__(self, export_directory, scene_directory, scene_name, model, self.hashes = odict() self.mesh_names = list() + self.np_index_data = np.array([], dtype=np.uint32) + # Make some settings values more easily accessible. self.preserve_node_info = self.settings.get('preserve_node_info', False) @@ -98,14 +111,21 @@ def __init__(self, export_directory, scene_directory, scene_name, model, # a list of any extra properties to go in each entity # self.Entities = [] + self.np_indexes: list[np.ndarray] = [] + self.np_index_lenths = [] + self.np_index_maxs = [] + # extract the streams from the mesh objects. 
         # extract the streams from the mesh objects.
-        for mesh in self.Model.Meshes.values():
+        for i, mesh in enumerate(self.Model.Meshes.values()):
             self.mesh_names.append(mesh.Name)
             self.index_stream[mesh.Name] = mesh.Indexes
             self.vertex_stream[mesh.Name] = mesh.Vertices
             self.uv_stream[mesh.Name] = mesh.UVs
             self.n_stream[mesh.Name] = mesh.Normals
             self.t_stream[mesh.Name] = mesh.Tangents
+            self.np_indexes.append(mesh.np_indexes)
+            self.np_index_lengths.append(mesh.np_indexes.size)
+            self.np_index_maxs.append(max(mesh.np_indexes) + 1)
             if self.Model.has_vertex_colours:
                 if mesh.Colours is None:
                     self.c_stream[mesh.Name] = (
@@ -120,24 +140,25 @@ def __init__(self, export_directory, scene_directory, scene_name, model,
             if mesh.Material is not None:
                 self.materials.add(mesh.Material)
 
+        if sum(self.np_index_maxs) > 0xFFFF:
+            self.Indices16Bit = 0
+        else:
+            self.Indices16Bit = 1
+
         # for obj in self.Model.ListOfEntities:
         #     self.Entities.append(obj.EntityData)
 
         self.num_mesh_objs = len(self.Model.Meshes)
 
         # generate some variables relating to the paths
-        self.basepath = os.path.join(self.export_directory,
-                                     self.scene_directory)
-        self.texture_path = os.path.join(self.basepath, self.scene_name,
-                                         'TEXTURES')
+        self.basepath = os.path.join(self.export_directory, self.scene_directory)
+        self.texture_path = os.path.join(self.basepath, self.scene_name, 'TEXTURES')
         self.anims_path = os.path.join(self.basepath, 'ANIMS')
         # path location of the entity folder. Calling makedirs of this will
         # ensure all the folders are made in one go
-        self.ent_path = os.path.join(self.basepath, self.scene_name,
-                                     'ENTITIES')
+        self.ent_path = os.path.join(self.basepath, self.scene_name, 'ENTITIES')
         # The name of the scene relative to the PCBANKS folder
-        self.rel_named_path = os.path.join(self.scene_directory,
-                                           self.scene_name)
+        self.rel_named_path = os.path.join(self.scene_directory, self.scene_name)
         self.abs_name_path = os.path.join(self.basepath, self.scene_name)
 
         self.create_paths()
@@ -147,12 +168,12 @@ def __init__(self, export_directory, scene_directory, scene_name, model,
 
         self.preprocess_streams()
 
+        self.gstream_fpath = f"{os.path.join(self.basepath, self.scene_name)}.GEOMETRY.DATA.MBIN.PC"
+
         if (not self.preserve_node_info
                 or (self.preserve_node_info
                     and self.export_original_geom_data)):
-            self.geometry_stream = StreamData(
-                '{}.GEOMETRY.DATA.MBIN.PC'.format(
-                    os.path.join(self.basepath, self.scene_name)))
+            self.geometry_stream = StreamData(self.gstream_fpath)
 
             # generate the geometry stream data now
             self.serialize_data()
@@ -176,16 +197,13 @@ def __init__(self, export_directory, scene_directory, scene_name, model,
 
         # Assign each of the class objects that contain all of the data their
         # data
-        if (not self.preserve_node_info
-                or (self.preserve_node_info
-                    and self.export_original_geom_data)):
-            self.TkGeometryData = TkGeometryData(**self.GeometryData)
-            self.TkGeometryData.make_elements(main=True)
+        # if (not self.preserve_node_info
+        #         or (self.preserve_node_info
+        #             and self.export_original_geom_data)):
+        #     self.TkGeometryData = TkGeometryData(**self.GeometryData)
+        #     self.TkGeometryData.make_elements(main=True)
         self.Model.construct_data()
         self.TkSceneNodeData = self.Model.get_data()
-        # get the model to create all the required data and this will continue
-        # on down the tree
-        self.TkSceneNodeData.make_elements(main=True)
         for material in self.materials:
             if not isinstance(material, str):
                 material.make_elements(main=True)
@@ -260,40 +278,95 @@ def serialize_data(self):
         directly to the gstream and geometry file constructors
         """
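
The loop below assembles one payload per mesh: the serialized vertex bytes followed by the index bytes, with indices narrowed to `uint16` when the scene fits in 16 bits. A minimal sketch of that layout:

```python
import numpy as np

def mesh_payload(v_data: bytes, indexes: np.ndarray, use_16_bit: bool) -> bytes:
    # Vertex stream first, then the index stream for the same mesh.
    if use_16_bit:
        indexes = indexes.astype(np.uint16)  # halves the index stream size
    return v_data + indexes.tobytes()
```
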
         vertex_data = []
-        index_data = []
-
         metadata = odict()
-        for name in self.mesh_names:
-            vertex_data.append(serialize_vertex_stream(
+        vertex_sizes = []
+        index_sizes = []
+        mesh_datas: list[TkMeshData] = []
+        for i, name in enumerate(self.mesh_names):
+            v_data = serialize_vertex_stream(
                 requires=self.stream_list,
                 Vertices=self.vertex_stream[name],
                 UVs=self.uv_stream[name],
                 Normals=self.n_stream[name],
                 Tangents=self.t_stream[name],
-                Colours=self.c_stream[name]))
-            new_indexes = self.index_stream[name]
-            if max(new_indexes) > 2 ** 16:
-                indexes = array('I', new_indexes)
-            else:
-                indexes = array('H', new_indexes)
-            index_data.append(serialize_index_stream(indexes))
-            metadata[name] = self.mesh_metadata[name]
-        self.geometry_stream.create(metadata, vertex_data, index_data)
-        self.geometry_stream.save()  # offset data populated here
+                Colours=self.c_stream[name]
+            )
+            v_len = len(v_data)
+            vertex_data.append(v_data)
+            vertex_sizes.append(v_len)
+            # new_indexes = self.index_stream[name]
+            # # TODO: serialize the same way as they are in the actual data.
+            # # This will also fail I think if there are indexes > 0xFFFF since it will serialize some as H and
+            # # some as I
+            # if max(new_indexes) > 2 ** 16:
+            #     indexes = array('I', new_indexes)
+            # else:
+            #     indexes = array('H', new_indexes)
+            # i_data = serialize_index_stream(indexes)
+            i_data = self.np_indexes[i]
+            if self.Indices16Bit:
+                i_data = i_data.astype(np.uint16)
+            i_data = i_data.tobytes()
+            i_len = len(i_data)
+            index_sizes.append(i_len)
+            md = TkMeshData(
+                name.upper(),
+                v_data + i_data,
+                self.mesh_metadata[name]["hash"],
+                i_len,
+                v_len
+            )
+            mesh_datas.append(md)
+        gstream_data = TkGeometryStreamData(mesh_datas)
+
+        with open(self.gstream_fpath, "wb") as f:
+            hdr = MBINHeader()
+            hdr.header_namehash = 0x40025754
+            hdr.header_guid = 0x1D6CC846AC06B54C
+            hdr.write(f)
+            gstream_data.write(f)
+
+        offsets = []
+
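
In principle these offsets are also derivable arithmetically, since each mesh's blob is its vertex bytes followed by its index bytes. A sketch under the assumption of back-to-back, unpadded blobs starting at some `data_start` (the re-read below avoids having to make that assumption):

```python
def gstream_offsets(vertex_sizes: list[int], index_sizes: list[int],
                    data_start: int) -> list[tuple[int, int]]:
    # Returns (vertex_offset, index_offset) per mesh.
    offsets = []
    pos = data_start
    for v_len, i_len in zip(vertex_sizes, index_sizes):
        offsets.append((pos, pos + v_len))
        pos += v_len + i_len
    return offsets
```
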
+        # A bit of a hack, but we need the offsets of the index and vert data. We'll use this code to get it
+        # since it works.
+        with open(self.gstream_fpath, "rb") as f:
+            f.seek(0x28, 0)
+            entries = struct.unpack(" 2**16, not count)
-        if self.GeometryData['IndexCount'] > 2**16:
-            self.GeometryData['Indices16Bit'] = 0
-        else:
-            self.GeometryData['Indices16Bit'] = 1
 
         # Sort out mesh collision convex hull data
@@ -366,6 +437,7 @@ def process_data(self):
         # create the list of mesh collision index data
         batch_offset = 0
+
         for name in self.mesh_coll_verts.keys():
             # For each mesh collision determine the start verts and indexes
             start_verts = self.GeometryData['MeshVertREnd'][-1] + 1
@@ -385,6 +457,9 @@ def process_data(self):
             mesh_index_end = (
                 mesh_index_end + max(self.mesh_coll_indexes[name]) + 1
             )
+
+        self.GeometryData['Indices16Bit'] = self.Indices16Bit
+
         # Fix up the index values for the actual mesh data
         for name, batch in self.batches.items():
             self.batches[name] = [batch[0] + batch_offset,
@@ -408,10 +483,7 @@ def process_data(self):
         for verts in self.mesh_coll_verts.values():
             hull_data.extend(verts)
         for vert in hull_data:
-            self.GeometryData['BoundHullVerts'].append(Vector4f(x=vert[0],
-                                                                y=vert[1],
-                                                                z=vert[2],
-                                                                t=1.0))
+            self.GeometryData['BoundHullVerts'].append((vert[0], vert[1], vert[2], 1.0))
         self.vert_bounds.update(hull_verts)
         self.hull_bounds.update(hull_indexes)
@@ -481,7 +553,7 @@ def process_nodes(self):
                         # also write the entity file now too as we don't
                         # need to do anything else to it
                         AttachmentData.tree.write(
-                            "{}.ENTITY.exml".format(
+                            "{}.ENTITY.mxml".format(
                                 os.path.join(self.export_directory,
                                              ent_path)))
                     else:
@@ -499,7 +571,7 @@ def process_nodes(self):
                         'BATCHCOUNT']
                 else:
                     # We need to rename the mesh collision objects
-                    obj.Name = self.rel_named_path
+                    obj.Name = self.rel_named_path + "|Collision"
             else:
                 if obj._Type == 'LOCATOR':
                     if obj.HasAttachment:
@@ -519,7 +591,7 @@ def process_nodes(self):
                         # also write the entity file now too as we don't
                         # need to do anything else to it
                         AttachmentData.tree.write(
-                            "{}.ENTITY.exml".format(
+                            "{}.ENTITY.mxml".format(
                                 os.path.join(self.export_directory,
                                              ent_path)))
                     else:
@@ -527,7 +599,7 @@ def process_nodes(self):
                 else:
                     data = None
             elif obj._Type == 'COLLISION':
-                obj.Name = self.rel_named_path
+                obj.Name = self.rel_named_path + "|Collision"
                 if obj.CType == 'Box':
                     data = {'WIDTH': obj.Width, 'HEIGHT': obj.Height,
                             'DEPTH': obj.Depth}
@@ -544,8 +616,8 @@ def process_nodes(self):
 
     def create_vertex_layouts(self):
         # sort out what streams are given and create appropriate vertex layouts
-        VertexElements = List()
-        SmallVertexElements = List()
+        VertexElements = []
+        SmallVertexElements = []
         for sID in self.stream_list:
             # sID is the SemanticID
             if sID in [0, 1]:
@@ -555,8 +627,8 @@ def create_vertex_layouts(self):
                                                        Type=5131,
                                                        Offset=Offset,
                                                        Normalise=0,
-                                                       Instancing="PerVertex",
-                                                       PlatformData=""))
+                                                       Instancing=0,
+                                                       PlatformData=0))
                 # Also write the small vertex data
                 Offset = 8 * sID
                 SmallVertexElements.append(
@@ -565,8 +637,8 @@ def create_vertex_layouts(self):
                                     Type=5131,
                                     Offset=Offset,
                                     Normalise=0,
-                                    Instancing="PerVertex",
-                                    PlatformData=""))
+                                    Instancing=0,
+                                    PlatformData=0))
             # for the INT_2_10_10_10_REV stuff
             elif sID in [2, 3]:
                 Offset = self.offsets[sID]
@@ -575,8 +647,8 @@ def create_vertex_layouts(self):
                                     Type=36255,
                                     Offset=Offset,
                                     Normalise=0,
-                                    Instancing="PerVertex",
-                                    PlatformData=""))
+                                    Instancing=0,
+                                    PlatformData=0))
             elif sID == 4:
                 Offset = self.offsets[sID]
                 VertexElements.append(TkVertexElement(SemanticID=sID,
                                                       Size=4,
                                                       Type=5121,
                                                       Offset=Offset,
                                                       Normalise=0,
-                                                      Instancing="PerVertex",
-                                                      PlatformData=""))
+                                                      Instancing=0,
+                                                      PlatformData=0))
 
         self.GeometryData['VertexLayout'] = TkVertexLayout(
             ElementCount=self.element_count,
             Stride=self.stride,
-            PlatformData="",
-            VertexElements=VertexElements)
+            PlatformData=0,
+            VertexElements=VertexElements,
+        )
         # TODO: do more generically
         self.GeometryData['SmallVertexLayout'] = TkVertexLayout(
             ElementCount=len(SmallVertexElements),
             Stride=0x8 * len(SmallVertexElements),
-            PlatformData="",
-            VertexElements=SmallVertexElements)
+            PlatformData=0,
+            VertexElements=SmallVertexElements,
+        )
 
     def mix_streams(self):
         # this combines all the input streams into one single stream with the
@@ -631,30 +705,36 @@ def mix_streams(self):
         self.GeometryData['VertexStream'] = VertexStream
         self.GeometryData['SmallVertexStream'] = SmallVertexStream
 
-        # finally we can also flatten the index stream:
-        IndexBuffer = array('I')
-        # First write the mesh collision index buffer
-        for name in self.Model.mesh_colls.keys():
-            obj = self.index_stream[name]
-            IndexBuffer.extend(obj)
-        # Then write the normal mesh index buffer
-        for name in self.mesh_names:
-            obj = self.index_stream[name]
-            IndexBuffer.extend(obj)
-        # TODO: make this better (determine format correctly)
-        """
-        # let's convert to the correct type of array data type here
-        if not max(IndexBuffer) > 2**16:
-            IndexBuffer = array('H', IndexBuffer)
-        """
-        self.GeometryData['IndexBuffer'] = IndexBuffer
+        # Handle the index streams.
+        # First, we create a list which contains the cumulative count, and then we add this value to each
+        # array.
+        # We add 0 to the start to avoid adding anything to the first one.
+        addt_values = [0] + list(accumulate(self.np_index_maxs))
+        for i, addt in enumerate(addt_values[:-1]):
+            self.np_indexes[i] += addt
+        mesh_col_offset = len(self.mesh_names)
+
+        if self.Indices16Bit == 0:
+            dtype = np.uint32
+        else:
+            dtype = np.uint16
+
+        index_array = np.concatenate(
+            self.np_indexes[mesh_col_offset:] + self.np_indexes[:mesh_col_offset],
+            dtype=dtype,
+        )
+        index_array_bytes = index_array.tobytes()
+        if (dfct := (len(index_array_bytes) % 4)) != 0:
+            index_array_bytes += b"\x00" * (4 - dfct)
+
+        self.GeometryData['IndexBuffer'] = np.frombuffer(index_array_bytes, dtype=np.uint32)
 
     def get_bounds(self):
         # this analyses the vertex stream and finds the smallest bounding box
         # corners.
-        self.GeometryData['MeshAABBMin'] = List()
-        self.GeometryData['MeshAABBMax'] = List()
+        self.GeometryData['MeshAABBMin'] = []
+        self.GeometryData['MeshAABBMax'] = []
 
         # Combine the meshes
         objs = [*self.Model.Meshes.values(), *self.Model.mesh_colls.values()]
@@ -668,15 +748,14 @@ def get_bounds(self):
             y_bounds = (min(y_verts), max(y_verts))
             z_bounds = (min(z_verts), max(z_verts))
 
-            self.GeometryData['MeshAABBMin'].append(
-                Vector4f(x=x_bounds[0], y=y_bounds[0], z=z_bounds[0], t=1))
-            self.GeometryData['MeshAABBMax'].append(
-                Vector4f(x=x_bounds[1], y=y_bounds[1], z=z_bounds[1], t=1))
+            self.GeometryData['MeshAABBMin'].append((x_bounds[0], y_bounds[0], z_bounds[0], 1))
+            self.GeometryData['MeshAABBMax'].append((x_bounds[1], y_bounds[1], z_bounds[1], 1))
             if obj._Type == "MESH":
                 # only add the meshes to the self.mesh_bounds dict:
                 self.mesh_bounds[obj.Name] = {'x': x_bounds, 'y': y_bounds,
                                               'z': z_bounds}
 
+    # TODO: Change this here too...
     def write(self):
         """ Write all of the files required for the scene.
""" # We only need to write the geometry data if we aren't preserving @@ -684,23 +763,65 @@ def write(self): if (not self.preserve_node_info or (self.preserve_node_info and self.export_original_geom_data)): - mbinc = mbinCompiler( - self.TkGeometryData, - "{}.GEOMETRY.MBIN.PC".format(self.abs_name_path)) - mbinc.serialize() - print(f'Scene written to {self.abs_name_path}') - self.TkSceneNodeData.tree.write("{}.SCENE.exml".format( - self.abs_name_path)) - # Build all the descriptor exml data + # mbinc = mbinCompiler( + # self.TkGeometryData, + # "{}.GEOMETRY.MBIN.PC".format(self.abs_name_path)) + # mbinc.serialize() + + with open(f"{self.abs_name_path}.GEOMETRY.MBIN.PC", "wb") as f: + hdr = MBINHeader( + header_magic = 0xDDDDDDDDDDDDDDDD, + header_namehash = 0x819C3220, + header_guid = 0x32F6AE7B03222A1F, + header_timestamp = 0xFFFFFFFFFFFFFFFF, + ) + hdr.write(f) + gd = self.GeometryData + thing = TkGeometryData_new( + SmallVertexLayout=gd["SmallVertexLayout"], # good + VertexLayout=gd["VertexLayout"], # good + BoundHullVertEd=gd["BoundHullVertEd"], # good + BoundHullVerts=gd["BoundHullVerts"], # good + BoundHullVertSt=gd["BoundHullVertSt"], # good + IndexBuffer=gd["IndexBuffer"], + JointBindings=[], + JointExtents=[], + JointMirrorAxes=[], + JointMirrorPairs=[], + MeshAABBMax=gd["MeshAABBMax"], # good + MeshAABBMin=gd["MeshAABBMin"], # good + MeshBaseSkinMat=[], + MeshVertREnd=gd["MeshVertREnd"], # good + MeshVertRStart=gd["MeshVertRStart"], # good + ProcGenNodeNames=[], + ProcGenParentId=[], + SkinMatrixLayout=[], + StreamMetaDataArray=gd["StreamMetaDataArray"], # good + CollisionIndexCount=gd["CollisionIndexCount"], # good + IndexCount=gd["IndexCount"], # good + Indices16Bit=self.Indices16Bit, # good + VertexCount=gd["VertexCount"], # good + ) + thing.write(f) + + scene_path = f"{self.abs_name_path}.SCENE.MBIN" + with open(scene_path, "wb") as f: + hdr = MBINHeader() + hdr.header_namehash = 0x3DB87E47 + hdr.header_guid = 0x6A9DE02E8902AAC3 + hdr.write(f) + self.TkSceneNodeData.write(f) + print(f'Scene written to {scene_path}') + # Build all the descriptor mxml data if self.descriptor is not None: - descriptor = self.descriptor.to_exml() + descriptor = self.descriptor.to_mxml() descriptor.make_elements(main=True) descriptor.tree.write( - "{}.DESCRIPTOR.exml".format(self.abs_name_path)) + "{}.DESCRIPTOR.mxml".format(self.abs_name_path)) for material in self.materials: if not isinstance(material, str): material.tree.write( - "{0}.MATERIAL.exml".format(os.path.join( + "{0}.MATERIAL.mxml".format(os.path.join( self.abs_name_path, str(material['Name']).upper()))) # Write the animation files idle_anim = bpy.context.scene.nmsdk_anim_data.idle_anim @@ -711,24 +832,24 @@ def write(self): 'one of the animations that exists...') # get the value and output it self.anim_data[idle_anim].tree.write( - "{}.ANIM.exml".format(self.abs_name_path)) + "{}.ANIM.mxml".format(self.abs_name_path)) else: for name in list(self.anim_data.keys()): if name != idle_anim: self.anim_data[name].tree.write( os.path.join(self.anims_path, - "{}.ANIM.exml".format(name.upper()))) + "{}.ANIM.mxml".format(name.upper()))) else: self.anim_data[idle_anim].tree.write( - "{}.ANIM.exml".format(self.abs_name_path)) + "{}.ANIM.mxml".format(self.abs_name_path)) def convert_to_mbin(self): - """ Convert all .exml file to .mbin files. """ - print('Converting .exml files to .mbin. Please wait.') + """ Convert all .mxml file to .mbin files. """ + print('Converting .mxml files to .mbin. 
 
     def convert_to_mbin(self):
-        """ Convert all .exml file to .mbin files. """
-        print('Converting .exml files to .mbin. Please wait.')
+        """ Convert all .mxml files to .mbin files. """
+        print('Converting .mxml files to .mbin. Please wait.')
         for directory, _, files in os.walk(self.basepath):
             for file in files:
                 location = os.path.join(directory, file)
-                if os.path.splitext(location)[1].lower() == '.exml':
+                if os.path.splitext(location)[1].lower() == '.mxml':
                     # Force MBINCompiler to overwrite existing files and
                     # ignore errors.
                     mbincompiler_path = bpy.context.scene.nmsdk_default_settings.MBINCompiler_path  # noqa
diff --git a/ModelExporter/utils.py b/ModelExporter/utils.py
index 6111dfe..403639f 100644
--- a/ModelExporter/utils.py
+++ b/ModelExporter/utils.py
@@ -117,11 +117,12 @@ def movetoindex(lst, i, index):
 def nmsHash(data):
     """ Lazy hash function for mesh data
 
-    This is simply the last 16 hexadecimal digits of a sha256 hash
+    This is simply the last 16 hexadecimal digits of a sha256 hash.
+    Also just take the first 16 verts to save time...
     """
     if isinstance(data, list):
         d = array('f')
-        for verts in data:
+        for verts in data[:16]:
             d.extend(verts)
     else:
         d = data
diff --git a/ModelImporter/import_scene.py b/ModelImporter/import_scene.py
index 021d6b0..e479a85 100644
--- a/ModelImporter/import_scene.py
+++ b/ModelImporter/import_scene.py
@@ -76,15 +76,13 @@ def __init__(self, fpath, parent_obj=None, ref_scenes=dict(),
         # determine the PCBANKS directory
         self.PCBANKS_dir = get_NMS_dir(self.local_directory)
 
-        # Determine the type of file provided and get the exml and mbin file
+        # Determine the type of file provided and get the mxml and mbin file
         # paths for that file.
-        if ftype.lower() == '.exml':
+        if ftype.lower() == '.mxml':
             mbin_fpath = (op.join(self.local_directory, self.scene_basename) +
                           '.MBIN')
-            exml_fpath = fpath
+            mxml_fpath = fpath
         elif ftype.lower() == '.mbin':
-            exml_fpath = (op.join(self.local_directory, self.scene_basename) +
-                          '.EXML')
             mbin_fpath = fpath
         else:
             raise TypeError('Selected file is of the wrong format.')
@@ -150,17 +148,15 @@ def __init__(self, fpath, parent_obj=None, ref_scenes=dict(),
         # change to render with cycles
         self.scn.render.engine = RENDER_ENGINE
 
-        if not op.exists(exml_fpath):
+        if not op.exists(mbin_fpath):
             retcode = subprocess.call(
-                [self.scn.nmsdk_default_settings.MBINCompiler_path, "-q", "-Q",
-                 fpath])
+                [self.scn.nmsdk_default_settings.MBINCompiler_path, "-q", "-Q", fpath]
+            )
             if retcode != 0:
-                print('MBINCompiler failed to run. Please ensure it is '
-                      'registered on the path.')
+                print('MBINCompiler failed to run. Please ensure it is registered on the path.')
                 print('Import failed')
                 self.requires_render = False
-                raise OSError("MBINCompiler failed to run. See System Console "
-                              "for more details. "
+                raise OSError("MBINCompiler failed to run. See System Console for more details. "
                               "(Window > Toggle System Console)")
 
         self.scene_node_data = SceneNodeData(self._scene_node_data)
@@ -235,7 +231,7 @@ def __init__(self, fpath, parent_obj=None, ref_scenes=dict(),
 
         # load all the mesh metadata
         self.mesh_metadata = {
-            x.IdString: gstream_info(
+            x.IdString.upper(): gstream_info(
                 x.VertexDataSize,
                 x.VertexDataOffset,
                 x.IndexDataSize,
@@ -602,9 +598,9 @@ def _add_empty_to_scene(self, scene_node: SceneNodeData,
             print(f'Now trying to find a descriptor at: {descriptor_path}')
             if op.exists(descriptor_path + '.MBIN'):
                 empty_obj.NMSReference_props.is_proc = True
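
The decompile step used here and just below always takes the same form; a small sketch of the invocation (hypothetical wrapper):

```python
import subprocess

def decompile_mbin(mbincompiler_path: str, mbin_path: str) -> bool:
    # -q/-Q keep MBINCompiler quiet; a non-zero return code means failure,
    # which callers treat as "abort the import".
    return subprocess.call([mbincompiler_path, "-q", "-Q", mbin_path]) == 0
```
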
-                # Also convert the .mbin to exml for parsing if the exml
+                # Also convert the .mbin to mxml for parsing if the mxml
                 # doesn't already exist.
-                if not op.exists(descriptor_path + '.EXML'):
+                if not op.exists(descriptor_path + '.MXML'):
                     retcode = subprocess.call(
                         [self.scn.nmsdk_default_settings.MBINCompiler_path,
                          "-q", "-Q", descriptor_path + '.MBIN'])
                     if retcode != 0:
                         print('MBINCompiler failed to run. Please ensure it is'
                               ' registered on the path.')
                         print('Import failed')
                         return
-                self.descriptor_data = read_descriptor(
-                    descriptor_path + '.EXML')
+                self.descriptor_data = read_descriptor(descriptor_path + '.MXML')
             else:
                 print("No descriptor found... Scene is not proc-gen")
         self.local_objects[self.scene_basename] = empty_obj
diff --git a/ModelImporter/readers.py b/ModelImporter/readers.py
index 9682a59..8d3e163 100644
--- a/ModelImporter/readers.py
+++ b/ModelImporter/readers.py
@@ -9,7 +9,7 @@
     bytes_to_quat, read_bool, read_uint32, returned_read)
 from serialization.list_header import ListHeader  # noqa pylint: disable=relative-beyond-top-level
-from utils.utils import exml_to_dict  # noqa pylint: disable=relative-beyond-top-level
+from utils.utils import mxml_to_dict  # noqa pylint: disable=relative-beyond-top-level
 from serialization.NMS_Structures import TkMaterialData, MBINHeader, NAMEHASH_MAPPING, TkAnimMetadata
 
@@ -22,7 +22,7 @@ def read_anim(fname):
     # TODO: FIX!
     """ Reads an anim file.
     """
-    # anim_data = dict()
+    anim_data = dict()
     with open(fname, "rb") as f:
         header = MBINHeader.read(f)
@@ -226,6 +226,6 @@ def read_descriptor(fname: str) -> dict:
     """
     with open(fname) as f:
         # The top level is always a list, let's just extract it immediately.
-        data = exml_to_dict(f)['List']
+        data = mxml_to_dict(f)['List']
     data = read_TkModelDescriptorList(data)
     return data
diff --git a/NMS/LOOKUPS.py b/NMS/LOOKUPS.py
index 791237e..2a38b2e 100644
--- a/NMS/LOOKUPS.py
+++ b/NMS/LOOKUPS.py
@@ -1,6 +1,7 @@
 # File containing a number of lookup tables to keep it out of the main data
-from collections import defaultdict
+import numpy as np
+
 
 MATERIALFLAGS = ['_F01_DIFFUSEMAP', '_F02_SKINNED', '_F03_NORMALMAP', '_F04_',
                  '_F05_INVERT_ALPHA', '_F06_BRIGHT_EDGE', '_F07_UNLIT',
@@ -71,3 +72,17 @@
            NORMS: 1,
            TANGS: 1,
            COLOURS: 2}
+
+SERIALIZE_FMT_MAP_NEW = {
+    VERTS: 5131,
+    UVS: 5131,
+    NORMS: 36255,
+    TANGS: 36255,
+    COLOURS: 5121,
+}
+
+VERT_TYPE_MAP = {
+    5121: {'size': 1, 'np_fmt': "4B"},
+    5131: {'size': 2, 'np_fmt': "4e"},  # half-precision floats
+    36255: {'size': 1, 'np_fmt': np.int32}
+}
\ No newline at end of file
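
On the `VERT_TYPE_MAP` formats above: type 5131 stores each semantic as four half-precision floats, which pairs directly with numpy's `float16`. An illustrative sketch (my reading of the table, not code from this change):

```python
import numpy as np

# Four half floats (format "4e") for a single POSITION element...
raw = np.array([1.0, 0.5, -0.25, 1.0], dtype=np.float16).tobytes()
# ...which can be viewed straight off the byte buffer and widened for inspection.
verts = np.frombuffer(raw, dtype=np.float16).astype(np.float32).reshape(-1, 4)
```
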
diff --git a/NMS/classes/Object.py b/NMS/classes/Object.py
index 6fd677a..3da93fb 100644
--- a/NMS/classes/Object.py
+++ b/NMS/classes/Object.py
@@ -4,9 +4,7 @@
 from typing import Optional
 
-from .TkSceneNodeData import TkSceneNodeData
-from .TkSceneNodeAttributeData import TkSceneNodeAttributeData
-from .TkTransformData import TkTransformData
+from serialization.NMS_Structures.Structures import TkSceneNodeAttributeData, TkSceneNodeData, TkTransformData
 from .TkMaterialData import TkMaterialData
 from .TkPhysicsComponentData import TkPhysicsComponentData
 from .List import List
@@ -45,11 +43,10 @@ def __init__(self, Name: str, **kwargs):
         # If this isn't specified the default value will be used.
         self.Transform = kwargs.get('Transform', TkTransformData())
 
-        # default to None so that it is handled properly if there are none.
-        self.Attributes = None
+        self.Attributes: list[TkSceneNodeAttributeData] = []
 
         # just a normal list so it is easier to iterate over
-        self.Children = []
+        self.Children: list['Object'] = []
         # set to None by default. Every child will have this set to something
         # when it is added as a child.
         self.Parent = None
@@ -169,21 +166,17 @@ def construct_data(self):
         if self.was_imported:
             self.Children.sort(key=lambda x: x.orig_node_idx)
         # Call each child's process function
-        if len(self.Children) != 0:
-            self.Child_Nodes = List()
-            for child in self.Children:
-                child.construct_data()
-                # this will return the self.NodeData object in the child Object
-                self.Child_Nodes.append(child.get_data())
-        else:
-            self.Child_Nodes = None
-
+        child_nodes: list[TkSceneNodeData] = []
+        for child in self.Children:
+            child.construct_data()
+            # this will return the self.NodeData object in the child Object
+            child_nodes.append(child.get_data())
         self.NodeData = TkSceneNodeData(Name=self.Name,
                                         NameHash=self.NameHash,
                                         Type=self._Type,
                                         Transform=self.Transform,
                                         Attributes=self.Attributes,
-                                        Children=self.Child_Nodes)
+                                        Children=child_nodes)
 
     def rebuild_entity(self):
         # this is used to rebuild the entity data in case something else is
@@ -200,8 +193,21 @@ def rebuild_entity(self):
             for entity in self.ExtraEntityData[entityname]:
                 self.EntityData[entityname].append(entity)
 
-    def original_attribute(self, name: str,
-                           ignore_original: bool = False) -> Optional[tuple]:
+    def original(
+        self,
+        name: str,
+        fallback: str = "",
+        ignore_original: bool = False
+    ) -> Optional[str]:
+        if ignore_original:
+            return fallback
+        attribs = self.orig_node_data.get('Attributes', [])
+        for attr in attribs:
+            if attr.get('Name', '') == name:
+                return str(attr['Value'])
+        return str(fallback)
+
+    def original_attribute(self, name: str, ignore_original: bool = False) -> Optional[tuple]:
         """ Returns the value of an attibute from the original imported value,
         or None if there isn't a value. """
         # Create a short-cut in case we actually want to not use an original
@@ -242,8 +248,11 @@ def __init__(self, Name: str, **kwargs):
 
     def create_attributes(self, data: dict, ignore_original: bool = False):
         if data is not None:
-            self.Attributes = List(TkSceneNodeAttributeData(
-                Name='ATTACHMENT', Value=data['ATTACHMENT']))
+            self.Attributes = [
+                TkSceneNodeAttributeData(
+                    Name='ATTACHMENT', Value=str(data['ATTACHMENT'])
+                )
+            ]
 
 
 class Light(Object):
@@ -256,37 +265,31 @@ def __init__(self, Name: str, **kwargs):
         self.FOV = kwargs.get('FOV', 360.0)
 
     def create_attributes(self, data: dict, ignore_original: bool = False):
-        self.Attributes = List(
+        self.Attributes = [
             TkSceneNodeAttributeData(Name='FOV',
-                                     Value=self.FOV,
-                                     fmt='{0:.6f}'),
+                                     Value=f'{self.FOV:.6f}'),
             TkSceneNodeAttributeData(Name='FALLOFF',
                                      Value='quadratic'),
             TkSceneNodeAttributeData(Name='FALLOFF_RATE',
-                                     Value=2,
-                                     fmt='{0:.6f}'),
+                                     Value='2.000000'),
             TkSceneNodeAttributeData(Name='INTENSITY',
-                                     Value=self.Intensity,
-                                     fmt='{0:.6f}'),
+                                     Value=f'{self.Intensity:.6f}'),
             TkSceneNodeAttributeData(Name='COL_R',
-                                     Value=self.Colour[0],
-                                     fmt='{0:.6f}'),
+                                     Value=f'{self.Colour[0]:.6f}'),
             TkSceneNodeAttributeData(Name='COL_G',
-                                     Value=self.Colour[1],
-                                     fmt='{0:.6f}'),
+                                     Value=f'{self.Colour[1]:.6f}'),
             TkSceneNodeAttributeData(Name='COL_B',
-                                     Value=self.Colour[2],
-                                     fmt='{0:.6f}'),
+                                     Value=f'{self.Colour[2]:.6f}'),
             # These two values will be hard-coded until they are understood
             # well enough to modify them to be anything other than their
             # default values.
             TkSceneNodeAttributeData(Name='COOKIE_IDX',
-                                     Value=-1),
+                                     Value='-1'),
             TkSceneNodeAttributeData(Name='VOLUMETRIC',
-                                     Value=0,
-                                     fmt='{0:.6f}'),
+                                     Value='0.000000'),
             TkSceneNodeAttributeData(Name='MATERIAL',
-                                     Value='MATERIALS/LIGHT.MATERIAL.MBIN'))
+                                     Value='MATERIALS/LIGHT.MATERIAL.MBIN')
+        ]
 
 
 class Joint(Object):
@@ -296,10 +299,12 @@ def __init__(self, Name: str, **kwargs):
         self.JointIndex = kwargs.get("JointIndex", 1)
 
     def create_attributes(self, data: dict, ignore_original: bool = False):
-        self.Attributes = List(TkSceneNodeAttributeData(
-            Name='JOINTINDEX',
-            Value=self.JointIndex,
-            orig=self.original_attribute('JOINTINDEX')))
+        self.Attributes = [
+            TkSceneNodeAttributeData(
+                Name='JOINTINDEX',
+                Value=str(self.original('JOINTINDEX', self.JointIndex)),
+            )
+        ]
 
 
 class Emitter(Object):
@@ -309,10 +314,11 @@ def __init__(self, Name: str, **kwargs):
 
     def create_attributes(self, data: dict, ignore_original: bool = False):
         if data is not None:
-            self.Attributes = List(
+            self.Attributes = [
                 TkSceneNodeAttributeData(Name='MATERIAL',
-                                         Value=data['MATERIAL']),
-                TkSceneNodeAttributeData(Name='DATA', Value=data['DATA']))
+                                         Value=str(data['MATERIAL'])),
+                TkSceneNodeAttributeData(Name='DATA', Value=str(data['DATA']))
+            ]
 
 
 class Mesh(Object):
@@ -329,6 +335,7 @@ def __init__(self, Name: str, **kwargs):
         self.Tangents = kwargs.get('Tangents', None)
         self.CHVerts = kwargs.get('CHVerts', None)
         self.Colours = kwargs.get('Colours', None)
+        self.np_indexes = kwargs.get('np_indexes', None)
         self.IsMesh = True
         # this will be a list of length 2 with each element being a 4-tuple.
         self.BBox = kwargs.get('BBox', None)
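
The `Value` strings in the rewritten `create_attributes` below all route through the `original()` helper added earlier in this file. Its behaviour in miniature (stand-in class, runnable outside the addon):

```python
class _Node:
    def __init__(self, orig_node_data):
        self.orig_node_data = orig_node_data

    # Mirror of Object.original() above.
    def original(self, name, fallback="", ignore_original=False):
        if ignore_original:
            return fallback  # note: returned as-is, not stringified
        for attr in self.orig_node_data.get('Attributes', []):
            if attr.get('Name', '') == name:
                return str(attr['Value'])
        return str(fallback)

node = _Node({'Attributes': [{'Name': 'LODLEVEL', 'Value': 2}]})
assert node.original('LODLEVEL', 0) == '2'      # imported value wins
assert node.original('BATCHCOUNT', 36) == '36'  # fallback, stringified
```
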
@@ -340,88 +347,96 @@ def __init__(self, Name: str, **kwargs):
 
     def create_attributes(self, data: dict, ignore_original: bool = False):
         # data will be just the information required for the Attributes
-        self.Attributes = List(
-            TkSceneNodeAttributeData(Name='BATCHSTARTPHYSI',
-                                     Value=data['BATCHSTART'],
-                                     orig=self.original_attribute(
-                                         'BATCHSTARTPHYSI', ignore_original)),
-            TkSceneNodeAttributeData(Name='VERTRSTARTPHYSI',
-                                     Value=data['VERTRSTART'],
-                                     orig=self.original_attribute(
-                                         'VERTRSTARTPHYSI', ignore_original)),
-            TkSceneNodeAttributeData(Name='VERTRENDPHYSICS',
-                                     Value=data['VERTREND'],
-                                     orig=self.original_attribute(
-                                         'VERTRENDPHYSICS', ignore_original)),
-            TkSceneNodeAttributeData(Name='BATCHSTARTGRAPH',
-                                     Value=0,
-                                     orig=self.original_attribute(
-                                         'BATCHSTARTGRAPH', ignore_original)),
-            TkSceneNodeAttributeData(Name='BATCHCOUNT',
-                                     Value=data['BATCHCOUNT'],
-                                     orig=self.original_attribute(
-                                         'BATCHCOUNT', ignore_original)),
-            TkSceneNodeAttributeData(Name='VERTRSTARTGRAPH',
-                                     Value=0,
-                                     orig=self.original_attribute(
-                                         'VERTRSTARTGRAPH', ignore_original)),
-            TkSceneNodeAttributeData(Name='VERTRENDGRAPHIC',
-                                     Value=(data['VERTREND'] -
-                                            data['VERTRSTART']),
-                                     orig=self.original_attribute(
-                                         'VERTRENDGRAPHIC', ignore_original)),
-            TkSceneNodeAttributeData(Name='FIRSTSKINMAT',
-                                     Value=0,
-                                     orig=self.original_attribute(
-                                         'FIRSTSKINMAT')),
-            TkSceneNodeAttributeData(Name='LASTSKINMAT',
-                                     Value=0,
-                                     orig=self.original_attribute(
-                                         'LASTSKINMAT')),
-            TkSceneNodeAttributeData(Name='LODLEVEL',
-                                     Value=self.LodLevel,
-                                     orig=self.original_attribute('LODLEVEL')),
-            TkSceneNodeAttributeData(Name='BOUNDHULLST',
-                                     Value=data.get('BOUNDHULLST', 0),
-                                     orig=self.original_attribute(
-                                         'BOUNDHULLST')),
-            TkSceneNodeAttributeData(Name='BOUNDHULLED',
-                                     Value=data.get('BOUNDHULLED', 0),
-                                     orig=self.original_attribute(
-                                         'BOUNDHULLED')),
-            TkSceneNodeAttributeData(Name='AABBMINX',
-                                     Value=data.get('AABBMINX', 0),
-                                     fmt='{0:.6f}',
-                                     orig=self.original_attribute('AABBMINX')),
-            TkSceneNodeAttributeData(Name='AABBMINY',
-                                     Value=data.get('AABBMINY', 0),
-                                     fmt='{0:.6f}',
-                                     orig=self.original_attribute('AABBMINY')),
-            TkSceneNodeAttributeData(Name='AABBMINZ',
-                                     Value=data.get('AABBMINZ', 0),
-                                     fmt='{0:.6f}',
-                                     orig=self.original_attribute('AABBMINZ')),
-            TkSceneNodeAttributeData(Name='AABBMAXX',
-                                     Value=data.get('AABBMAXX', 0),
-                                     fmt='{0:.6f}',
-                                     orig=self.original_attribute('AABBMAXX')),
-            TkSceneNodeAttributeData(Name='AABBMAXY',
-                                     Value=data.get('AABBMAXY', 0),
-                                     fmt='{0:.6f}',
-                                     orig=self.original_attribute('AABBMAXY')),
-            TkSceneNodeAttributeData(Name='AABBMAXZ',
-                                     Value=data.get('AABBMAXZ', 0),
-                                     fmt='{0:.6f}',
-                                     orig=self.original_attribute('AABBMAXZ')),
-            TkSceneNodeAttributeData(Name='HASH',
-                                     Value=data.get('HASH', 0),
-                                     orig=self.original_attribute('HASH')),
-            TkSceneNodeAttributeData(Name='MATERIAL',
-                                     Value=data['MATERIAL'],
-                                     orig=self.original_attribute('MATERIAL')),
-            TkSceneNodeAttributeData(Name='MESHLINK',
-                                     Value=self.Name + 'Shape',
-                                     orig=self.original_attribute('MESHLINK')))
+        self.Attributes = [
+            TkSceneNodeAttributeData(
+                Name='BATCHSTARTPHYSI',
+                Value=self.original('BATCHSTARTPHYSI', data['BATCHSTART'], ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='VERTRSTARTPHYSI',
+                Value=self.original('VERTRSTARTPHYSI', data['VERTRSTART'], ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='VERTRENDPHYSICS',
+                Value=self.original('VERTRENDPHYSICS', data['VERTREND'], ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='BATCHSTARTGRAPH',
+                Value=self.original('BATCHSTARTGRAPH', 0, ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='BATCHCOUNT',
+                Value=self.original('BATCHCOUNT', data['BATCHCOUNT'], ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='VERTRSTARTGRAPH',
+                Value=self.original('VERTRSTARTGRAPH', 0, ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='VERTRENDGRAPHIC',
+                Value=self.original(
+                    'VERTRENDGRAPHIC',
+                    data['VERTREND'] - data['VERTRSTART'],
+                    ignore_original,
+                )
+            ),
+            TkSceneNodeAttributeData(
+                Name='FIRSTSKINMAT',
+                Value=self.original('FIRSTSKINMAT', 0, ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='LASTSKINMAT',
+                Value=self.original('LASTSKINMAT', 0, ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='LODLEVEL',
+                Value=self.original('LODLEVEL', self.LodLevel, ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='BOUNDHULLST',
+                Value=self.original('BOUNDHULLST', data.get('BOUNDHULLST', 0), ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='BOUNDHULLED',
+                Value=self.original('BOUNDHULLED', data.get('BOUNDHULLED', 0), ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='AABBMINX',
+                Value=self.original('AABBMINX', f"{data.get('AABBMINX', 0):.6f}", ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='AABBMINY',
+                Value=self.original('AABBMINY', f"{data.get('AABBMINY', 0):.6f}", ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='AABBMINZ',
+                Value=self.original('AABBMINZ', f"{data.get('AABBMINZ', 0):.6f}", ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='AABBMAXX',
+                Value=self.original('AABBMAXX', f"{data.get('AABBMAXX', 0):.6f}", ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='AABBMAXY',
+                Value=self.original('AABBMAXY', f"{data.get('AABBMAXY', 0):.6f}", ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='AABBMAXZ',
+                Value=self.original('AABBMAXZ', f"{data.get('AABBMAXZ', 0):.6f}", ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='HASH',
+                Value=self.original('HASH', data.get('HASH', "0"), ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='MATERIAL',
+                Value=self.original('MATERIAL', data['MATERIAL'], ignore_original)
+            ),
+            TkSceneNodeAttributeData(
+                Name='MESHLINK',
+                Value=self.original('MESHLINK', self.Name + 'Shape', ignore_original)
+            ),
+        ]
         if self.HasAttachment:
             self.Attributes.append(
                 TkSceneNodeAttributeData(Name='ATTACHMENT',
@@ -442,6 +457,7 @@ def __init__(self, Name: str, **kwargs):
             self.Normals = kwargs.get('Normals', None)
             self.Tangents = kwargs.get('Tangents', None)
             self.CHVerts = kwargs.get('CHVerts', None)
+            self.np_indexes = kwargs.get('np_indexes', None)
         else:
             # just give all 4 values. The required ones will be non-zero (deal
             # with later in the main file...)
@@ -451,76 +467,96 @@ def __init__(self, Name: str, **kwargs):
             self.Radius = kwargs.get('Radius', 0)
 
     def create_attributes(self, data: dict, ignore_original: bool = False):
-        self.Attributes = List(TkSceneNodeAttributeData(Name="TYPE",
-                                                        Value=self.CType))
+        self.Attributes = [TkSceneNodeAttributeData(Name="TYPE",
+                                                    Value=self.CType)]
         if self.CType == 'Mesh':
             self.Attributes.append(
-                TkSceneNodeAttributeData(Name='BATCHSTART',
-                                         Value=data['BATCHSTART'],
-                                         orig=self.original_attribute(
-                                             'BATCHSTART')))
+                TkSceneNodeAttributeData(
+                    Name='BATCHSTART',
+                    Value=self.original('BATCHSTART', data['BATCHSTART'])
+                )
+            )
             self.Attributes.append(
-                TkSceneNodeAttributeData(Name='BATCHCOUNT',
-                                         Value=data['BATCHCOUNT'],
-                                         orig=self.original_attribute(
-                                             'BATCHCOUNT')))
+                TkSceneNodeAttributeData(
+                    Name='BATCHCOUNT',
+                    Value=self.original('BATCHCOUNT', data['BATCHCOUNT'])
+                )
+            )
            self.Attributes.append(
-                TkSceneNodeAttributeData(Name='VERTRSTART',
-                                         Value=data['VERTRSTART'],
-                                         orig=self.original_attribute(
-                                             'VERTRSTART')))
+                TkSceneNodeAttributeData(
+                    Name='VERTRSTART',
+                    Value=self.original('VERTRSTART', data['VERTRSTART'])
+                )
+            )
            self.Attributes.append(
-                TkSceneNodeAttributeData(Name='VERTREND',
-                                         Value=data['VERTREND'],
-                                         orig=self.original_attribute(
-                                             'VERTREND')))
+                TkSceneNodeAttributeData(
+                    Name='VERTREND',
+                    Value=self.original('VERTREND', data['VERTREND'])
+                )
+            )
            self.Attributes.append(
-                TkSceneNodeAttributeData(Name='FIRSTSKINMAT',
-                                         Value=0,
-                                         orig=self.original_attribute(
-                                             'FIRSTSKINMAT')))
+                TkSceneNodeAttributeData(
+                    Name='FIRSTSKINMAT',
+                    Value=self.original('FIRSTSKINMAT', "0")
+                )
+            )
            self.Attributes.append(
-                TkSceneNodeAttributeData(Name='LASTSKINMAT',
-                                         Value=0,
-                                         orig=self.original_attribute(
-                                             'LASTSKINMAT')))
+                TkSceneNodeAttributeData(
+                    Name='LASTSKINMAT',
+                    Value=self.original('LASTSKINMAT', "0")
+                )
+            )
            self.Attributes.append(
-                TkSceneNodeAttributeData(Name='BOUNDHULLST',
-                                         Value=data.get('BOUNDHULLST', 0),
-                                         orig=self.original_attribute(
-                                             'BOUNDHULLST')))
+                TkSceneNodeAttributeData(
+                    Name='BOUNDHULLST',
+                    Value=self.original('BOUNDHULLST', data.get('BOUNDHULLST', "0"))
+                )
+            )
            self.Attributes.append(
-                TkSceneNodeAttributeData(Name='BOUNDHULLED',
-                                         Value=data.get('BOUNDHULLED', 0),
-                                         orig=self.original_attribute(
-                                             'BOUNDHULLED')))
+                TkSceneNodeAttributeData(
+                    Name='BOUNDHULLED',
+                    Value=self.original('BOUNDHULLED', data.get('BOUNDHULLED', "0"))
+                )
+            )
         elif self.CType == 'Box':
            self.Attributes.append(
-                TkSceneNodeAttributeData(Name="WIDTH",
-                                         Value=data['WIDTH'],
-                                         fmt='{0:.6f}'))
+                TkSceneNodeAttributeData(
+                    Name='WIDTH',
+                    Value=f'{data["WIDTH"]:.06f}',
+                )
+            )
            self.Attributes.append(
-                TkSceneNodeAttributeData(Name="HEIGHT",
-                                         Value=data['HEIGHT'],
-                                         fmt='{0:.6f}'))
+                TkSceneNodeAttributeData(
+                    Name='HEIGHT',
+                    Value=f'{data["HEIGHT"]:.06f}',
+                )
+            )
            self.Attributes.append(
-                TkSceneNodeAttributeData(Name="DEPTH",
-                                         Value=data['DEPTH'],
-                                         fmt='{0:.6f}'))
+                TkSceneNodeAttributeData(
+                    Name='DEPTH',
+                    Value=f'{data["DEPTH"]:.06f}',
+                )
+            )
         elif self.CType == 'Sphere':
            self.Attributes.append(
-                TkSceneNodeAttributeData(Name="RADIUS",
-                                         Value=data['RADIUS'],
-                                         fmt='{0:.6f}'))
+                TkSceneNodeAttributeData(
+                    Name='RADIUS',
+                    Value=f'{data["RADIUS"]:.06f}',
+                )
+            )
         elif self.CType in ('Capsule', 'Cylinder'):
            self.Attributes.append(
-                TkSceneNodeAttributeData(Name="RADIUS",
-                                         Value=data['RADIUS'],
-                                         fmt='{0:.6f}'))
+                TkSceneNodeAttributeData(
+                    Name='RADIUS',
+                    Value=f'{data["RADIUS"]:.06f}',
+                )
+            )
            self.Attributes.append(
-                TkSceneNodeAttributeData(Name="HEIGHT",
-                                         Value=data['HEIGHT'],
-                                         fmt='{0:.6f}'))
+                TkSceneNodeAttributeData(
+                    Name='HEIGHT',
+                    Value=f'{data["HEIGHT"]:.06f}',
+                )
+            )
 
 
 class Model(Object):
@@ -534,9 +570,10 @@ def __init__(self, Name: str, **kwargs):
 
     def create_attributes(self, data: dict, ignore_original: bool = False):
         # Data will be just the information required for the Attributes.
-        self.Attributes = List(
+        self.Attributes = [
             TkSceneNodeAttributeData(Name='GEOMETRY',
-                                     Value=data['GEOMETRY']))
+                                     Value=data['GEOMETRY'])
+        ]
         # Add the LOD info
         for i, dist in enumerate(self.lod_distances):
             self.Attributes.append(
diff --git a/NMS/classes/PointLight.py b/NMS/classes/PointLight.py
index 88ea711..b2be8fe 100644
--- a/NMS/classes/PointLight.py
+++ b/NMS/classes/PointLight.py
@@ -1,8 +1,6 @@
 # Custom TkSceneNodeData struct for PointLights
 
-from .List import List
-from .TkSceneNodeAttributeData import TkSceneNodeAttributeData
-from .TkTransformData import TkTransformData
+from serialization.NMS_Structures.Structures import TkSceneNodeAttributeData, TkTransformData
 
 
 class PointLight():
@@ -25,24 +23,19 @@ def __init__(self, **kwargs):
         self.Type = "LIGHT"
 
     def process_attributes(self):
-        self.Attributes = List()
+        self.Attributes = []
         self.Attributes.append(TkSceneNodeAttributeData(Name="FOV",
-                                                        Value=self.FOV,
-                                                        fmt='{0:.6f}'))
+                                                        Value=f"{self.FOV:.6f}"))
         self.Attributes.append(TkSceneNodeAttributeData(Name="FALLOFF",
                                                         Value=self.Falloff))
         self.Attributes.append(TkSceneNodeAttributeData(Name="INTENSITY",
-                                                        Value=self.Intensity,
-                                                        fmt='{0:.6f}'))
+                                                        Value=f"{self.Intensity:.6f}"))
         self.Attributes.append(TkSceneNodeAttributeData(Name="COL_R",
-                                                        Value=self.Colour[0],
-                                                        fmt='{0:.6f}'))
+                                                        Value=f"{self.Colour[0]:.6f}"))
         self.Attributes.append(TkSceneNodeAttributeData(Name="COL_G",
-                                                        Value=self.Colour[1],
-                                                        fmt='{0:.6f}'))
+                                                        Value=f"{self.Colour[1]:.6f}"))
         self.Attributes.append(TkSceneNodeAttributeData(Name="COL_B",
-                                                        Value=self.Colour[2],
-                                                        fmt='{0:.6f}'))
+                                                        Value=f"{self.Colour[2]:.6f}"))
         self.Attributes.append(TkSceneNodeAttributeData(
             Name="MATERIAL", Value="MATERIALS/LIGHT.MATERIAL.MBIN"))
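
The attribute rewrites in `Object.py` and `PointLight.py` above follow one rule: `TkSceneNodeAttributeData` now receives pre-formatted strings, with floats pinned to six decimal places. Distilled into a hypothetical helper:

```python
def fmt_attr(value) -> str:
    # Floats are written as fixed six-decimal strings; everything else is
    # stringified as-is, matching the f"{...:.6f}" usages above.
    if isinstance(value, float):
        return f"{value:.6f}"
    return str(value)

assert fmt_attr(360.0) == "360.000000"
assert fmt_attr(-1) == "-1"
```
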
diff --git a/NMS/classes/Struct.py b/NMS/classes/Struct.py
index b4ca0b5..3887f1e 100644
--- a/NMS/classes/Struct.py
+++ b/NMS/classes/Struct.py
@@ -151,7 +151,7 @@ def __getitem__(self, key):
     def __setitem__(self, key, value):
         # assigns the value 'value' to self.data[key]
         # currently no checking so be careful! Incorrect use could lead to
-        # incorrect exml files!!!
+        # incorrect mxml files!!!
         self.data[key] = value
 
     def __str__(self):
diff --git a/NMS/material_node.py b/NMS/material_node.py
index 4f79331..3cbb93b 100644
--- a/NMS/material_node.py
+++ b/NMS/material_node.py
@@ -26,7 +26,7 @@ def create_material_node(mat_path: str, local_root_directory: str):
 
     # Add some material settings:
     if mat_data.CastShadow:
-        mat.shadow_method = 'OPAQUE'
+        mat.use_transparent_shadow = False
 
     nodes = mat.node_tree.nodes
     links = mat.node_tree.links
diff --git a/NMSDK.py b/NMSDK.py
index 2c0f06d..6fa83d0 100644
--- a/NMSDK.py
+++ b/NMSDK.py
@@ -173,8 +173,8 @@ class ExportSceneOperator(Operator):
         name="Don't convert files with MBINCompiler",
         description="Ticking this will mean MBINCompiler will not be run on "
                     "the produced files. This will leave a mix of .mbin and "
-                    ".exml files. This is generally not recommended unless "
-                    "you are trying to quickly test something as the .exml "
+                    ".mxml files. This is generally not recommended unless "
+                    "you are trying to quickly test something as the .mxml "
                     "files will be not formatted nicely.",
         default=False,
     )
@@ -714,8 +714,8 @@ class NMS_Export_Operator(Operator, ExportHelper):
         name="Don't convert files with MBINCompiler",
         description="Ticking this will mean MBINCompiler will not be run on "
                     "the produced files. This will leave a mix of .mbin and "
-                    ".exml files. This is generally not recommended unless "
-                    "you are trying to quickly test something as the .exml "
+                    ".mxml files. This is generally not recommended unless "
+                    "you are trying to quickly test something as the .mxml "
                     "files will be not formatted nicely.",
         default=False,
     )
@@ -833,9 +833,9 @@ class NMS_Import_Operator(Operator, ImportHelper):
     bl_label = "Import from SCENE file"
 
     # ImportHelper mixin class uses this
-    filename_ext = ".EXML"
+    filename_ext = ".MXML"
     filter_glob: StringProperty(
-        default="*.scene.exml;*.SCENE.EXML;*.scene.mbin;*.SCENE.MBIN",
+        default="*.scene.mxml;*.SCENE.MXML;*.scene.mbin;*.SCENE.MBIN",
         options={"HIDDEN"})
 
     clear_scene: BoolProperty(
diff --git a/README.md b/README.md
index d98e697..b340b16 100644
--- a/README.md
+++ b/README.md
@@ -13,7 +13,7 @@ Installing NMSDK is very easy. Head to the [NMSDK Release page](https://github.c
 
 Open Blender and open the user settings (Ctrl + Alt + U) (or `File` > `User Preferences...`), and select `Install Add-on from File...` (it is at the bottom left of the window). Select the `.zip` file you just downloaded and blender should install it without any errors.
 
-To make exporting easier, NMSDK will automatically convert all produced `.exml` files to `.mbin` files. For this to work, [MBINCompiler](https://github.com/monkeyman192/MBINCompiler) is required. See below for instructions on downloading and installing the latest version.
+To make exporting easier, NMSDK will automatically convert all produced `.mxml` files to `.mbin` files. For this to work, [MBINCompiler](https://github.com/monkeyman192/MBINCompiler) is required. See below for instructions on downloading and installing the latest version.
 
 ### Prerequisites
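
For context on the conversion loops touched below and in `export.py` above, the whole post-export pass reduces to a directory walk. A sketch (note that `export.py` lower-cases the extension before comparing, while `proc_gen.py`'s own check below stays case-sensitive):

```python
import os
import subprocess

def convert_tree_to_mbin(root: str, mbincompiler: str = "MBINCompiler.exe") -> None:
    # Hand every .mxml under root to MBINCompiler, case-insensitively.
    for directory, _, files in os.walk(root):
        for file in files:
            location = os.path.join(directory, file)
            if os.path.splitext(location)[1].lower() == '.mxml':
                subprocess.call([mbincompiler, location])
```
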
diff --git a/Tools/proc_gen.py b/Tools/proc_gen.py
index 364b94a..d55af48 100644
--- a/Tools/proc_gen.py
+++ b/Tools/proc_gen.py
@@ -105,7 +105,7 @@ def create_list(self):
         contains_scene = False
         scene_names = []
         for file in self.dir_list:
-            if "SCENE" in file and "EXML" not in file.upper():
+            if "SCENE" in file and "MXML" not in file.upper():
                 # in this case we have option 2.
                 # add the name of the scene file to the list of files
                 contains_scene = True
@@ -118,7 +118,7 @@ def create_list(self):
             # if we make it to this line option 1. is what has happened.
             # Search through
             for file in subfolders:
-                if "SCENE" in file and "EXML" not in file.upper():
+                if "SCENE" in file and "MXML" not in file.upper():
                     contains_scene = True
                     path = self.get_scene_path(os.path.join(self.path_name,
                                                             folder, file))
@@ -253,23 +253,23 @@ def write(self):
         if not os.path.exists(self.path):
             os.makedirs(self.path)
         self.TkModelDescriptorList.tree.write(
-            "{}.DESCRIPTOR.exml".format(
+            "{}.DESCRIPTOR.mxml".format(
                 os.path.join(self.path, self.name.upper())))
         self.TkGeometryData.tree.write(
-            "{}.GEOMETRY.exml".format(
+            "{}.GEOMETRY.mxml".format(
                 os.path.join(self.path, self.name.upper())))
         self.TkSceneNodeData.tree.write(
-            "{}.SCENE.exml".format(
+            "{}.SCENE.mxml".format(
                 os.path.join(self.path, self.name.upper())))
 
     def convert_to_mbin(self):
         # passes all the files produced by
-        print('Converting all .exml files to .mbin. Please wait while this '
+        print('Converting all .mxml files to .mbin. Please wait while this '
               'finishes.')
         for directory, _, files in os.walk(self.path):
             for file in files:
                 location = os.path.join(directory, file)
-                if os.path.splitext(location)[1] == '.exml':
+                if os.path.splitext(location)[1] == '.mxml':
                     subprocess.call(["MBINCompiler.exe", location])
                     os.remove(location)
diff --git a/Tools/utilities/bh_converter.py b/Tools/utilities/bh_converter.py
index 2408474..dd2c908 100644
--- a/Tools/utilities/bh_converter.py
+++ b/Tools/utilities/bh_converter.py
@@ -22,7 +22,7 @@ def getSubValues(element, func=lambda x: x):
 
 if __name__ == '__main__':
-    with open("COLTEST.GEOMETRY.MBIN.EXML") as f:
+    with open("COLTEST.GEOMETRY.MBIN.MXML") as f:
         file = tree.parse(source=f)
     bhStarts = getSubValues(findTag(file, "BoundHullVertSt"), int)
     bhEnds = getSubValues(findTag(file, "BoundHullVertEd"), int)
diff --git a/Tools/utilities/prettify.py b/Tools/utilities/prettify.py
index 20bdb12..d7be98f 100644
--- a/Tools/utilities/prettify.py
+++ b/Tools/utilities/prettify.py
@@ -24,7 +24,7 @@ def createWidgets(self):
         button_frame.pack()
 
     def process(self):
-        path_name = filedialog.askopenfilename(title="Specify exml file")
+        path_name = filedialog.askopenfilename(title="Specify mxml file")
         prettyPrintXml(path_name)
 
     def quit(self):
diff --git a/__init__.py b/__init__.py
index 7f7487f..c2f72c9 100644
--- a/__init__.py
+++ b/__init__.py
@@ -1,7 +1,7 @@
 bl_info = {
     "name": "No Man's Sky Development Kit",
     "author": "gregkwaste, monkeyman192",
-    "version": (0, 9, 26),
+    "version": (0, 9, 27),
     "blender": (4, 0, 0),
     "location": "File > Export/Import",
     "description": "Create NMS scene structures and export to NMS File format",
diff --git a/docs/api.md b/docs/api.md
index 3bb07d1..1de9941 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -70,7 +70,7 @@ Import a complete NMS scene into blender.
 
 **Parameters**:
 
 *path* : string
-> The complete file path to a `SCENE.MBIN` or `SCENE.EXML` file to be loaded into blender.
+> The complete file path to a `SCENE.MBIN` or `SCENE.MXML` file to be loaded into blender.
 
 **Notes**:
 The entire scene will be loaded into the active scene in blender.
@@ -86,7 +86,7 @@ Import part of a scene specified by the id of a mesh node in the scene file.
 
 **Parameters**:
 
 *path* : string
-> The complete file path to a `SCENE.MBIN` or `SCENE.EXML` file to be loaded into blender.
+> The complete file path to a `SCENE.MBIN` or `SCENE.MXML` file to be loaded into blender.
diff --git a/__init__.py b/__init__.py
index 7f7487f..c2f72c9 100644
--- a/__init__.py
+++ b/__init__.py
@@ -1,7 +1,7 @@
 bl_info = {
     "name": "No Man's Sky Development Kit",
     "author": "gregkwaste, monkeyman192",
-    "version": (0, 9, 26),
+    "version": (0, 9, 27),
     "blender": (4, 0, 0),
     "location": "File > Export/Import",
     "description": "Create NMS scene structures and export to NMS File format",
diff --git a/docs/api.md b/docs/api.md
index 3bb07d1..1de9941 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -70,7 +70,7 @@ Import a complete NMS scene into blender.
 
 **Parameters**:
 
 *path* : string
-> The complete file path to a `SCENE.MBIN` or `SCENE.EXML` file to be loaded into blender.
+> The complete file path to a `SCENE.MBIN` or `SCENE.MXML` file to be loaded into blender.
 
 **Notes**:
 The entire scene will be loaded into the active scene in blender.
@@ -86,7 +86,7 @@ Import part of a scene specified by the id of a mesh node in the scene file.
 
 **Parameters**:
 
 *path* : string
-> The complete file path to a `SCENE.MBIN` or `SCENE.EXML` file to be loaded into blender.
+> The complete file path to a `SCENE.MBIN` or `SCENE.MXML` file to be loaded into blender.
 
 *mesh_id* : string
 > The `Name` of the `TkSceneNodeData` in the Scene being loaded.
diff --git a/docs/setup.md b/docs/setup.md
index f65b12d..c1d13ea 100644
--- a/docs/setup.md
+++ b/docs/setup.md
@@ -9,7 +9,7 @@ Removing the old version is particularly important if you had a very old version
 
 Open Blender and open the preferences window (`Edit` > `Preferences...`), and select `Install...` (it is at the top right of the window). Select the `.zip` file you just downloaded and Blender should install it without any errors.
 
-To make exporting easier, NMSDK will automatically convert all produced `.exml` files to `.mbin` files. For this to work, *MBINCompiler* is required. See below for instructions on downloading and installing the latest version.
+To make exporting easier, NMSDK will automatically convert all produced `.mxml` files to `.mbin` files. For this to work, *MBINCompiler* is required. See below for instructions on downloading and installing the latest version.
 
 ### Prerequisites
diff --git a/old_docs/Adding Model to Mod Guide.md b/old_docs/Adding Model to Mod Guide.md
index 21e3dd6..a8eb9a5 100644
--- a/old_docs/Adding Model to Mod Guide.md
+++ b/old_docs/Adding Model to Mod Guide.md
@@ -20,4 +20,4 @@ These two lines dictate how regularly the object will get spawned. Higher values
 
 You can now copy the entire GcObjectSpawnData.xml Property into the other file in this folder in the same place.
 
-Recompile both exml files back into an mbin and create your mod as usual.
\ No newline at end of file
+Recompile both mxml files back into an mbin and create your mod as usual.
\ No newline at end of file
diff --git a/serialization/NMS_Structures/NMS_types.py b/serialization/NMS_Structures/NMS_types.py
index 223a33a..a62f96a 100644
--- a/serialization/NMS_Structures/NMS_types.py
+++ b/serialization/NMS_Structures/NMS_types.py
@@ -1,6 +1,7 @@
+from dataclasses import dataclass
 from io import BufferedWriter, BufferedReader
 import struct
-from typing import Annotated, Type, TypeVar
+from typing import Annotated, Type, TypeVar, Optional, Union
 import types
 
 from serialization.utils import bytes_to_quat
@@ -31,6 +32,7 @@ def serialize(cls, buf: BufferedWriter, value: str):
         buf.write(struct.pack(" list[int]:
     @classmethod
     def serialize(cls, buf: BufferedWriter, value):
         ptr = buf.tell()
-        buf.write(struct.pack(" list[int]:
         # buf.write(struct.pack(" 1:
+            buf.write(struct.pack("<" + fmt, *value))
+        else:
+            buf.write(struct.pack("<" + fmt * len(value), *value))
         else:
             buf.write(struct.pack(fmt, value))
     except struct.error:
@@ -115,7 +120,7 @@ def write(self, buf: Optional[BufferedWriter] = None, _is_top: bool = True) -> B
         else:
             type_._write(buf, val)
         if _is_top:
-            for dv in self._ds:
+            for dv in self._deferred_structs:
                 try:
                     # Move to the end of the file every time
                     buf.seek(0, 2)
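The `"<"` prefix visible in the added `struct.pack` calls above forces little-endian byte order with no alignment padding, which is what fixed-layout game formats expect. A standalone illustration of the two format-building styles used in the new branch (not NMSDK code):

```python
import struct

vals = [1.0, 2.0, 3.0]
# A per-element format repeated len(vals) times and a pre-counted "3f"
# produce byte-identical output:
assert struct.pack("<" + "f" * len(vals), *vals) == struct.pack("<3f", *vals)
# Without "<", native endianness and alignment apply, which can differ
# between platforms and silently change the file layout.
```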
diff --git a/serialization/serializers.py b/serialization/serializers.py
index 45e7f86..c5e34e6 100644
--- a/serialization/serializers.py
+++ b/serialization/serializers.py
@@ -1,9 +1,14 @@
+from array import array
 from typing import List
 
 from NMS.LOOKUPS import SERIALIZE_FMT_MAP, REV_SEMANTICS
 from serialization.formats import write_half, write_int_2_10_10_10_rev, ubytes_to_bytes
 
 
+def serialize_geometry_stream(data):
+    pass
+
+
 def serialize_vertex_stream(requires: List[str], **kwargs):
     """ Return a serialized version of the vertex data
@@ -34,7 +39,7 @@
     return b''
 
 
-def serialize_index_stream(indexes):
+def serialize_index_stream(indexes: array) -> bytes:
     """ Return a serialized version of the index data
     """
diff --git a/tests/import_export_tests/import_export_asteroid.py b/tests/import_export_tests/import_export_asteroid.py
index 352bdef..6c0a080 100644
--- a/tests/import_export_tests/import_export_asteroid.py
+++ b/tests/import_export_tests/import_export_asteroid.py
@@ -14,7 +14,7 @@
 if nmsdk_dir not in sys.path:
     sys.path.append(nmsdk_dir)
 
-from utils.utils import exml_to_dict  # noqa pylint: E402
+from utils.utils import mxml_to_dict  # noqa pylint: E402
 from utils.io import convert_file  # noqa pylint: E402
 
 
@@ -60,9 +60,9 @@
     assert op.exists(out_path)
     # Now, let's have a look at the scene and check that it matches the
     # original one (other than where modified)
-    # First we need to convert it to an exml file...
-    new_exml_scene = convert_file(out_path)
-    new_scene = exml_to_dict(new_exml_scene)
+    # First we need to convert it to an mxml file...
+    new_mxml_scene = convert_file(out_path)
+    new_scene = mxml_to_dict(new_mxml_scene)
     # Let's check to see if the new LOD values are there
     assert float(new_scene['Attributes'][1]['Value']) == 750.25
     assert float(new_scene['Attributes'][2]['Value']) == 1250
@@ -76,8 +76,8 @@
     # And then remove it from the comparison
     del new_scene['Children'][0]['Transform']['ScaleY']
     # Then convert the original scene file...
-    orig_exml_scene = convert_file(ASTEROID_PATH)
-    orig_scene = exml_to_dict(orig_exml_scene)
+    orig_mxml_scene = convert_file(ASTEROID_PATH)
+    orig_scene = mxml_to_dict(orig_mxml_scene)
     # Delete the modified values from this scene too.
     del orig_scene['Attributes'][1:4]
     del orig_scene['Children'][0]['Transform']['ScaleY']
@@ -88,8 +88,8 @@
     orig_descriptor = op.join(TEST_DATA_PATH, ASTEROID_BASE_PATH,
                               'ASTEROIDXL.DESCRIPTOR.MBIN')
-    orig_descriptor = exml_to_dict(convert_file(orig_descriptor))
+    orig_descriptor = mxml_to_dict(convert_file(orig_descriptor))
     new_descriptor = op.join(tempdir, ASTEROID_BASE_PATH,
                              'ASTEROIDXL.DESCRIPTOR.MBIN')
-    new_descriptor = exml_to_dict(convert_file(new_descriptor))
+    new_descriptor = mxml_to_dict(convert_file(new_descriptor))
     assert orig_descriptor == new_descriptor
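The comparison pattern these tests rely on, reduced to a sketch (the imports mirror the test files above and assume the NMSDK package root is on `sys.path`, as the tests arrange; `scenes_match` is a hypothetical helper name):

```python
from utils.io import convert_file
from utils.utils import mxml_to_dict


def scenes_match(exported_mbin: str, original_mbin: str) -> bool:
    """ Round-trip both scenes through MBINCompiler and compare as dicts. """
    # convert_file produces a .mxml next to each .mbin; mxml_to_dict then
    # parses it into a nested dict so the two scenes can be compared directly.
    new_scene = mxml_to_dict(convert_file(exported_mbin))
    orig_scene = mxml_to_dict(convert_file(original_mbin))
    return new_scene == orig_scene
```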
diff --git a/tests/import_export_tests/import_export_crystal.py b/tests/import_export_tests/import_export_crystal.py
index dcefc1c..268cfe5 100644
--- a/tests/import_export_tests/import_export_crystal.py
+++ b/tests/import_export_tests/import_export_crystal.py
@@ -12,7 +12,7 @@
 if nmsdk_dir not in sys.path:
     sys.path.append(nmsdk_dir)
 
-from utils.utils import exml_to_dict  # noqa pylint: E402
+from utils.utils import mxml_to_dict  # noqa pylint: E402
 from utils.io import convert_file  # noqa pylint: E402
 
 # The NMS game data to test the importing with is in the main test folder.
@@ -64,11 +64,11 @@
     assert op.exists(out_path)
     # Now, let's have a look at the scene and check that it matches the
     # original one...
-    # First we need to convert it to an exml file...
-    new_exml_scene = convert_file(out_path)
-    new_scene = exml_to_dict(new_exml_scene)
+    # First we need to convert it to an mxml file...
+    new_mxml_scene = convert_file(out_path)
+    new_scene = mxml_to_dict(new_mxml_scene)
     # Then convert the original scene file...
-    orig_exml_scene = convert_file(CRYSTAL_PATH)
-    orig_scene = exml_to_dict(orig_exml_scene)
+    orig_mxml_scene = convert_file(CRYSTAL_PATH)
+    orig_scene = mxml_to_dict(orig_mxml_scene)
     # Now let's compare it to the original
     assert new_scene == orig_scene
diff --git a/utils/io.py b/utils/io.py
index 57ed657..9729e95 100644
--- a/utils/io.py
+++ b/utils/io.py
@@ -48,8 +48,8 @@ def base_path(abs_path: str, rel_path: str):
     For example, if abs_path is a/b/c/d, and rel_path is c/d, then this will
     return a/b
     """
-    a_parts = list(Path(abs_path).parts)
-    r_parts = Path(rel_path).parts
+    a_parts = list(Path(abs_path.lower()).parts)
+    r_parts = Path(rel_path.lower()).parts
     for part in r_parts[::-1]:
         if a_parts[-1] == part:
             a_parts.pop(-1)
@@ -79,10 +79,10 @@ def realize_path(fpath: str, local_root_directory: str) -> str:
 
 
 def convert_file(fpath: str) -> str:
-    """ Convert an mbin or exml file to an exml or mbin file and return the
+    """ Convert an .mbin file to an .mxml file (or vice versa) and return the
     path of the produced file.
     """
-    exts = {'.MBIN': '.EXML', '.EXML': '.MBIN'}
+    exts = {'.MBIN': '.MXML', '.MXML': '.MBIN'}
     mbincompiler_path = bpy.context.scene.nmsdk_default_settings.MBINCompiler_path  # noqa
     retcode = subprocess.call(
         [mbincompiler_path, "-y", "-f", "-Q", fpath])
diff --git a/utils/utils.py b/utils/utils.py
index d2448eb..092de51 100644
--- a/utils/utils.py
+++ b/utils/utils.py
@@ -1,7 +1,7 @@
 import xml.etree.ElementTree as ET
 
 
-def exml_to_dict(fpath: str) -> dict:
+def mxml_to_dict(fpath: str) -> dict:
     tree = ET.parse(fpath)
     root = tree.getroot()
     return element_to_dict(root)
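`mxml_to_dict` delegates to `element_to_dict`, which is not part of this diff. A minimal sketch of what such a recursive helper could look like (an assumption for illustration only, not NMSDK's actual implementation, which the tests show also exposes keys such as `'Attributes'` and `'Transform'`):

```python
import xml.etree.ElementTree as ET


def element_to_dict(element: ET.Element) -> dict:
    """ Map an MXML element to nested dicts of its attributes and children. """
    result = dict(element.attrib)
    children = [element_to_dict(child) for child in element]
    if children:
        result['Children'] = children
    return result


# Usage, following mxml_to_dict above:
# scene = element_to_dict(ET.parse('MODEL.SCENE.MXML').getroot())
```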