Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Exporter update #107

Merged
merged 6 commits into from
Feb 4, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -59,8 +59,8 @@ textures/*
# vscode files
.vscode/

# We only want to store .mbin files, any exml files generated during testing should be ignored...
*.EXML
# We only want to store .mbin files, any mxml files generated during testing should be ignored...
*.MXML

# pytest cache files
.pytest_cache/
Expand Down
12 changes: 6 additions & 6 deletions ModelExporter/Descriptor.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,10 +38,10 @@ def __str__(self):
def __len__(self):
return len(self.children)

def to_exml(self):
def to_mxml(self):
main_data = List()
for child in self.children:
main_data.append(child.to_exml())
main_data.append(child.to_mxml())
return TkModelDescriptorList(List=main_data)


Expand All @@ -62,11 +62,11 @@ def __str__(self):
out += str(child)
return out

def to_exml(self):
def to_mxml(self):
# converts this to a TkResourceDescriptorList object
descriptors = List()
for child in self.children:
descriptors.append(child.to_exml())
descriptors.append(child.to_mxml())
return TkResourceDescriptorList(TypeId=self.TypeId,
Descriptors=descriptors)

Expand Down Expand Up @@ -96,7 +96,7 @@ def __str__(self):
out += str(child)
return out

def to_exml(self):
def to_mxml(self):
# first, we need to do some processing on the name.
# if the name starts with the prefix, then we need to sort it out so
# that it is correct
Expand Down Expand Up @@ -132,7 +132,7 @@ def not_proc(ob):
refs = List()
children = List()
for child in self.children:
children.append(child.to_exml())
children.append(child.to_mxml())
# Check how many children there are so we aren't making empty
# TkModelDescriptorList's for nothing.
if len(children) == 0:
Expand Down
81 changes: 62 additions & 19 deletions ModelExporter/addon_script.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,28 +11,45 @@
from idprop.types import IDPropertyGroup
from mathutils import Matrix, Vector
# Internal imports
from ModelExporter.utils import get_surr, calc_tangents
from ModelExporter.utils import calc_tangents
from utils.misc import CompareMatrices, get_obj_name
from utils.image_convert import convert_image
from ModelExporter.animations import process_anims
from ModelExporter.export import Export
from ModelExporter.Descriptor import Descriptor
from NMS.classes import (TkMaterialData, TkMaterialFlags,
TkVolumeTriggerType, TkMaterialSampler,
TkTransformData, TkMaterialUniform,
TkRotationComponentData, TkPhysicsComponentData)
TkMaterialUniform, TkRotationComponentData, TkPhysicsComponentData)
from NMS.classes import TkAnimationComponentData, TkAnimationData
from NMS.classes import List, Vector4f
from NMS.classes import TkAttachmentData
from NMS.classes import (Model, Mesh, Locator, Reference, Collision, Light,
Joint)
from NMS.classes.Object import Object, Model, Mesh, Locator, Reference, Collision, Light, Joint
from NMS.LOOKUPS import MATERIALFLAGS
from ModelExporter.ActionTriggerParser import ParseNodes
from serialization.NMS_Structures.Structures import TkTransformData

import numpy as np


ROT_X_MAT = Matrix.Rotation(radians(-90), 4, 'X')


def triangulate_mesh_new(mesh):
    """Triangulate the provided mesh in place.

    Parameters
    ----------
    mesh
        A Blender mesh datablock; its polygons are replaced by triangles.

    Note
    ----
    This will modify the original mesh. To avoid this you should ALWAYS pass in
    a temporary mesh object and do manipulations on this.
    """
    bm = bmesh.new()
    bm.from_mesh(mesh)
    # EAR_CLIP matches the ngon method used by triangulate_mesh(), keeping
    # both code paths producing identical triangulations.
    bmesh.ops.triangulate(bm, faces=bm.faces, ngon_method='EAR_CLIP')
    # Write the triangulated geometry back onto the source mesh, then release
    # the temporary bmesh. (The former `del bm` was redundant: the function
    # returns immediately after freeing.)
    bm.to_mesh(mesh)
    bm.free()


def triangulate_mesh(mesh):
""" Triangule the provided mesh.

Expand All @@ -44,7 +61,6 @@ def triangulate_mesh(mesh):
bm = bmesh.new()
bm.from_mesh(mesh)
data = bmesh.ops.triangulate(bm, faces=bm.faces, ngon_method='EAR_CLIP')
faces = data['faces']
face_mapping = data['face_map']
# This face mapping should be able to be used to map the new triangles back
# to the original polygons so that we can group them. When grouping we need
Expand Down Expand Up @@ -236,7 +252,7 @@ def __init__(self, output_directory, export_directory='CUSTOMMODELS',
entitydata = ParseNodes()
entity = TkAttachmentData(Components=List(entitydata))
entity.make_elements(main=True)
entity.tree.write('{}.ENTITY.exml'.format(
entity.tree.write('{}.ENTITY.mxml'.format(
op.join(self.output_directory, self.export_dir, group_name,
scene_name)))
self.state = {'FINISHED'}
Expand Down Expand Up @@ -555,6 +571,32 @@ def mesh_parser(self, ob, is_coll_mesh: bool = False):
if uvcount < 1:
raise Exception(f"Object {ob.name} missing UV map")

# is_triangulated = all([len(poly.vertices) == 3 for poly in data.polygons])
# if not is_triangulated:
# triangulate_mesh_new(data)

# _num_verts = len(data.vertices)
_num_indexes = len(data.loops)

# np_verts = np.empty((3 * _num_verts, 1))
# data.vertices.foreach_get("co", np_verts)
np_indexes = np.empty((_num_indexes, ), dtype=np.uint32)
data.loops.foreach_get("vertex_index", np_indexes)
# if not is_coll_mesh:
# np_uvs = np.empty((2 * _num_indexes, 1))
# data.uv_layers.active.data.foreach_get("uv", np_uvs)
# np_uvs = np_uvs.reshape((_num_indexes, 2))
# np_uvs[..., 1] = 1 - np_uvs[..., 1]
# np_uvs = np.hstack((np_uvs, np.zeros((_num_indexes, 1)), np.ones((_num_indexes, 1))))

# np_verts.reshape((_num_verts, 3))
# np_verts = np.hstack((np_verts, np.ones((_num_verts, 1))))

# print(np_verts)
# print(np_indexes)
# if not is_coll_mesh:
# print(np_uvs)

# Lists
_num_verts = len(data.vertices)
indexes = []
Expand Down Expand Up @@ -753,7 +795,7 @@ def mesh_parser(self, ob, is_coll_mesh: bool = False):
print(f'Exported collisions with {len(verts)} verts, '
f'{len(indexes)} indexes')

return verts, normals, tangents, uvs, indexes, chverts, colours
return verts, normals, tangents, uvs, indexes, chverts, colours, np_indexes

def recurce_entity(self, parent, obj, list_element=None, index=0):
# this will return the class object of the property recursively
Expand Down Expand Up @@ -792,7 +834,7 @@ def recurce_entity(self, parent, obj, list_element=None, index=0):
properties[prop] = self.recurce_entity(prop_group, prop)
return cls(**properties)

def parse_object(self, ob, parent):
def parse_object(self, ob, parent: Object):
newob = None
# Some objects (eg. imported bounded hulls) shouldn't be exported.
# If the object has this custom property then ignore it.
Expand Down Expand Up @@ -823,7 +865,6 @@ def parse_object(self, ob, parent):

# let's first sort out any entity data that is specified:
if ob.NMSMesh_props.has_entity or ob.NMSLocator_props.has_entity:
print('this has an entity:', ob)
# we need to pull information from two places:
# ob.NMSEntity_props
# check to see if the mesh's entity will get the action trigger
Expand Down Expand Up @@ -881,8 +922,10 @@ def parse_object(self, ob, parent):
# The object will have its name in the scene so that any data
# required can be linked up. This name will be overwritten by the
# exporter to be the path name of the scene.
optdict = {'Name': get_obj_name(ob, None),
'orig_node_data': orig_node_data}
optdict = {
'Name': get_obj_name(ob, None),
'orig_node_data': orig_node_data,
}

# Let's do a check on the values of the scale and the dimensions.
# We can have it so that the user can apply scale, even if by
Expand Down Expand Up @@ -920,7 +963,7 @@ def parse_object(self, ob, parent):
# We'll give them some "fake" vertex data which consists of
# no actual vertex data, but an index that doesn't point to
# anything.
verts, norms, tangs, luvs, indexes, chverts, _ = self.mesh_parser(ob, True) # noqa
verts, norms, tangs, luvs, indexes, chverts, _, np_indexes = self.mesh_parser(ob, True)

# Reset Transforms on meshes

Expand All @@ -929,6 +972,7 @@ def parse_object(self, ob, parent):
optdict['Normals'] = norms
optdict['Tangents'] = tangs
optdict['CHVerts'] = chverts
optdict['np_indexes'] = np_indexes
# Handle Primitives
elif colType == "Box":
optdict['Width'] = dims[0] / factor[0]
Expand All @@ -954,7 +998,7 @@ def parse_object(self, ob, parent):
elif ob.NMSNode_props.node_types == 'Mesh':
# ACTUAL MESH
# Parse object Geometry
verts, norms, tangs, luvs, indexes, chverts, colours = self.mesh_parser(ob) # noqa
verts, norms, tangs, luvs, indexes, chverts, colours, np_indexes = self.mesh_parser(ob)

# check whether the mesh has any child nodes we care about (such as
# a rotation vector)
Expand All @@ -971,8 +1015,7 @@ def parse_object(self, ob, parent):
entitydata.append(rotation_data)

# Create Mesh Object
actualname = get_obj_name(ob, None)
newob = Mesh(Name=actualname,
newob = Mesh(Name=get_obj_name(ob, None),
Transform=transform,
Vertices=verts,
UVs=luvs,
Expand All @@ -983,16 +1026,16 @@ def parse_object(self, ob, parent):
Colours=colours,
ExtraEntityData=entitydata,
HasAttachment=ob.NMSMesh_props.has_entity,
orig_node_data=orig_node_data)
orig_node_data=orig_node_data,
np_indexes=np_indexes)

# Check to see if the mesh's entity will be animation controller,
# if so assign to the anim_controller_obj variable.
if (ob.NMSEntity_props.is_anim_controller and
ob.NMSMesh_props.has_entity):
# tuple, first entry is the name of the entity, the second is
# the actual mesh object...
self.anim_controller_obj = (ob.NMSEntity_props.name_or_path,
newob)
self.anim_controller_obj = (ob.NMSEntity_props.name_or_path, newob)

# Try to parse material
if ob.NMSMesh_props.material_path != "":
Expand Down
Loading