Added scale factor to PSK export

Also a load of cleanup I don't have time to catalog
This commit is contained in:
Colin Basnett
2024-11-26 04:52:28 -08:00
parent e791859217
commit 42a859e24b
8 changed files with 188 additions and 144 deletions

View File

@@ -8,7 +8,8 @@ from bpy.types import Context, Armature, Action, Object, AnimData, TimelineMarke
from bpy_extras.io_utils import ExportHelper from bpy_extras.io_utils import ExportHelper
from bpy_types import Operator from bpy_types import Operator
from .properties import PSA_PG_export, PSA_PG_export_action_list_item, filter_sequences from .properties import PSA_PG_export, PSA_PG_export_action_list_item, filter_sequences, \
get_sequences_from_name_and_frame_range
from ..builder import build_psa, PsaBuildSequence, PsaBuildOptions from ..builder import build_psa, PsaBuildSequence, PsaBuildOptions
from ..writer import write_psa from ..writer import write_psa
from ...shared.helpers import populate_bone_collection_list, get_nla_strips_in_frame_range from ...shared.helpers import populate_bone_collection_list, get_nla_strips_in_frame_range
@@ -144,7 +145,7 @@ def get_timeline_marker_sequence_frame_ranges(animation_data: AnimData, context:
if next_marker_index < len(sorted_timeline_markers): if next_marker_index < len(sorted_timeline_markers):
# There is a next marker. Use that next marker's frame position as the last frame of this sequence. # There is a next marker. Use that next marker's frame position as the last frame of this sequence.
frame_end = sorted_timeline_markers[next_marker_index].frame frame_end = sorted_timeline_markers[next_marker_index].frame
nla_strips = get_nla_strips_in_frame_range(animation_data, marker.frame, frame_end) nla_strips = list(get_nla_strips_in_frame_range(animation_data, marker.frame, frame_end))
if len(nla_strips) > 0: if len(nla_strips) > 0:
frame_end = min(frame_end, max(map(lambda nla_strip: nla_strip.frame_end, nla_strips))) frame_end = min(frame_end, max(map(lambda nla_strip: nla_strip.frame_end, nla_strips)))
frame_start = max(frame_start, min(map(lambda nla_strip: nla_strip.frame_start, nla_strips))) frame_start = max(frame_start, min(map(lambda nla_strip: nla_strip.frame_start, nla_strips)))
@@ -168,20 +169,6 @@ def get_timeline_marker_sequence_frame_ranges(animation_data: AnimData, context:
return sequence_frame_ranges return sequence_frame_ranges
def get_sequences_from_name_and_frame_range(name: str, frame_start: int, frame_end: int) -> List[Tuple[str, int, int]]:
    """Expand a sequence name into one or more (name, frame_start, frame_end) tuples.

    A name of the form ``Forward/Backward`` expands to two sequences: the forward
    sequence over the given frame range, and a backward sequence covering the same
    range with start and end swapped (i.e. played in reverse). The split occurs at
    the *last* ``/`` because the first capture group is greedy. Any name without a
    ``/`` separator yields itself unchanged.

    :param name: Sequence name, optionally of the form ``forward/backward``.
    :param frame_start: First frame of the forward sequence.
    :param frame_end: Last frame of the forward sequence.
    :return: A list of one or two (name, frame_start, frame_end) tuples.
    """
    reversed_match = re.match(r'(.+)/(.+)', name)
    if reversed_match is None:
        return [(name, frame_start, frame_end)]
    forward_name, backwards_name = reversed_match.groups()
    return [
        (forward_name, frame_start, frame_end),
        (backwards_name, frame_end, frame_start),
    ]
def get_sequences_from_action(action: Action) -> List[Tuple[str, int, int]]: def get_sequences_from_action(action: Action) -> List[Tuple[str, int, int]]:
frame_start = int(action.frame_range[0]) frame_start = int(action.frame_range[0])
frame_end = int(action.frame_range[1]) frame_end = int(action.frame_range[1])
@@ -266,16 +253,22 @@ class PSA_OT_export(Operator, ExportHelper):
row.operator(PSA_OT_export_actions_select_all.bl_idname, text='All', icon='CHECKBOX_HLT') row.operator(PSA_OT_export_actions_select_all.bl_idname, text='All', icon='CHECKBOX_HLT')
row.operator(PSA_OT_export_actions_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT') row.operator(PSA_OT_export_actions_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT')
# ACTIONS from .ui import PSA_UL_export_sequences
if pg.sequence_source == 'ACTIONS':
rows = max(3, min(len(pg.action_list), 10)) def get_sequences_propnames_from_source(sequence_source: str) -> Tuple[str, str]:
sequences_panel.template_list('PSA_UL_export_sequences', '', pg, 'action_list', pg, 'action_list_index', rows=rows) match sequence_source:
elif pg.sequence_source == 'TIMELINE_MARKERS': case 'ACTIONS':
rows = max(3, min(len(pg.marker_list), 10)) return 'action_list', 'action_list_index'
sequences_panel.template_list('PSA_UL_export_sequences', '', pg, 'marker_list', pg, 'marker_list_index', rows=rows) case 'TIMELINE_MARKERS':
elif pg.sequence_source == 'NLA_TRACK_STRIPS': return 'marker_list', 'marker_list_index'
rows = max(3, min(len(pg.nla_strip_list), 10)) case 'NLA_TRACK_STRIPS':
sequences_panel.template_list('PSA_UL_export_sequences', '', pg, 'nla_strip_list', pg, 'nla_strip_list_index', rows=rows) return 'nla_strip_list', 'nla_strip_list_index'
case _:
raise ValueError(f'Unhandled sequence source: {sequence_source}')
propname, active_propname = get_sequences_propnames_from_source(pg.sequence_source)
sequences_panel.template_list(PSA_UL_export_sequences.bl_idname, '', pg, propname, pg, active_propname,
rows=max(3, min(len(getattr(pg, propname)), 10)))
flow = sequences_panel.grid_flow() flow = sequences_panel.grid_flow()
flow.use_property_split = True flow.use_property_split = True
@@ -379,7 +372,8 @@ class PSA_OT_export(Operator, ExportHelper):
export_sequences: List[PsaBuildSequence] = [] export_sequences: List[PsaBuildSequence] = []
if pg.sequence_source == 'ACTIONS': match pg.sequence_source:
case 'ACTIONS':
for action_item in filter(lambda x: x.is_selected, pg.action_list): for action_item in filter(lambda x: x.is_selected, pg.action_list):
if len(action_item.action.fcurves) == 0: if len(action_item.action.fcurves) == 0:
continue continue
@@ -392,7 +386,7 @@ class PSA_OT_export(Operator, ExportHelper):
export_sequence.compression_ratio = action_item.action.psa_export.compression_ratio export_sequence.compression_ratio = action_item.action.psa_export.compression_ratio
export_sequence.key_quota = action_item.action.psa_export.key_quota export_sequence.key_quota = action_item.action.psa_export.key_quota
export_sequences.append(export_sequence) export_sequences.append(export_sequence)
elif pg.sequence_source == 'TIMELINE_MARKERS': case 'TIMELINE_MARKERS':
for marker_item in filter(lambda x: x.is_selected, pg.marker_list): for marker_item in filter(lambda x: x.is_selected, pg.marker_list):
export_sequence = PsaBuildSequence() export_sequence = PsaBuildSequence()
export_sequence.name = marker_item.name export_sequence.name = marker_item.name
@@ -403,7 +397,7 @@ class PSA_OT_export(Operator, ExportHelper):
map(lambda x: x.action, get_nla_strips_in_frame_range(animation_data, marker_item.frame_start, marker_item.frame_end))) map(lambda x: x.action, get_nla_strips_in_frame_range(animation_data, marker_item.frame_start, marker_item.frame_end)))
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, nla_strips_actions) export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, nla_strips_actions)
export_sequences.append(export_sequence) export_sequences.append(export_sequence)
elif pg.sequence_source == 'NLA_TRACK_STRIPS': case 'NLA_TRACK_STRIPS':
for nla_strip_item in filter(lambda x: x.is_selected, pg.nla_strip_list): for nla_strip_item in filter(lambda x: x.is_selected, pg.nla_strip_list):
export_sequence = PsaBuildSequence() export_sequence = PsaBuildSequence()
export_sequence.name = nla_strip_item.name export_sequence.name = nla_strip_item.name
@@ -414,7 +408,7 @@ class PSA_OT_export(Operator, ExportHelper):
export_sequence.compression_ratio = nla_strip_item.action.psa_export.compression_ratio export_sequence.compression_ratio = nla_strip_item.action.psa_export.compression_ratio
export_sequence.key_quota = nla_strip_item.action.psa_export.key_quota export_sequence.key_quota = nla_strip_item.action.psa_export.key_quota
export_sequences.append(export_sequence) export_sequences.append(export_sequence)
else: case _:
raise ValueError(f'Unhandled sequence source: {pg.sequence_source}') raise ValueError(f'Unhandled sequence source: {pg.sequence_source}')
options = PsaBuildOptions() options = PsaBuildOptions()
@@ -448,12 +442,14 @@ class PSA_OT_export_actions_select_all(Operator):
@classmethod @classmethod
def get_item_list(cls, context): def get_item_list(cls, context):
pg = context.scene.psa_export pg = context.scene.psa_export
if pg.sequence_source == 'ACTIONS': match pg.sequence_source:
case 'ACTIONS':
return pg.action_list return pg.action_list
elif pg.sequence_source == 'TIMELINE_MARKERS': case 'TIMELINE_MARKERS':
return pg.marker_list return pg.marker_list
elif pg.sequence_source == 'NLA_TRACK_STRIPS': case 'NLA_TRACK_STRIPS':
return pg.nla_strip_list return pg.nla_strip_list
case _:
return None return None
@classmethod @classmethod
@@ -481,12 +477,14 @@ class PSA_OT_export_actions_deselect_all(Operator):
@classmethod @classmethod
def get_item_list(cls, context): def get_item_list(cls, context):
pg = context.scene.psa_export pg = context.scene.psa_export
if pg.sequence_source == 'ACTIONS': match pg.sequence_source:
case 'ACTIONS':
return pg.action_list return pg.action_list
elif pg.sequence_source == 'TIMELINE_MARKERS': case 'TIMELINE_MARKERS':
return pg.marker_list return pg.marker_list
elif pg.sequence_source == 'NLA_TRACK_STRIPS': case 'NLA_TRACK_STRIPS':
return pg.nla_strip_list return pg.nla_strip_list
case _:
return None return None
@classmethod @classmethod

View File

@@ -1,7 +1,7 @@
import re import re
import sys import sys
from fnmatch import fnmatch from fnmatch import fnmatch
from typing import List, Optional from typing import List, Optional, Tuple
from bpy.props import BoolProperty, PointerProperty, EnumProperty, FloatProperty, CollectionProperty, IntProperty, \ from bpy.props import BoolProperty, PointerProperty, EnumProperty, FloatProperty, CollectionProperty, IntProperty, \
StringProperty StringProperty
@@ -42,6 +42,20 @@ class PSA_PG_export_nla_strip_list_item(PropertyGroup):
is_selected: BoolProperty(default=True) is_selected: BoolProperty(default=True)
def get_sequences_from_name_and_frame_range(name: str, frame_start: int, frame_end: int) -> List[Tuple[str, int, int]]:
    """Turn a sequence name into one or two (name, frame_start, frame_end) tuples.

    Names written as ``Forward/Backward`` produce a pair of sequences: the
    forward one over the given range, plus a backward one with the frame range
    reversed. Names without a separator map to a single sequence as-is.
    """
    sequences: List[Tuple[str, int, int]] = []
    separator_match = re.match(r'(.+)/(.+)', name)
    if separator_match:
        # Two sequences: the named forward range, and its reversed counterpart.
        sequences.append((separator_match.group(1), frame_start, frame_end))
        sequences.append((separator_match.group(2), frame_end, frame_start))
    else:
        sequences.append((name, frame_start, frame_end))
    return sequences
def nla_track_update_cb(self: 'PSA_PG_export', context: Context) -> None: def nla_track_update_cb(self: 'PSA_PG_export', context: Context) -> None:
self.nla_strip_list.clear() self.nla_strip_list.clear()
match = re.match(r'^(\d+).+$', self.nla_track) match = re.match(r'^(\d+).+$', self.nla_track)
@@ -52,11 +66,12 @@ def nla_track_update_cb(self: 'PSA_PG_export', context: Context) -> None:
return return
nla_track = animation_data.nla_tracks[self.nla_track_index] nla_track = animation_data.nla_tracks[self.nla_track_index]
for nla_strip in nla_track.strips: for nla_strip in nla_track.strips:
for sequence_name, frame_start, frame_end in get_sequences_from_name_and_frame_range(nla_strip.name, nla_strip.frame_start, nla_strip.frame_end):
strip: PSA_PG_export_nla_strip_list_item = self.nla_strip_list.add() strip: PSA_PG_export_nla_strip_list_item = self.nla_strip_list.add()
strip.action = nla_strip.action strip.action = nla_strip.action
strip.name = nla_strip.name strip.name = sequence_name
strip.frame_start = nla_strip.frame_start strip.frame_start = frame_start
strip.frame_end = nla_strip.frame_end strip.frame_end = frame_end
def get_animation_data(pg: 'PSA_PG_export', context: Context) -> Optional[AnimData]: def get_animation_data(pg: 'PSA_PG_export', context: Context) -> Optional[AnimData]:
@@ -69,8 +84,7 @@ def get_animation_data(pg: 'PSA_PG_export', context: Context) -> Optional[AnimDa
def nla_track_search_cb(self, context: Context, edit_text: str): def nla_track_search_cb(self, context: Context, edit_text: str):
pg = getattr(context.scene, 'psa_export') pg = getattr(context.scene, 'psa_export')
animation_data = get_animation_data(pg, context) animation_data = get_animation_data(pg, context)
if animation_data is None: if animation_data is not None:
return
for index, nla_track in enumerate(animation_data.nla_tracks): for index, nla_track in enumerate(animation_data.nla_tracks):
yield f'{index} - {nla_track.name}' yield f'{index} - {nla_track.name}'

View File

@@ -1,4 +1,4 @@
from typing import cast import typing
from bpy.types import UIList from bpy.types import UIList
@@ -6,6 +6,7 @@ from .properties import PSA_PG_export_action_list_item, filter_sequences
class PSA_UL_export_sequences(UIList): class PSA_UL_export_sequences(UIList):
bl_idname = 'PSA_UL_export_sequences'
def __init__(self): def __init__(self):
super(PSA_UL_export_sequences, self).__init__() super(PSA_UL_export_sequences, self).__init__()
@@ -13,7 +14,7 @@ class PSA_UL_export_sequences(UIList):
self.use_filter_show = True self.use_filter_show = True
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index): def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
item = cast(PSA_PG_export_action_list_item, item) item = typing.cast(PSA_PG_export_action_list_item, item)
is_pose_marker = hasattr(item, 'is_pose_marker') and item.is_pose_marker is_pose_marker = hasattr(item, 'is_pose_marker') and item.is_pose_marker
layout.prop(item, 'is_selected', icon_only=True, text=item.name) layout.prop(item, 'is_selected', icon_only=True, text=item.name)
if hasattr(item, 'action') and item.action is not None and item.action.asset_data is not None: if hasattr(item, 'action') and item.action is not None and item.action.asset_data is not None:

View File

@@ -1,9 +1,10 @@
import typing
from typing import Optional from typing import Optional
import bmesh import bmesh
import bpy
import numpy as np import numpy as np
from bpy.types import Armature, Material, Collection, Context from bpy.types import Material, Collection, Context
from mathutils import Matrix
from .data import * from .data import *
from .properties import triangle_type_and_bit_flags_to_poly_flags from .properties import triangle_type_and_bit_flags_to_poly_flags
@@ -23,6 +24,7 @@ class PskBuildOptions(object):
self.object_eval_state = 'EVALUATED' self.object_eval_state = 'EVALUATED'
self.materials: List[Material] = [] self.materials: List[Material] = []
self.should_enforce_bone_name_restrictions = False self.should_enforce_bone_name_restrictions = False
self.scale = 1.0
def get_mesh_objects_for_collection(collection: Collection, should_exclude_hidden_meshes: bool = True): def get_mesh_objects_for_collection(collection: Collection, should_exclude_hidden_meshes: bool = True):
@@ -40,7 +42,7 @@ def get_mesh_objects_for_context(context: Context):
yield obj yield obj
def get_armature_for_mesh_objects(mesh_objects: List[Object]) -> Optional[Object]: def get_armature_for_mesh_objects(mesh_objects: Iterable[Object]) -> Optional[Object]:
# Ensure that there are either no armature modifiers (static mesh) or that there is exactly one armature modifier # Ensure that there are either no armature modifiers (static mesh) or that there is exactly one armature modifier
# object shared between all meshes. # object shared between all meshes.
armature_modifier_objects = set() armature_modifier_objects = set()
@@ -100,6 +102,8 @@ def build_psk(context, input_objects: PskInputObjects, options: PskBuildOptions)
psk = Psk() psk = Psk()
bones = [] bones = []
scale_matrix = Matrix.Scale(options.scale, 4)
if armature_object is None or len(armature_object.data.bones) == 0: if armature_object is None or len(armature_object.data.bones) == 0:
# If the mesh has no armature object or no bones, simply assign it a dummy bone at the root to satisfy the # If the mesh has no armature object or no bones, simply assign it a dummy bone at the root to satisfy the
# requirement that a PSK file must have at least one bone. # requirement that a PSK file must have at least one bone.
@@ -151,6 +155,8 @@ def build_psk(context, input_objects: PskInputObjects, options: PskBuildOptions)
rotation = bone_rotation @ local_rotation rotation = bone_rotation @ local_rotation
rotation.conjugate() rotation.conjugate()
location = scale_matrix @ location
psk_bone.location.x = location.x psk_bone.location.x = location.x
psk_bone.location.y = location.y psk_bone.location.y = location.y
psk_bone.location.z = location.z psk_bone.location.z = location.z
@@ -230,11 +236,12 @@ def build_psk(context, input_objects: PskInputObjects, options: PskBuildOptions)
armature_object.data.pose_position = old_pose_position armature_object.data.pose_position = old_pose_position
vertex_offset = len(psk.points) vertex_offset = len(psk.points)
matrix_world = scale_matrix @ mesh_object.matrix_world
# VERTICES # VERTICES
for vertex in mesh_data.vertices: for vertex in mesh_data.vertices:
point = Vector3() point = Vector3()
v = mesh_object.matrix_world @ vertex.co v = matrix_world @ vertex.co
point.x = v.x point.x = v.x
point.y = v.y point.y = v.y
point.z = v.z point.z = v.z

View File

@@ -1,7 +1,7 @@
from typing import List from typing import List
import bpy import bpy
from bpy.props import StringProperty, BoolProperty, EnumProperty from bpy.props import StringProperty, BoolProperty, EnumProperty, FloatProperty
from bpy.types import Operator, Context, Object from bpy.types import Operator, Context, Object
from bpy_extras.io_utils import ExportHelper from bpy_extras.io_utils import ExportHelper
@@ -32,6 +32,7 @@ def get_materials_for_mesh_objects(mesh_objects: List[Object]):
materials.append(material) materials.append(material)
return materials return materials
def populate_material_list(mesh_objects, material_list): def populate_material_list(mesh_objects, material_list):
materials = get_materials_for_mesh_objects(mesh_objects) materials = get_materials_for_mesh_objects(mesh_objects)
material_list.clear() material_list.clear()
@@ -107,6 +108,13 @@ class PSK_OT_export_collection(Operator, ExportHelper):
name='Visible Only', name='Visible Only',
description='Export only visible meshes' description='Export only visible meshes'
) )
scale: FloatProperty(
name='Scale',
default=1.0,
description='Scale factor to apply to the exported mesh and armature',
min=0.0001,
soft_max=100.0
)
def execute(self, context): def execute(self, context):
collection = bpy.data.collections.get(self.collection) collection = bpy.data.collections.get(self.collection)
@@ -122,6 +130,7 @@ class PSK_OT_export_collection(Operator, ExportHelper):
options.object_eval_state = self.object_eval_state options.object_eval_state = self.object_eval_state
options.materials = get_materials_for_mesh_objects(input_objects.mesh_objects) options.materials = get_materials_for_mesh_objects(input_objects.mesh_objects)
options.should_enforce_bone_name_restrictions = self.should_enforce_bone_name_restrictions options.should_enforce_bone_name_restrictions = self.should_enforce_bone_name_restrictions
options.scale = self.scale
try: try:
result = build_psk(context, input_objects, options) result = build_psk(context, input_objects, options)
@@ -141,6 +150,12 @@ class PSK_OT_export_collection(Operator, ExportHelper):
def draw(self, context: Context): def draw(self, context: Context):
layout = self.layout layout = self.layout
flow = layout.grid_flow(row_major=True)
flow.use_property_split = True
flow.use_property_decorate = False
flow.prop(self, 'scale')
# MESH # MESH
mesh_header, mesh_panel = layout.panel('Mesh', default_closed=False) mesh_header, mesh_panel = layout.panel('Mesh', default_closed=False)
mesh_header.label(text='Mesh', icon='MESH_DATA') mesh_header.label(text='Mesh', icon='MESH_DATA')
@@ -260,6 +275,7 @@ class PSK_OT_export(Operator, ExportHelper):
options.object_eval_state = pg.object_eval_state options.object_eval_state = pg.object_eval_state
options.materials = [m.material for m in pg.material_list] options.materials = [m.material for m in pg.material_list]
options.should_enforce_bone_name_restrictions = pg.should_enforce_bone_name_restrictions options.should_enforce_bone_name_restrictions = pg.should_enforce_bone_name_restrictions
options.scale = pg.scale
try: try:
result = build_psk(context, input_objects, options) result = build_psk(context, input_objects, options)

View File

@@ -1,4 +1,4 @@
from bpy.props import EnumProperty, CollectionProperty, IntProperty, BoolProperty, PointerProperty from bpy.props import EnumProperty, CollectionProperty, IntProperty, BoolProperty, PointerProperty, FloatProperty
from bpy.types import PropertyGroup, Material from bpy.types import PropertyGroup, Material
from ...shared.types import PSX_PG_bone_collection_list_item from ...shared.types import PSX_PG_bone_collection_list_item
@@ -42,6 +42,13 @@ class PSK_PG_export(PropertyGroup):
description='Enforce that bone names must only contain letters, numbers, spaces, hyphens and underscores.\n\n' description='Enforce that bone names must only contain letters, numbers, spaces, hyphens and underscores.\n\n'
'Depending on the engine, improper bone names might not be referenced correctly by scripts' 'Depending on the engine, improper bone names might not be referenced correctly by scripts'
) )
scale: FloatProperty(
name='Scale',
default=1.0,
description='Scale factor to apply to the exported mesh',
min=0.0001,
soft_max=100.0
)
classes = ( classes = (

View File

@@ -36,37 +36,39 @@ def read_psk(path: str) -> Psk:
while fp.read(1): while fp.read(1):
fp.seek(-1, 1) fp.seek(-1, 1)
section = Section.from_buffer_copy(fp.read(ctypes.sizeof(Section))) section = Section.from_buffer_copy(fp.read(ctypes.sizeof(Section)))
if section.name == b'ACTRHEAD': match section.name:
case b'ACTRHEAD':
pass pass
elif section.name == b'PNTS0000': case b'PNTS0000':
_read_types(fp, Vector3, section, psk.points) _read_types(fp, Vector3, section, psk.points)
elif section.name == b'VTXW0000': case b'VTXW0000':
if section.data_size == ctypes.sizeof(Psk.Wedge16): if section.data_size == ctypes.sizeof(Psk.Wedge16):
_read_types(fp, Psk.Wedge16, section, psk.wedges) _read_types(fp, Psk.Wedge16, section, psk.wedges)
elif section.data_size == ctypes.sizeof(Psk.Wedge32): elif section.data_size == ctypes.sizeof(Psk.Wedge32):
_read_types(fp, Psk.Wedge32, section, psk.wedges) _read_types(fp, Psk.Wedge32, section, psk.wedges)
else: else:
raise RuntimeError('Unrecognized wedge format') raise RuntimeError('Unrecognized wedge format')
elif section.name == b'FACE0000': case b'FACE0000':
_read_types(fp, Psk.Face, section, psk.faces) _read_types(fp, Psk.Face, section, psk.faces)
elif section.name == b'MATT0000': case b'MATT0000':
_read_types(fp, Psk.Material, section, psk.materials) _read_types(fp, Psk.Material, section, psk.materials)
elif section.name == b'REFSKELT': case b'REFSKELT':
_read_types(fp, Psk.Bone, section, psk.bones) _read_types(fp, Psk.Bone, section, psk.bones)
elif section.name == b'RAWWEIGHTS': case b'RAWWEIGHTS':
_read_types(fp, Psk.Weight, section, psk.weights) _read_types(fp, Psk.Weight, section, psk.weights)
elif section.name == b'FACE3200': case b'FACE3200':
_read_types(fp, Psk.Face32, section, psk.faces) _read_types(fp, Psk.Face32, section, psk.faces)
elif section.name == b'VERTEXCOLOR': case b'VERTEXCOLOR':
_read_types(fp, Color, section, psk.vertex_colors) _read_types(fp, Color, section, psk.vertex_colors)
elif section.name.startswith(b'EXTRAUVS'): case b'VTXNORMS':
_read_types(fp, Vector2, section, psk.extra_uvs)
elif section.name == b'VTXNORMS':
_read_types(fp, Vector3, section, psk.vertex_normals) _read_types(fp, Vector3, section, psk.vertex_normals)
elif section.name == b'MRPHINFO': case b'MRPHINFO':
_read_types(fp, Psk.MorphInfo, section, psk.morph_infos) _read_types(fp, Psk.MorphInfo, section, psk.morph_infos)
elif section.name == b'MRPHDATA': case b'MRPHDATA':
_read_types(fp, Psk.MorphData, section, psk.morph_data) _read_types(fp, Psk.MorphData, section, psk.morph_data)
case _:
if section.name.startswith(b'EXTRAUVS'):
_read_types(fp, Vector2, section, psk.extra_uvs)
else: else:
# Section is not handled, skip it. # Section is not handled, skip it.
fp.seek(section.data_size * section.data_count, os.SEEK_CUR) fp.seek(section.data_size * section.data_count, os.SEEK_CUR)

View File

@@ -1,9 +1,10 @@
import re import re
import typing from typing import List, Iterable, cast
from typing import List, Iterable
import bpy.types import bpy
from bpy.types import NlaStrip, Object, AnimData from bpy.props import CollectionProperty
from bpy.types import AnimData, Object
from bpy.types import Armature
def rgb_to_srgb(c: float): def rgb_to_srgb(c: float):
@@ -13,10 +14,9 @@ def rgb_to_srgb(c: float):
return 12.92 * c return 12.92 * c
def get_nla_strips_in_frame_range(animation_data: AnimData, frame_min: float, frame_max: float) -> List[NlaStrip]: def get_nla_strips_in_frame_range(animation_data: AnimData, frame_min: float, frame_max: float):
if animation_data is None: if animation_data is None:
return [] return
strips = []
for nla_track in animation_data.nla_tracks: for nla_track in animation_data.nla_tracks:
if nla_track.mute: if nla_track.mute:
continue continue
@@ -24,11 +24,10 @@ def get_nla_strips_in_frame_range(animation_data: AnimData, frame_min: float, fr
if (strip.frame_start < frame_min and strip.frame_end > frame_max) or \ if (strip.frame_start < frame_min and strip.frame_end > frame_max) or \
(frame_min <= strip.frame_start < frame_max) or \ (frame_min <= strip.frame_start < frame_max) or \
(frame_min < strip.frame_end <= frame_max): (frame_min < strip.frame_end <= frame_max):
strips.append(strip) yield strip
return strips
def populate_bone_collection_list(armature_object: Object, bone_collection_list: bpy.props.CollectionProperty) -> None: def populate_bone_collection_list(armature_object: Object, bone_collection_list: CollectionProperty) -> None:
""" """
Updates the bone collections collection. Updates the bone collections collection.
@@ -53,7 +52,7 @@ def populate_bone_collection_list(armature_object: Object, bone_collection_list:
bone_collection_list.clear() bone_collection_list.clear()
armature = armature_object.data armature = cast(Armature, armature_object.data)
if armature is None: if armature is None:
return return
@@ -82,7 +81,7 @@ def check_bone_names(bone_names: Iterable[str]):
f'You can bypass this by disabling "Enforce Bone Name Restrictions" in the export settings.') f'You can bypass this by disabling "Enforce Bone Name Restrictions" in the export settings.')
def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_collection_indices: List[int]) -> List[str]: def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_collection_indices: Iterable[int]) -> List[str]:
""" """
Returns a sorted list of bone indices that should be exported for the given bone filter mode and bone collections. Returns a sorted list of bone indices that should be exported for the given bone filter mode and bone collections.
@@ -90,13 +89,13 @@ def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_c
:param armature_object: Blender object with type 'ARMATURE' :param armature_object: Blender object with type 'ARMATURE'
:param bone_filter_mode: One of ['ALL', 'BONE_COLLECTIONS'] :param bone_filter_mode: One of ['ALL', 'BONE_COLLECTIONS']
:param bone_collection_indices: List of bone collection indices to be exported. :param bone_collection_indices: A list of bone collection indices to export.
:return: A sorted list of bone indices that should be exported. :return: A sorted list of bone indices that should be exported.
""" """
if armature_object is None or armature_object.type != 'ARMATURE': if armature_object is None or armature_object.type != 'ARMATURE':
raise ValueError('An armature object must be supplied') raise ValueError('An armature object must be supplied')
armature_data = typing.cast(bpy.types.Armature, armature_object.data) armature_data = cast(Armature, armature_object.data)
bones = armature_data.bones bones = armature_data.bones
bone_names = [x.name for x in bones] bone_names = [x.name for x in bones]