From 4937f8f779b7c7d17805fc19c0aecf51a1d2d0f5 Mon Sep 17 00:00:00 2001 From: Colin Basnett Date: Mon, 27 Jun 2022 18:10:37 -0700 Subject: [PATCH] Refactoring to reduce pointless class encapsulation when just a function would do. --- io_scene_psk_psa/helpers.py | 12 +- io_scene_psk_psa/psa/builder.py | 491 ++++++++++++++++--------------- io_scene_psk_psa/psa/exporter.py | 77 +++-- io_scene_psk_psa/psa/importer.py | 345 +++++++++++----------- io_scene_psk_psa/psa/reader.py | 4 +- io_scene_psk_psa/psk/builder.py | 438 ++++++++++++++------------- io_scene_psk_psa/psk/exporter.py | 91 +++--- io_scene_psk_psa/psk/importer.py | 369 ++++++++++++----------- io_scene_psk_psa/psk/reader.py | 89 +++--- 9 files changed, 956 insertions(+), 960 deletions(-) diff --git a/io_scene_psk_psa/helpers.py b/io_scene_psk_psa/helpers.py index 3057bf6..03c6b6a 100644 --- a/io_scene_psk_psa/helpers.py +++ b/io_scene_psk_psa/helpers.py @@ -32,22 +32,22 @@ def rgb_to_srgb(c): return 12.92 * c -def get_nla_strips_ending_at_frame(object, frame) -> List[NlaStrip]: - if object is None or object.animation_data is None: +def get_nla_strips_ending_at_frame(animation_data, frame) -> List[NlaStrip]: + if animation_data is None: return [] strips = [] - for nla_track in object.animation_data.nla_tracks: + for nla_track in animation_data.nla_tracks: for strip in nla_track.strips: if strip.frame_end == frame: strips.append(strip) return strips -def get_nla_strips_in_timeframe(object, frame_min, frame_max) -> List[NlaStrip]: - if object is None or object.animation_data is None: +def get_nla_strips_in_timeframe(animation_data, frame_min, frame_max) -> List[NlaStrip]: + if animation_data is None: return [] strips = [] - for nla_track in object.animation_data.nla_tracks: + for nla_track in animation_data.nla_tracks: if nla_track.mute: continue for strip in nla_track.strips: diff --git a/io_scene_psk_psa/psa/builder.py b/io_scene_psk_psa/psa/builder.py index d52297c..c0170c0 100644 --- 
a/io_scene_psk_psa/psa/builder.py +++ b/io_scene_psk_psa/psa/builder.py @@ -1,14 +1,15 @@ from typing import Dict, Iterable from bpy.types import Action -from mathutils import Matrix from .data import * from ..helpers import * -class PsaBuilderOptions(object): +class PsaBuildOptions(object): def __init__(self): + self.should_override_animation_data = False + self.animation_data_override = None self.fps_source = 'SCENE' self.fps_custom = 30.0 self.sequence_source = 'ACTIONS' @@ -23,260 +24,262 @@ class PsaBuilderOptions(object): self.root_motion = False -class PsaBuilder(object): - def __init__(self): - pass - - def get_sequence_fps(self, context, options: PsaBuilderOptions, actions: Iterable[Action]) -> float: - if options.fps_source == 'SCENE': +def get_sequence_fps(context, options: PsaBuildOptions, actions: Iterable[Action]) -> float: + if options.fps_source == 'SCENE': + return context.scene.render.fps + if options.fps_source == 'CUSTOM': + return options.fps_custom + elif options.fps_source == 'ACTION_METADATA': + # Get the minimum value of action metadata FPS values. + fps_list = [] + for action in filter(lambda x: 'psa_sequence_fps' in x, actions): + fps = action['psa_sequence_fps'] + if type(fps) == int or type(fps) == float: + fps_list.append(fps) + if len(fps_list) > 0: + return min(fps_list) + else: + # No valid action metadata to use, fallback to scene FPS return context.scene.render.fps - if options.fps_source == 'CUSTOM': - return options.fps_custom - elif options.fps_source == 'ACTION_METADATA': - # Get the minimum value of action metadata FPS values. 
- fps_list = [] - for action in filter(lambda x: 'psa_sequence_fps' in x, actions): - fps = action['psa_sequence_fps'] - if type(fps) == int or type(fps) == float: - fps_list.append(fps) - if len(fps_list) > 0: - return min(fps_list) - else: - # No valid action metadata to use, fallback to scene FPS - return context.scene.render.fps + else: + raise RuntimeError(f'Invalid FPS source "{options.fps_source}"') + + +def get_timeline_marker_sequence_frame_ranges(animation_data, context, options: PsaBuildOptions) -> Dict: + # Timeline markers need to be sorted so that we can determine the sequence start and end positions. + sequence_frame_ranges = dict() + sorted_timeline_markers = list(sorted(context.scene.timeline_markers, key=lambda x: x.frame)) + sorted_timeline_marker_names = list(map(lambda x: x.name, sorted_timeline_markers)) + + for marker_name in options.marker_names: + marker = context.scene.timeline_markers[marker_name] + frame_min = marker.frame + # Determine the final frame of the sequence based on the next marker. + # If no subsequent marker exists, use the maximum frame_end from all NLA strips. + marker_index = sorted_timeline_marker_names.index(marker_name) + next_marker_index = marker_index + 1 + frame_max = 0 + if next_marker_index < len(sorted_timeline_markers): + # There is a next marker. Use that next marker's frame position as the last frame of this sequence. + frame_max = sorted_timeline_markers[next_marker_index].frame + if options.should_trim_timeline_marker_sequences: + nla_strips = get_nla_strips_in_timeframe(animation_data, marker.frame, frame_max) + if len(nla_strips) > 0: + frame_max = min(frame_max, max(map(lambda nla_strip: nla_strip.frame_end, nla_strips))) + frame_min = max(frame_min, min(map(lambda nla_strip: nla_strip.frame_start, nla_strips))) + else: + # No strips in between this marker and the next, just export this as a one-frame animation. 
+ frame_max = frame_min else: - raise RuntimeError(f'Invalid FPS source "{options.fps_source}"') - - def build(self, context, options: PsaBuilderOptions) -> Psa: - active_object = context.view_layer.objects.active - - if active_object.type != 'ARMATURE': - raise RuntimeError('Selected object must be an Armature') - - armature = active_object - - if armature.animation_data is None: - raise RuntimeError('No animation data for armature') - - # Ensure that we actually have items that we are going to be exporting. - if options.sequence_source == 'ACTIONS' and len(options.actions) == 0: - raise RuntimeError('No actions were selected for export') - elif options.sequence_source == 'TIMELINE_MARKERS' and len(options.marker_names) == 0: - raise RuntimeError('No timeline markers were selected for export') - - psa = Psa() - - bones = list(armature.data.bones) - - # The order of the armature bones and the pose bones is not guaranteed to be the same. - # As a result, we need to reconstruct the list of pose bones in the same order as the - # armature bones. - bone_names = [x.name for x in bones] - pose_bones = [(bone_names.index(bone.name), bone) for bone in armature.pose.bones] - pose_bones.sort(key=lambda x: x[0]) - pose_bones = [x[1] for x in pose_bones] - - # Get a list of all the bone indices and instigator bones for the bone filter settings. - export_bone_names = get_export_bone_names(armature, options.bone_filter_mode, options.bone_group_indices) - bone_indices = [bone_names.index(x) for x in export_bone_names] - - # Make the bone lists contain only the bones that are going to be exported. - bones = [bones[bone_index] for bone_index in bone_indices] - pose_bones = [pose_bones[bone_index] for bone_index in bone_indices] - - # No bones are going to be exported. - if len(bones) == 0: - raise RuntimeError('No bones available for export') - - # Check that all bone names are valid. - check_bone_names(map(lambda bone: bone.name, bones)) - - # Build list of PSA bones. 
- for bone in bones: - psa_bone = Psa.Bone() - psa_bone.name = bytes(bone.name, encoding='windows-1252') - - try: - parent_index = bones.index(bone.parent) - psa_bone.parent_index = parent_index - psa.bones[parent_index].children_count += 1 - except ValueError: - psa_bone.parent_index = -1 - - if bone.parent is not None: - rotation = bone.matrix.to_quaternion() - rotation.x = -rotation.x - rotation.y = -rotation.y - rotation.z = -rotation.z - quat_parent = bone.parent.matrix.to_quaternion().inverted() - parent_head = quat_parent @ bone.parent.head - parent_tail = quat_parent @ bone.parent.tail - location = (parent_tail - parent_head) + bone.head - else: - location = armature.matrix_local @ bone.head - rot_matrix = bone.matrix @ armature.matrix_local.to_3x3() - rotation = rot_matrix.to_quaternion() - - psa_bone.location.x = location.x - psa_bone.location.y = location.y - psa_bone.location.z = location.z - - psa_bone.rotation.x = rotation.x - psa_bone.rotation.y = rotation.y - psa_bone.rotation.z = rotation.z - psa_bone.rotation.w = rotation.w - - psa.bones.append(psa_bone) - - # Populate the export sequence list. - class NlaState: - def __init__(self): - self.frame_min = 0 - self.frame_max = 0 - self.action = None - - class ExportSequence: - def __init__(self): - self.name = '' - self.nla_state = NlaState() - self.fps = 30.0 - - export_sequences = [] - - if options.sequence_source == 'ACTIONS': - for action in options.actions: - if len(action.fcurves) == 0: + # There is no next marker. + # Find the final frame of all the NLA strips and use that as the last frame of this sequence. 
+ for nla_track in animation_data.nla_tracks: + if nla_track.mute: continue - export_sequence = ExportSequence() - export_sequence.nla_state.action = action - export_sequence.name = get_psa_sequence_name(action, options.should_use_original_sequence_names) - frame_min, frame_max = [int(x) for x in action.frame_range] - export_sequence.nla_state.frame_min = frame_min - export_sequence.nla_state.frame_max = frame_max - export_sequence.fps = self.get_sequence_fps(context, options, [action]) - export_sequences.append(export_sequence) - pass - elif options.sequence_source == 'TIMELINE_MARKERS': - sequence_frame_ranges = self.get_timeline_marker_sequence_frame_ranges(armature, context, options) + for strip in nla_track.strips: + frame_max = max(frame_max, strip.frame_end) - for name, (frame_min, frame_max) in sequence_frame_ranges.items(): - export_sequence = ExportSequence() - export_sequence.name = name - export_sequence.nla_state.action = None - export_sequence.nla_state.frame_min = frame_min - export_sequence.nla_state.frame_max = frame_max - nla_strips_actions = set( - map(lambda x: x.action, get_nla_strips_in_timeframe(active_object, frame_min, frame_max))) - export_sequence.fps = self.get_sequence_fps(context, options, nla_strips_actions) - export_sequences.append(export_sequence) + if frame_min > frame_max: + continue + + sequence_frame_ranges[marker_name] = int(frame_min), int(frame_max) + + return sequence_frame_ranges + + +def build_psa(context, options: PsaBuildOptions) -> Psa: + active_object = context.view_layer.objects.active + + if active_object.type != 'ARMATURE': + raise RuntimeError('Selected object must be an Armature') + + if options.should_override_animation_data: + animation_data_object = options.animation_data_override + else: + animation_data_object = active_object + + animation_data = animation_data_object.animation_data + + if animation_data is None: + raise RuntimeError(f'No animation data for object \'{animation_data_object.name}\'') + + # 
Ensure that we actually have items that we are going to be exporting. + if options.sequence_source == 'ACTIONS' and len(options.actions) == 0: + raise RuntimeError('No actions were selected for export') + elif options.sequence_source == 'TIMELINE_MARKERS' and len(options.marker_names) == 0: + raise RuntimeError('No timeline markers were selected for export') + + psa = Psa() + + armature = active_object + bones = list(armature.data.bones) + + # The order of the armature bones and the pose bones is not guaranteed to be the same. + # As a result, we need to reconstruct the list of pose bones in the same order as the + # armature bones. + bone_names = [x.name for x in bones] + pose_bones = [(bone_names.index(bone.name), bone) for bone in armature.pose.bones] + pose_bones.sort(key=lambda x: x[0]) + pose_bones = [x[1] for x in pose_bones] + + # Get a list of all the bone indices and instigator bones for the bone filter settings. + export_bone_names = get_export_bone_names(armature, options.bone_filter_mode, options.bone_group_indices) + bone_indices = [bone_names.index(x) for x in export_bone_names] + + # Make the bone lists contain only the bones that are going to be exported. + bones = [bones[bone_index] for bone_index in bone_indices] + pose_bones = [pose_bones[bone_index] for bone_index in bone_indices] + + # No bones are going to be exported. + if len(bones) == 0: + raise RuntimeError('No bones available for export') + + # Check that all bone names are valid. + check_bone_names(map(lambda bone: bone.name, bones)) + + # Build list of PSA bones. 
+ for bone in bones: + psa_bone = Psa.Bone() + psa_bone.name = bytes(bone.name, encoding='windows-1252') + + try: + parent_index = bones.index(bone.parent) + psa_bone.parent_index = parent_index + psa.bones[parent_index].children_count += 1 + except ValueError: + psa_bone.parent_index = -1 + + if bone.parent is not None: + rotation = bone.matrix.to_quaternion() + rotation.x = -rotation.x + rotation.y = -rotation.y + rotation.z = -rotation.z + inverse_parent_rotation = bone.parent.matrix.to_quaternion().inverted() + parent_head = inverse_parent_rotation @ bone.parent.head + parent_tail = inverse_parent_rotation @ bone.parent.tail + location = (parent_tail - parent_head) + bone.head else: - raise ValueError(f'Unhandled sequence source: {options.sequence_source}') + location = armature.matrix_local @ bone.head + rot_matrix = bone.matrix @ armature.matrix_local.to_3x3() + rotation = rot_matrix.to_quaternion() - # Add prefixes and suffices to the names of the export sequences and strip whitespace. - for export_sequence in export_sequences: - export_sequence.name = f'{options.sequence_name_prefix}{export_sequence.name}{options.sequence_name_suffix}'.strip() + psa_bone.location.x = location.x + psa_bone.location.y = location.y + psa_bone.location.z = location.z - # Now build the PSA sequences. - # We actually alter the timeline frame and simply record the resultant pose bone matrices. - frame_start_index = 0 + psa_bone.rotation.x = rotation.x + psa_bone.rotation.y = rotation.y + psa_bone.rotation.z = rotation.z + psa_bone.rotation.w = rotation.w - for export_sequence in export_sequences: - armature.animation_data.action = export_sequence.nla_state.action - context.view_layer.update() + psa.bones.append(psa_bone) - psa_sequence = Psa.Sequence() + # Populate the export sequence list. 
+ class NlaState: + def __init__(self): + self.frame_min = 0 + self.frame_max = 0 + self.action = None - frame_min = export_sequence.nla_state.frame_min - frame_max = export_sequence.nla_state.frame_max - frame_count = frame_max - frame_min + 1 + class ExportSequence: + def __init__(self): + self.name = '' + self.nla_state = NlaState() + self.fps = 30.0 - psa_sequence.name = bytes(export_sequence.name, encoding='windows-1252') - psa_sequence.frame_count = frame_count - psa_sequence.frame_start_index = frame_start_index - psa_sequence.fps = export_sequence.fps + export_sequences = [] - frame_count = frame_max - frame_min + 1 - - for frame in range(frame_count): - context.scene.frame_set(frame_min + frame) - - for pose_bone in pose_bones: - key = Psa.Key() - - if pose_bone.parent is not None: - pose_bone_matrix = pose_bone.matrix - pose_bone_parent_matrix = pose_bone.parent.matrix - pose_bone_matrix = pose_bone_parent_matrix.inverted() @ pose_bone_matrix - else: - if options.root_motion: - # Export root motion - pose_bone_matrix = armature.matrix_world @ pose_bone.matrix - else: - pose_bone_matrix = pose_bone.matrix - - location = pose_bone_matrix.to_translation() - rotation = pose_bone_matrix.to_quaternion().normalized() - - if pose_bone.parent is not None: - rotation.conjugate() - - key.location.x = location.x - key.location.y = location.y - key.location.z = location.z - key.rotation.x = rotation.x - key.rotation.y = rotation.y - key.rotation.z = rotation.z - key.rotation.w = rotation.w - key.time = 1.0 / psa_sequence.fps - - psa.keys.append(key) - - psa_sequence.bone_count = len(pose_bones) - psa_sequence.track_time = frame_count - - frame_start_index += frame_count - - psa.sequences[export_sequence.name] = psa_sequence - - return psa - - def get_timeline_marker_sequence_frame_ranges(self, object, context, options: PsaBuilderOptions) -> Dict: - # Timeline markers need to be sorted so that we can determine the sequence start and end positions. 
- sequence_frame_ranges = dict() - sorted_timeline_markers = list(sorted(context.scene.timeline_markers, key=lambda x: x.frame)) - sorted_timeline_marker_names = list(map(lambda x: x.name, sorted_timeline_markers)) - - for marker_name in options.marker_names: - marker = context.scene.timeline_markers[marker_name] - frame_min = marker.frame - # Determine the final frame of the sequence based on the next marker. - # If no subsequent marker exists, use the maximum frame_end from all NLA strips. - marker_index = sorted_timeline_marker_names.index(marker_name) - next_marker_index = marker_index + 1 - frame_max = 0 - if next_marker_index < len(sorted_timeline_markers): - # There is a next marker. Use that next marker's frame position as the last frame of this sequence. - frame_max = sorted_timeline_markers[next_marker_index].frame - if options.should_trim_timeline_marker_sequences: - nla_strips = get_nla_strips_in_timeframe(object, marker.frame, frame_max) - if len(nla_strips) > 0: - frame_max = min(frame_max, max(map(lambda nla_strip: nla_strip.frame_end, nla_strips))) - frame_min = max(frame_min, min(map(lambda nla_strip: nla_strip.frame_start, nla_strips))) - else: - # No strips in between this marker and the next, just export this as a one-frame animation. - frame_max = frame_min - else: - # There is no next marker. - # Find the final frame of all the NLA strips and use that as the last frame of this sequence. 
- for nla_track in object.animation_data.nla_tracks: - if nla_track.mute: - continue - for strip in nla_track.strips: - frame_max = max(frame_max, strip.frame_end) - - if frame_min > frame_max: + if options.sequence_source == 'ACTIONS': + for action in options.actions: + if len(action.fcurves) == 0: continue + export_sequence = ExportSequence() + export_sequence.nla_state.action = action + export_sequence.name = get_psa_sequence_name(action, options.should_use_original_sequence_names) + frame_min, frame_max = [int(x) for x in action.frame_range] + export_sequence.nla_state.frame_min = frame_min + export_sequence.nla_state.frame_max = frame_max + export_sequence.fps = get_sequence_fps(context, options, [action]) + export_sequences.append(export_sequence) + pass + elif options.sequence_source == 'TIMELINE_MARKERS': + sequence_frame_ranges = get_timeline_marker_sequence_frame_ranges(animation_data, context, options) - sequence_frame_ranges[marker_name] = int(frame_min), int(frame_max) + for name, (frame_min, frame_max) in sequence_frame_ranges.items(): + export_sequence = ExportSequence() + export_sequence.name = name + export_sequence.nla_state.action = None + export_sequence.nla_state.frame_min = frame_min + export_sequence.nla_state.frame_max = frame_max + nla_strips_actions = set( + map(lambda x: x.action, get_nla_strips_in_timeframe(animation_data, frame_min, frame_max))) + export_sequence.fps = get_sequence_fps(context, options, nla_strips_actions) + export_sequences.append(export_sequence) + else: + raise ValueError(f'Unhandled sequence source: {options.sequence_source}') - return sequence_frame_ranges + # Add prefixes and suffices to the names of the export sequences and strip whitespace. + for export_sequence in export_sequences: + export_sequence.name = f'{options.sequence_name_prefix}{export_sequence.name}{options.sequence_name_suffix}'.strip() + + # Now build the PSA sequences. 
+ # We actually alter the timeline frame and simply record the resultant pose bone matrices. + frame_start_index = 0 + + for export_sequence in export_sequences: + # Link the action to the animation data and update view layer. + animation_data.action = export_sequence.nla_state.action + context.view_layer.update() + + frame_min = export_sequence.nla_state.frame_min + frame_max = export_sequence.nla_state.frame_max + frame_count = frame_max - frame_min + 1 + + psa_sequence = Psa.Sequence() + psa_sequence.name = bytes(export_sequence.name, encoding='windows-1252') + psa_sequence.frame_count = frame_count + psa_sequence.frame_start_index = frame_start_index + psa_sequence.fps = export_sequence.fps + + for frame in range(frame_count): + context.scene.frame_set(frame_min + frame) + + for pose_bone in pose_bones: + key = Psa.Key() + + if pose_bone.parent is not None: + pose_bone_matrix = pose_bone.matrix + pose_bone_parent_matrix = pose_bone.parent.matrix + pose_bone_matrix = pose_bone_parent_matrix.inverted() @ pose_bone_matrix + else: + if options.root_motion: + # Export root motion + pose_bone_matrix = armature.matrix_world @ pose_bone.matrix + else: + pose_bone_matrix = pose_bone.matrix + + location = pose_bone_matrix.to_translation() + rotation = pose_bone_matrix.to_quaternion().normalized() + + if pose_bone.parent is not None: + rotation.conjugate() + + key.location.x = location.x + key.location.y = location.y + key.location.z = location.z + key.rotation.x = rotation.x + key.rotation.y = rotation.y + key.rotation.z = rotation.z + key.rotation.w = rotation.w + key.time = 1.0 / psa_sequence.fps + + psa.keys.append(key) + + psa_sequence.bone_count = len(pose_bones) + psa_sequence.track_time = frame_count + + frame_start_index += frame_count + + psa.sequences[export_sequence.name] = psa_sequence + + return psa diff --git a/io_scene_psk_psa/psa/exporter.py b/io_scene_psk_psa/psa/exporter.py index bc1d1a1..00bd0e8 100644 --- a/io_scene_psk_psa/psa/exporter.py +++ 
b/io_scene_psk_psa/psa/exporter.py @@ -10,18 +10,13 @@ from bpy.props import BoolProperty, CollectionProperty, EnumProperty, FloatPrope from bpy.types import Action, Operator, PropertyGroup, UIList from bpy_extras.io_utils import ExportHelper -from .builder import PsaBuilder, PsaBuilderOptions +from .builder import PsaBuildOptions, build_psa from .data import * from ..helpers import * from ..types import BoneGroupListItem -class PsaExporter(object): - def __init__(self, psa: Psa): - self.psa: Psa = psa - - # This method is shared by both PSA/K file formats, move this? - @staticmethod +def export_psa(psa: Psa, path: str): def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None): section = Section() section.name = name @@ -32,13 +27,11 @@ class PsaExporter(object): if data is not None: for datum in data: fp.write(datum) - - def export(self, path: str): - with open(path, 'wb') as fp: - self.write_section(fp, b'ANIMHEAD') - self.write_section(fp, b'BONENAMES', Psa.Bone, self.psa.bones) - self.write_section(fp, b'ANIMINFO', Psa.Sequence, list(self.psa.sequences.values())) - self.write_section(fp, b'ANIMKEYS', Psa.Key, self.psa.keys) + with open(path, 'wb') as fp: + write_section(fp, b'ANIMHEAD') + write_section(fp, b'BONENAMES', Psa.Bone, psa.bones) + write_section(fp, b'ANIMINFO', Psa.Sequence, list(psa.sequences.values())) + write_section(fp, b'ANIMKEYS', Psa.Key, psa.keys) class PsaExportActionListItem(PropertyGroup): @@ -64,6 +57,10 @@ def should_use_original_sequence_names_updated(_, context): update_action_names(context) +def psa_export_property_group_animation_data_override_poll(_context, obj): + return obj.animation_data is not None + + class PsaExportPropertyGroup(PropertyGroup): root_motion: BoolProperty( name='Root Motion', @@ -71,6 +68,15 @@ class PsaExportPropertyGroup(PropertyGroup): default=False, description='When set, the root bone will be transformed as it appears in the scene', ) + should_override_animation_data: 
BoolProperty( + name='Override Animation Data', + options=set(), + default=False + ) + animation_data_override: PointerProperty( + type=bpy.types.Object, + poll=psa_export_property_group_animation_data_override_poll + ) sequence_source: EnumProperty( name='Source', options=set(), @@ -154,6 +160,10 @@ def is_bone_filter_mode_item_available(context, identifier): return True +def should_action_be_selected_by_default(action): + return action is not None and action.asset_data is None + + class PsaExportOperator(Operator, ExportHelper): bl_idname = 'psa_export.operator' bl_label = 'Export' @@ -191,8 +201,11 @@ class PsaExportOperator(Operator, ExportHelper): # SOURCE layout.prop(pg, 'sequence_source', text='Source') - # ROOT MOTION - layout.prop(pg, 'root_motion', text='Root Motion') + if pg.sequence_source == 'TIMELINE_MARKERS': + # ANIMDATA SOURCE + layout.prop(pg, 'should_override_animation_data') + if pg.should_override_animation_data: + layout.prop(pg, 'animation_data_override') # SELECT ALL/NONE row = layout.row(align=True) @@ -249,15 +262,17 @@ class PsaExportOperator(Operator, ExportHelper): layout.template_list('PSX_UL_BoneGroupList', '', pg, 'bone_group_list', pg, 'bone_group_list_index', rows=rows) - def should_action_be_selected_by_default(self, action): - return action is not None and action.asset_data is None + layout.separator() + + # ROOT MOTION + layout.prop(pg, 'root_motion', text='Root Motion') def is_action_for_armature(self, action): if len(action.fcurves) == 0: return False bone_names = set([x.name for x in self.armature.data.bones]) for fcurve in action.fcurves: - match = re.match(r'pose\.bones\["(.+)"\].\w+', fcurve.data_path) + match = re.match(r'pose\.bones\["(.+)"].\w+', fcurve.data_path) if not match: continue bone_name = match.group(1) @@ -273,7 +288,7 @@ class PsaExportOperator(Operator, ExportHelper): if context.view_layer.objects.active.type != 'ARMATURE': raise RuntimeError('The selected object must be an armature') - def invoke(self, 
context, event): + def invoke(self, context, _event): try: self._check_context(context) except RuntimeError as e: @@ -290,7 +305,7 @@ class PsaExportOperator(Operator, ExportHelper): item = pg.action_list.add() item.action = action item.name = action.name - item.is_selected = self.should_action_be_selected_by_default(action) + item.is_selected = should_action_be_selected_by_default(action) update_action_names(context) @@ -318,7 +333,9 @@ class PsaExportOperator(Operator, ExportHelper): actions = [x.action for x in pg.action_list if x.is_selected] marker_names = [x.name for x in pg.marker_list if x.is_selected] - options = PsaBuilderOptions() + options = PsaBuildOptions() + options.should_override_animation_data = pg.should_override_animation_data + options.animation_data_override = pg.animation_data_override options.fps_source = pg.fps_source options.fps_custom = pg.fps_custom options.sequence_source = pg.sequence_source @@ -332,16 +349,14 @@ class PsaExportOperator(Operator, ExportHelper): options.sequence_name_suffix = pg.sequence_name_suffix options.root_motion = pg.root_motion - builder = PsaBuilder() - try: - psa = builder.build(context, options) + psa = build_psa(context, options) except RuntimeError as e: self.report({'ERROR_INVALID_CONTEXT'}, str(e)) return {'CANCELLED'} - exporter = PsaExporter(psa) - exporter.export(self.filepath) + export_psa(psa, self.filepath) + return {'FINISHED'} @@ -368,8 +383,7 @@ def filter_sequences(pg: PsaExportPropertyGroup, sequences: bpy.types.bpy_prop_c return flt_flags -def get_visible_sequences(pg: PsaExportPropertyGroup, sequences: bpy.types.bpy_prop_collection) -> List[ - PsaExportActionListItem]: +def get_visible_sequences(pg: PsaExportPropertyGroup, sequences: bpy.types.bpy_prop_collection) -> List[PsaExportActionListItem]: visible_sequences = [] for i, flag in enumerate(filter_sequences(pg, sequences)): if bool(flag & (1 << 30)): @@ -401,10 +415,9 @@ class PSA_UL_ExportSequenceList(UIList): subrow = 
row.row(align=True) subrow.prop(pg, 'sequence_filter_asset', icon_only=True, icon='ASSET_MANAGER') - - def filter_items(self, context, data, property): + def filter_items(self, context, data, prop): pg = context.scene.psa_export - actions = getattr(data, property) + actions = getattr(data, prop) flt_flags = filter_sequences(pg, actions) flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(actions, 'name') return flt_flags, flt_neworder diff --git a/io_scene_psk_psa/psa/importer.py b/io_scene_psk_psa/psa/importer.py index 6d3151f..35e1cce 100644 --- a/io_scene_psk_psa/psa/importer.py +++ b/io_scene_psk_psa/psa/importer.py @@ -26,198 +26,195 @@ class PsaImportOptions(object): self.action_name_prefix = '' -class PsaImporter(object): - def __init__(self): - pass +def import_psa(psa_reader: PsaReader, armature_object, options: PsaImportOptions): + sequences = map(lambda x: psa_reader.sequences[x], options.sequence_names) + armature_data = armature_object.data - def import_psa(self, psa_reader: PsaReader, armature_object, options: PsaImportOptions): - sequences = map(lambda x: psa_reader.sequences[x], options.sequence_names) - armature_data = armature_object.data + class ImportBone(object): + def __init__(self, psa_bone: Psa.Bone): + self.psa_bone: Psa.Bone = psa_bone + self.parent: Optional[ImportBone] = None + self.armature_bone = None + self.pose_bone = None + self.orig_loc: Vector = Vector() + self.orig_quat: Quaternion = Quaternion() + self.post_quat: Quaternion = Quaternion() + self.fcurves = [] - class ImportBone(object): - def __init__(self, psa_bone: Psa.Bone): - self.psa_bone: Psa.Bone = psa_bone - self.parent: Optional[ImportBone] = None - self.armature_bone = None - self.pose_bone = None - self.orig_loc: Vector = Vector() - self.orig_quat: Quaternion = Quaternion() - self.post_quat: Quaternion = Quaternion() - self.fcurves = [] + def calculate_fcurve_data(import_bone: ImportBone, key_data: []): + # Convert world-space transforms to local-space transforms. 
+ key_rotation = Quaternion(key_data[0:4]) + key_location = Vector(key_data[4:]) + q = import_bone.post_quat.copy() + q.rotate(import_bone.orig_quat) + quat = q + q = import_bone.post_quat.copy() + if import_bone.parent is None: + q.rotate(key_rotation.conjugated()) + else: + q.rotate(key_rotation) + quat.rotate(q.conjugated()) + loc = key_location - import_bone.orig_loc + loc.rotate(import_bone.post_quat.conjugated()) + return quat.w, quat.x, quat.y, quat.z, loc.x, loc.y, loc.z - def calculate_fcurve_data(import_bone: ImportBone, key_data: []): - # Convert world-space transforms to local-space transforms. - key_rotation = Quaternion(key_data[0:4]) - key_location = Vector(key_data[4:]) - q = import_bone.post_quat.copy() - q.rotate(import_bone.orig_quat) - quat = q - q = import_bone.post_quat.copy() - if import_bone.parent is None: - q.rotate(key_rotation.conjugated()) + # Create an index mapping from bones in the PSA to bones in the target armature. + psa_to_armature_bone_indices = {} + armature_bone_names = [x.name for x in armature_data.bones] + psa_bone_names = [] + for psa_bone_index, psa_bone in enumerate(psa_reader.bones): + psa_bone_name = psa_bone.name.decode('windows-1252') + psa_bone_names.append(psa_bone_name) + try: + psa_to_armature_bone_indices[psa_bone_index] = armature_bone_names.index(psa_bone_name) + except ValueError: + pass + + # Report if there are missing bones in the target armature. + missing_bone_names = set(psa_bone_names).difference(set(armature_bone_names)) + if len(missing_bone_names) > 0: + print( + f'The armature object \'{armature_object.name}\' is missing the following bones that exist in the PSA:') + print(list(sorted(missing_bone_names))) + del armature_bone_names + + # Create intermediate bone data for import operations. 
+ import_bones = [] + import_bones_dict = dict() + + for psa_bone_index, psa_bone in enumerate(psa_reader.bones): + bone_name = psa_bone.name.decode('windows-1252') + if psa_bone_index not in psa_to_armature_bone_indices: # TODO: replace with bone_name in armature_data.bones + # PSA bone does not map to armature bone, skip it and leave an empty bone in its place. + import_bones.append(None) + continue + import_bone = ImportBone(psa_bone) + import_bone.armature_bone = armature_data.bones[bone_name] + import_bone.pose_bone = armature_object.pose.bones[bone_name] + import_bones_dict[bone_name] = import_bone + import_bones.append(import_bone) + + for import_bone in filter(lambda x: x is not None, import_bones): + armature_bone = import_bone.armature_bone + if armature_bone.parent is not None and armature_bone.parent.name in psa_bone_names: + import_bone.parent = import_bones_dict[armature_bone.parent.name] + # Calculate the original location & rotation of each bone (in world-space maybe?) + if armature_bone.get('orig_quat') is not None: + # TODO: ideally we don't rely on bone auxiliary data like this, the non-aux data path is incorrect + # (animations are flipped 180 around Z) + import_bone.orig_quat = Quaternion(armature_bone['orig_quat']) + import_bone.orig_loc = Vector(armature_bone['orig_loc']) + import_bone.post_quat = Quaternion(armature_bone['post_quat']) + else: + if import_bone.parent is not None: + import_bone.orig_loc = armature_bone.matrix_local.translation - armature_bone.parent.matrix_local.translation + import_bone.orig_loc.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated()) + import_bone.orig_quat = armature_bone.matrix_local.to_quaternion() + import_bone.orig_quat.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated()) + import_bone.orig_quat.conjugate() else: - q.rotate(key_rotation) - quat.rotate(q.conjugated()) - loc = key_location - import_bone.orig_loc - loc.rotate(import_bone.post_quat.conjugated()) - return 
quat.w, quat.x, quat.y, quat.z, loc.x, loc.y, loc.z + import_bone.orig_loc = armature_bone.matrix_local.translation.copy() + import_bone.orig_quat = armature_bone.matrix_local.to_quaternion() + import_bone.post_quat = import_bone.orig_quat.conjugated() - # Create an index mapping from bones in the PSA to bones in the target armature. - psa_to_armature_bone_indices = {} - armature_bone_names = [x.name for x in armature_data.bones] - psa_bone_names = [] - for psa_bone_index, psa_bone in enumerate(psa_reader.bones): - psa_bone_name = psa_bone.name.decode('windows-1252') - psa_bone_names.append(psa_bone_name) - try: - psa_to_armature_bone_indices[psa_bone_index] = armature_bone_names.index(psa_bone_name) - except ValueError: - pass + # Create and populate the data for new sequences. + actions = [] + for sequence in sequences: + # Add the action. + sequence_name = sequence.name.decode('windows-1252') + action_name = options.action_name_prefix + sequence_name - # Report if there are missing bones in the target armature. - missing_bone_names = set(psa_bone_names).difference(set(armature_bone_names)) - if len(missing_bone_names) > 0: - print( - f'The armature object \'{armature_object.name}\' is missing the following bones that exist in the PSA:') - print(list(sorted(missing_bone_names))) - del armature_bone_names + if options.should_overwrite and action_name in bpy.data.actions: + action = bpy.data.actions[action_name] + else: + action = bpy.data.actions.new(name=action_name) - # Create intermediate bone data for import operations. 
- import_bones = [] - import_bones_dict = dict() + if options.should_write_keyframes: + # Remove existing f-curves (replace with action.fcurves.clear() in Blender 3.2) + while len(action.fcurves) > 0: + action.fcurves.remove(action.fcurves[-1]) - for psa_bone_index, psa_bone in enumerate(psa_reader.bones): - bone_name = psa_bone.name.decode('windows-1252') - if psa_bone_index not in psa_to_armature_bone_indices: # TODO: replace with bone_name in armature_data.bones - # PSA bone does not map to armature bone, skip it and leave an empty bone in its place. - import_bones.append(None) - continue - import_bone = ImportBone(psa_bone) - import_bone.armature_bone = armature_data.bones[bone_name] - import_bone.pose_bone = armature_object.pose.bones[bone_name] - import_bones_dict[bone_name] = import_bone - import_bones.append(import_bone) + # Create f-curves for the rotation and location of each bone. + for psa_bone_index, armature_bone_index in psa_to_armature_bone_indices.items(): + import_bone = import_bones[psa_bone_index] + pose_bone = import_bone.pose_bone + rotation_data_path = pose_bone.path_from_id('rotation_quaternion') + location_data_path = pose_bone.path_from_id('location') + import_bone.fcurves = [ + action.fcurves.new(rotation_data_path, index=0, action_group=pose_bone.name), # Qw + action.fcurves.new(rotation_data_path, index=1, action_group=pose_bone.name), # Qx + action.fcurves.new(rotation_data_path, index=2, action_group=pose_bone.name), # Qy + action.fcurves.new(rotation_data_path, index=3, action_group=pose_bone.name), # Qz + action.fcurves.new(location_data_path, index=0, action_group=pose_bone.name), # Lx + action.fcurves.new(location_data_path, index=1, action_group=pose_bone.name), # Ly + action.fcurves.new(location_data_path, index=2, action_group=pose_bone.name), # Lz + ] - for import_bone in filter(lambda x: x is not None, import_bones): - armature_bone = import_bone.armature_bone - if armature_bone.parent is not None and 
armature_bone.parent.name in psa_bone_names: - import_bone.parent = import_bones_dict[armature_bone.parent.name] - # Calculate the original location & rotation of each bone (in world-space maybe?) - if armature_bone.get('orig_quat') is not None: - # TODO: ideally we don't rely on bone auxiliary data like this, the non-aux data path is incorrect (animations are flipped 180 around Z) - import_bone.orig_quat = Quaternion(armature_bone['orig_quat']) - import_bone.orig_loc = Vector(armature_bone['orig_loc']) - import_bone.post_quat = Quaternion(armature_bone['post_quat']) - else: - if import_bone.parent is not None: - import_bone.orig_loc = armature_bone.matrix_local.translation - armature_bone.parent.matrix_local.translation - import_bone.orig_loc.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated()) - import_bone.orig_quat = armature_bone.matrix_local.to_quaternion() - import_bone.orig_quat.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated()) - import_bone.orig_quat.conjugate() - else: - import_bone.orig_loc = armature_bone.matrix_local.translation.copy() - import_bone.orig_quat = armature_bone.matrix_local.to_quaternion() - import_bone.post_quat = import_bone.orig_quat.conjugated() + # Read the sequence data matrix from the PSA. + sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name) + keyframe_write_matrix = np.ones(sequence_data_matrix.shape, dtype=np.int8) - # Create and populate the data for new sequences. - actions = [] - for sequence in sequences: - # Add the action. - sequence_name = sequence.name.decode('windows-1252') - action_name = options.action_name_prefix + sequence_name + # Convert the sequence's data from world-space to local-space. + for bone_index, import_bone in enumerate(import_bones): + if import_bone is None: + continue + for frame_index in range(sequence.frame_count): + # This bone has writeable keyframes for this frame. 
+ key_data = sequence_data_matrix[frame_index, bone_index] + # Calculate the local-space key data for the bone. + sequence_data_matrix[frame_index, bone_index] = calculate_fcurve_data(import_bone, key_data) - if options.should_overwrite and action_name in bpy.data.actions: - action = bpy.data.actions[action_name] - else: - action = bpy.data.actions.new(name=action_name) - - if options.should_write_keyframes: - # Remove existing f-curves (replace with action.fcurves.clear() in Blender 3.2) - while len(action.fcurves) > 0: - action.fcurves.remove(action.fcurves[-1]) - - # Create f-curves for the rotation and location of each bone. - for psa_bone_index, armature_bone_index in psa_to_armature_bone_indices.items(): - import_bone = import_bones[psa_bone_index] - pose_bone = import_bone.pose_bone - rotation_data_path = pose_bone.path_from_id('rotation_quaternion') - location_data_path = pose_bone.path_from_id('location') - import_bone.fcurves = [ - action.fcurves.new(rotation_data_path, index=0, action_group=pose_bone.name), # Qw - action.fcurves.new(rotation_data_path, index=1, action_group=pose_bone.name), # Qx - action.fcurves.new(rotation_data_path, index=2, action_group=pose_bone.name), # Qy - action.fcurves.new(rotation_data_path, index=3, action_group=pose_bone.name), # Qz - action.fcurves.new(location_data_path, index=0, action_group=pose_bone.name), # Lx - action.fcurves.new(location_data_path, index=1, action_group=pose_bone.name), # Ly - action.fcurves.new(location_data_path, index=2, action_group=pose_bone.name), # Lz - ] - - # Read the sequence data matrix from the PSA. - sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name) - keyframe_write_matrix = np.ones(sequence_data_matrix.shape, dtype=np.int8) - - # Convert the sequence's data from world-space to local-space. + # Clean the keyframe data. 
This is accomplished by writing zeroes to the write matrix when there is an + # insufficiently large change in the data from the last written frame. + if options.should_clean_keys: + threshold = 0.001 for bone_index, import_bone in enumerate(import_bones): if import_bone is None: continue - for frame_index in range(sequence.frame_count): + for fcurve_index in range(len(import_bone.fcurves)): + # Get all the keyframe data for the bone's f-curve data from the sequence data matrix. + fcurve_frame_data = sequence_data_matrix[:, bone_index, fcurve_index] + last_written_datum = 0 + for frame_index, datum in enumerate(fcurve_frame_data): + # If the f-curve data is not different enough to the last written frame, un-mark this data for writing. + if frame_index > 0 and abs(datum - last_written_datum) < threshold: + keyframe_write_matrix[frame_index, bone_index, fcurve_index] = 0 + else: + last_written_datum = datum + + # Write the keyframes out! + for frame_index in range(sequence.frame_count): + for bone_index, import_bone in enumerate(import_bones): + if import_bone is None: + continue + bone_has_writeable_keyframes = any(keyframe_write_matrix[frame_index, bone_index]) + if bone_has_writeable_keyframes: # This bone has writeable keyframes for this frame. key_data = sequence_data_matrix[frame_index, bone_index] - # Calculate the local-space key data for the bone. - sequence_data_matrix[frame_index, bone_index] = calculate_fcurve_data(import_bone, key_data) + for fcurve, should_write, datum in zip(import_bone.fcurves, + keyframe_write_matrix[frame_index, bone_index], + key_data): + if should_write: + fcurve.keyframe_points.insert(frame_index, datum, options={'FAST'}) - # Clean the keyframe data. This is accomplished by writing zeroes to the write matrix when there is an - # insufficiently large change in the data from the last written frame. 
- if options.should_clean_keys: - threshold = 0.001 - for bone_index, import_bone in enumerate(import_bones): - if import_bone is None: - continue - for fcurve_index in range(len(import_bone.fcurves)): - # Get all the keyframe data for the bone's f-curve data from the sequence data matrix. - fcurve_frame_data = sequence_data_matrix[:, bone_index, fcurve_index] - last_written_datum = 0 - for frame_index, datum in enumerate(fcurve_frame_data): - # If the f-curve data is not different enough to the last written frame, un-mark this data for writing. - if frame_index > 0 and abs(datum - last_written_datum) < threshold: - keyframe_write_matrix[frame_index, bone_index, fcurve_index] = 0 - else: - last_written_datum = datum + # Write + if options.should_write_metadata: + action['psa_sequence_name'] = sequence_name + action['psa_sequence_fps'] = sequence.fps - # Write the keyframes out! - for frame_index in range(sequence.frame_count): - for bone_index, import_bone in enumerate(import_bones): - if import_bone is None: - continue - bone_has_writeable_keyframes = any(keyframe_write_matrix[frame_index, bone_index]) - if bone_has_writeable_keyframes: - # This bone has writeable keyframes for this frame. - key_data = sequence_data_matrix[frame_index, bone_index] - for fcurve, should_write, datum in zip(import_bone.fcurves, - keyframe_write_matrix[frame_index, bone_index], - key_data): - if should_write: - fcurve.keyframe_points.insert(frame_index, datum, options={'FAST'}) + action.use_fake_user = options.should_use_fake_user - # Write - if options.should_write_metadata: - action['psa_sequence_name'] = sequence_name - action['psa_sequence_fps'] = sequence.fps + actions.append(action) - action.use_fake_user = options.should_use_fake_user - - actions.append(action) - - # If the user specifies, store the new animations as strips on a non-contributing NLA track. 
- if options.should_stash: - if armature_object.animation_data is None: - armature_object.animation_data_create() - for action in actions: - nla_track = armature_object.animation_data.nla_tracks.new() - nla_track.name = action.name - nla_track.mute = True - nla_track.strips.new(name=action.name, start=0, action=action) + # If the user specifies, store the new animations as strips on a non-contributing NLA track. + if options.should_stash: + if armature_object.animation_data is None: + armature_object.animation_data_create() + for action in actions: + nla_track = armature_object.animation_data.nla_tracks.new() + nla_track.name = action.name + nla_track.mute = True + nla_track.strips.new(name=action.name, start=0, action=action) class PsaImportActionListItem(PropertyGroup): @@ -403,7 +400,7 @@ class PsaImportSequencesFromText(Operator): class PsaImportSequencesSelectAll(Operator): bl_idname = 'psa_import.sequences_select_all' bl_label = 'All' - bl_description = 'Select all visible sequences' + bl_description = 'Select all sequences' bl_options = {'INTERNAL'} @classmethod @@ -589,7 +586,7 @@ class PsaImportOperator(Operator): options.should_write_metadata = pg.should_write_metadata options.should_write_keyframes = pg.should_write_keyframes - PsaImporter().import_psa(psa_reader, context.view_layer.objects.active, options) + import_psa(psa_reader, context.view_layer.objects.active, options) self.report({'INFO'}, f'Imported {len(sequence_names)} action(s)') diff --git a/io_scene_psk_psa/psa/reader.py b/io_scene_psk_psa/psa/reader.py index c8f41d0..24e5c66 100644 --- a/io_scene_psk_psa/psa/reader.py +++ b/io_scene_psk_psa/psa/reader.py @@ -7,8 +7,8 @@ from .data import * class PsaReader(object): """ - This class reads the sequences and bone information immediately upon instantiation and hold onto a file handle. - The key data is not read into memory upon instantiation due to it's potentially very large size. 
+ This class reads the sequences and bone information immediately upon instantiation and holds onto a file handle. + The keyframe data is not read into memory upon instantiation due to it's potentially very large size. To read the key data for a particular sequence, call `read_sequence_keys`. """ diff --git a/io_scene_psk_psa/psk/builder.py b/io_scene_psk_psa/psk/builder.py index fc2d1d3..be9d382 100644 --- a/io_scene_psk_psa/psk/builder.py +++ b/io_scene_psk_psa/psk/builder.py @@ -12,265 +12,261 @@ class PskInputObjects(object): self.armature_object = None -class PskBuilderOptions(object): +class PskBuildOptions(object): def __init__(self): self.bone_filter_mode = 'ALL' self.bone_group_indices = [] self.use_raw_mesh_data = True -class PskBuilder(object): - def __init__(self): - pass +def get_psk_input_objects(context) -> PskInputObjects: + input_objects = PskInputObjects() + for selected_object in context.view_layer.objects.selected: + if selected_object.type != 'MESH': + raise RuntimeError(f'Selected object "{selected_object.name}" is not a mesh') - @staticmethod - def get_input_objects(context) -> PskInputObjects: - input_objects = PskInputObjects() - for selected_object in context.view_layer.objects.selected: - if selected_object.type != 'MESH': - raise RuntimeError(f'Selected object "{selected_object.name}" is not a mesh') + input_objects.mesh_objects = context.view_layer.objects.selected - input_objects.mesh_objects = context.view_layer.objects.selected + if len(input_objects.mesh_objects) == 0: + raise RuntimeError('At least one mesh must be selected') - if len(input_objects.mesh_objects) == 0: - raise RuntimeError('At least one mesh must be selected') + for mesh_object in input_objects.mesh_objects: + if len(mesh_object.data.materials) == 0: + raise RuntimeError(f'Mesh "{mesh_object.name}" must have at least one material') - for mesh_object in input_objects.mesh_objects: - if len(mesh_object.data.materials) == 0: - raise RuntimeError(f'Mesh 
"{mesh_object.name}" must have at least one material') + # Ensure that there are either no armature modifiers (static mesh) + # or that there is exactly one armature modifier object shared between + # all selected meshes + armature_modifier_objects = set() - # Ensure that there are either no armature modifiers (static mesh) - # or that there is exactly one armature modifier object shared between - # all selected meshes - armature_modifier_objects = set() + for mesh_object in input_objects.mesh_objects: + modifiers = [x for x in mesh_object.modifiers if x.type == 'ARMATURE'] + if len(modifiers) == 0: + continue + elif len(modifiers) > 1: + raise RuntimeError(f'Mesh "{mesh_object.name}" must have only one armature modifier') + armature_modifier_objects.add(modifiers[0].object) - for mesh_object in input_objects.mesh_objects: - modifiers = [x for x in mesh_object.modifiers if x.type == 'ARMATURE'] - if len(modifiers) == 0: - continue - elif len(modifiers) > 1: - raise RuntimeError(f'Mesh "{mesh_object.name}" must have only one armature modifier') - armature_modifier_objects.add(modifiers[0].object) + if len(armature_modifier_objects) > 1: + raise RuntimeError('All selected meshes must have the same armature modifier') + elif len(armature_modifier_objects) == 1: + input_objects.armature_object = list(armature_modifier_objects)[0] - if len(armature_modifier_objects) > 1: - raise RuntimeError('All selected meshes must have the same armature modifier') - elif len(armature_modifier_objects) == 1: - input_objects.armature_object = list(armature_modifier_objects)[0] + return input_objects - return input_objects - def build(self, context, options: PskBuilderOptions) -> Psk: - input_objects = PskBuilder.get_input_objects(context) +def build_psk(context, options: PskBuildOptions) -> Psk: + input_objects = get_psk_input_objects(context) - armature_object = input_objects.armature_object + armature_object = input_objects.armature_object - psk = Psk() - bones = [] - materials = 
OrderedDict() + psk = Psk() + bones = [] + materials = OrderedDict() - if armature_object is None: - # If the mesh has no armature object, simply assign it a dummy bone at the root to satisfy the requirement - # that a PSK file must have at least one bone. + if armature_object is None: + # If the mesh has no armature object, simply assign it a dummy bone at the root to satisfy the requirement + # that a PSK file must have at least one bone. + psk_bone = Psk.Bone() + psk_bone.name = bytes('root', encoding='windows-1252') + psk_bone.flags = 0 + psk_bone.children_count = 0 + psk_bone.parent_index = 0 + psk_bone.location = Vector3.zero() + psk_bone.rotation = Quaternion.identity() + psk.bones.append(psk_bone) + else: + bone_names = get_export_bone_names(armature_object, options.bone_filter_mode, options.bone_group_indices) + bones = [armature_object.data.bones[bone_name] for bone_name in bone_names] + + # Check that all bone names are valid. + check_bone_names(map(lambda x: x.name, bones)) + + for bone in bones: psk_bone = Psk.Bone() - psk_bone.name = bytes('root', encoding='windows-1252') + psk_bone.name = bytes(bone.name, encoding='windows-1252') psk_bone.flags = 0 psk_bone.children_count = 0 - psk_bone.parent_index = 0 - psk_bone.location = Vector3.zero() - psk_bone.rotation = Quaternion.identity() - psk.bones.append(psk_bone) - else: - bone_names = get_export_bone_names(armature_object, options.bone_filter_mode, options.bone_group_indices) - bones = [armature_object.data.bones[bone_name] for bone_name in bone_names] - # Check that all bone names are valid. 
- check_bone_names(map(lambda x: x.name, bones)) + try: + parent_index = bones.index(bone.parent) + psk_bone.parent_index = parent_index + psk.bones[parent_index].children_count += 1 + except ValueError: + psk_bone.parent_index = 0 - for bone in bones: - psk_bone = Psk.Bone() - psk_bone.name = bytes(bone.name, encoding='windows-1252') - psk_bone.flags = 0 - psk_bone.children_count = 0 - - try: - parent_index = bones.index(bone.parent) - psk_bone.parent_index = parent_index - psk.bones[parent_index].children_count += 1 - except ValueError: - psk_bone.parent_index = 0 - - if bone.parent is not None: - rotation = bone.matrix.to_quaternion() - rotation.x = -rotation.x - rotation.y = -rotation.y - rotation.z = -rotation.z - quat_parent = bone.parent.matrix.to_quaternion().inverted() - parent_head = quat_parent @ bone.parent.head - parent_tail = quat_parent @ bone.parent.tail - location = (parent_tail - parent_head) + bone.head - else: - location = armature_object.matrix_local @ bone.head - rot_matrix = bone.matrix @ armature_object.matrix_local.to_3x3() - rotation = rot_matrix.to_quaternion() - - psk_bone.location.x = location.x - psk_bone.location.y = location.y - psk_bone.location.z = location.z - - psk_bone.rotation.x = rotation.x - psk_bone.rotation.y = rotation.y - psk_bone.rotation.z = rotation.z - psk_bone.rotation.w = rotation.w - - psk.bones.append(psk_bone) - - for input_mesh_object in input_objects.mesh_objects: - - # MATERIALS - material_indices = [] - for i, material in enumerate(input_mesh_object.data.materials): - if material is None: - raise RuntimeError('Material cannot be empty (index ' + str(i) + ')') - if material.name in materials: - # Material already evaluated, just get its index. - material_index = list(materials.keys()).index(material.name) - else: - # New material. 
- psk_material = Psk.Material() - psk_material.name = bytes(material.name, encoding='windows-1252') - psk_material.texture_index = len(psk.materials) - psk.materials.append(psk_material) - materials[material.name] = material - material_index = psk_material.texture_index - material_indices.append(material_index) - - if options.use_raw_mesh_data: - mesh_object = input_mesh_object - mesh_data = input_mesh_object.data + if bone.parent is not None: + rotation = bone.matrix.to_quaternion() + rotation.x = -rotation.x + rotation.y = -rotation.y + rotation.z = -rotation.z + quat_parent = bone.parent.matrix.to_quaternion().inverted() + parent_head = quat_parent @ bone.parent.head + parent_tail = quat_parent @ bone.parent.tail + location = (parent_tail - parent_head) + bone.head else: - # Create a copy of the mesh object after non-armature modifiers are applied. + location = armature_object.matrix_local @ bone.head + rot_matrix = bone.matrix @ armature_object.matrix_local.to_3x3() + rotation = rot_matrix.to_quaternion() - # Temporarily deactivate any armature modifiers on the input mesh object. 
- active_armature_modifiers = [x for x in filter(lambda x: x.type == 'ARMATURE' and x.is_active, input_mesh_object.modifiers)] - for modifier in active_armature_modifiers: - modifier.show_viewport = False + psk_bone.location.x = location.x + psk_bone.location.y = location.y + psk_bone.location.z = location.z - depsgraph = context.evaluated_depsgraph_get() - bm = bmesh.new() - bm.from_object(input_mesh_object, depsgraph) - mesh_data = bpy.data.meshes.new('') - bm.to_mesh(mesh_data) - del bm - mesh_object = bpy.data.objects.new('', mesh_data) - mesh_object.matrix_world = input_mesh_object.matrix_world + psk_bone.rotation.x = rotation.x + psk_bone.rotation.y = rotation.y + psk_bone.rotation.z = rotation.z + psk_bone.rotation.w = rotation.w - # Copy the vertex groups - for vertex_group in input_mesh_object.vertex_groups: - mesh_object.vertex_groups.new(name=vertex_group.name) + psk.bones.append(psk_bone) - # Reactivate previously active armature modifiers - for modifier in active_armature_modifiers: - modifier.show_viewport = True + for input_mesh_object in input_objects.mesh_objects: - vertex_offset = len(psk.points) + # MATERIALS + material_indices = [] + for i, material in enumerate(input_mesh_object.data.materials): + if material is None: + raise RuntimeError('Material cannot be empty (index ' + str(i) + ')') + if material.name in materials: + # Material already evaluated, just get its index. + material_index = list(materials.keys()).index(material.name) + else: + # New material. 
+ psk_material = Psk.Material() + psk_material.name = bytes(material.name, encoding='windows-1252') + psk_material.texture_index = len(psk.materials) + psk.materials.append(psk_material) + materials[material.name] = material + material_index = psk_material.texture_index + material_indices.append(material_index) - # VERTICES - for vertex in mesh_data.vertices: - point = Vector3() - v = mesh_object.matrix_world @ vertex.co - point.x = v.x - point.y = v.y - point.z = v.z - psk.points.append(point) + if options.use_raw_mesh_data: + mesh_object = input_mesh_object + mesh_data = input_mesh_object.data + else: + # Create a copy of the mesh object after non-armature modifiers are applied. - uv_layer = mesh_data.uv_layers.active.data + # Temporarily deactivate any armature modifiers on the input mesh object. + active_armature_modifiers = [x for x in filter(lambda x: x.type == 'ARMATURE' and x.is_active, input_mesh_object.modifiers)] + for modifier in active_armature_modifiers: + modifier.show_viewport = False - # WEDGES - mesh_data.calc_loop_triangles() + depsgraph = context.evaluated_depsgraph_get() + bm = bmesh.new() + bm.from_object(input_mesh_object, depsgraph) + mesh_data = bpy.data.meshes.new('') + bm.to_mesh(mesh_data) + del bm + mesh_object = bpy.data.objects.new('', mesh_data) + mesh_object.matrix_world = input_mesh_object.matrix_world - # Build a list of non-unique wedges. - wedges = [] - for loop_index, loop in enumerate(mesh_data.loops): - wedge = Psk.Wedge() - wedge.point_index = loop.vertex_index + vertex_offset - wedge.u, wedge.v = uv_layer[loop_index].uv - wedge.v = 1.0 - wedge.v - wedges.append(wedge) + # Copy the vertex groups + for vertex_group in input_mesh_object.vertex_groups: + mesh_object.vertex_groups.new(name=vertex_group.name) - # Assign material indices to the wedges. 
- for triangle in mesh_data.loop_triangles: - for loop_index in triangle.loops: - wedges[loop_index].material_index = material_indices[triangle.material_index] + # Reactivate previously active armature modifiers + for modifier in active_armature_modifiers: + modifier.show_viewport = True - # Populate the list of wedges with unique wedges & build a look-up table of loop indices to wedge indices - wedge_indices = {} - loop_wedge_indices = [-1] * len(mesh_data.loops) - for loop_index, wedge in enumerate(wedges): - wedge_hash = hash(wedge) - if wedge_hash in wedge_indices: - loop_wedge_indices[loop_index] = wedge_indices[wedge_hash] - else: - wedge_index = len(psk.wedges) - wedge_indices[wedge_hash] = wedge_index - psk.wedges.append(wedge) - loop_wedge_indices[loop_index] = wedge_index + vertex_offset = len(psk.points) - # FACES - poly_groups, groups = mesh_data.calc_smooth_groups(use_bitflags=True) - for f in mesh_data.loop_triangles: - face = Psk.Face() - face.material_index = material_indices[f.material_index] - face.wedge_indices[0] = loop_wedge_indices[f.loops[2]] - face.wedge_indices[1] = loop_wedge_indices[f.loops[1]] - face.wedge_indices[2] = loop_wedge_indices[f.loops[0]] - face.smoothing_groups = poly_groups[f.polygon_index] - psk.faces.append(face) + # VERTICES + for vertex in mesh_data.vertices: + point = Vector3() + v = mesh_object.matrix_world @ vertex.co + point.x = v.x + point.y = v.y + point.z = v.z + psk.points.append(point) - # WEIGHTS - if armature_object is not None: - # Because the vertex groups may contain entries for which there is no matching bone in the armature, - # we must filter them out and not export any weights for these vertex groups. 
- bone_names = [x.name for x in bones] - vertex_group_names = [x.name for x in mesh_object.vertex_groups] - vertex_group_bone_indices = dict() - for vertex_group_index, vertex_group_name in enumerate(vertex_group_names): + uv_layer = mesh_data.uv_layers.active.data + + # WEDGES + mesh_data.calc_loop_triangles() + + # Build a list of non-unique wedges. + wedges = [] + for loop_index, loop in enumerate(mesh_data.loops): + wedge = Psk.Wedge() + wedge.point_index = loop.vertex_index + vertex_offset + wedge.u, wedge.v = uv_layer[loop_index].uv + wedge.v = 1.0 - wedge.v + wedges.append(wedge) + + # Assign material indices to the wedges. + for triangle in mesh_data.loop_triangles: + for loop_index in triangle.loops: + wedges[loop_index].material_index = material_indices[triangle.material_index] + + # Populate the list of wedges with unique wedges & build a look-up table of loop indices to wedge indices + wedge_indices = {} + loop_wedge_indices = [-1] * len(mesh_data.loops) + for loop_index, wedge in enumerate(wedges): + wedge_hash = hash(wedge) + if wedge_hash in wedge_indices: + loop_wedge_indices[loop_index] = wedge_indices[wedge_hash] + else: + wedge_index = len(psk.wedges) + wedge_indices[wedge_hash] = wedge_index + psk.wedges.append(wedge) + loop_wedge_indices[loop_index] = wedge_index + + # FACES + poly_groups, groups = mesh_data.calc_smooth_groups(use_bitflags=True) + for f in mesh_data.loop_triangles: + face = Psk.Face() + face.material_index = material_indices[f.material_index] + face.wedge_indices[0] = loop_wedge_indices[f.loops[2]] + face.wedge_indices[1] = loop_wedge_indices[f.loops[1]] + face.wedge_indices[2] = loop_wedge_indices[f.loops[0]] + face.smoothing_groups = poly_groups[f.polygon_index] + psk.faces.append(face) + + # WEIGHTS + if armature_object is not None: + # Because the vertex groups may contain entries for which there is no matching bone in the armature, + # we must filter them out and not export any weights for these vertex groups. 
+ bone_names = [x.name for x in bones] + vertex_group_names = [x.name for x in mesh_object.vertex_groups] + vertex_group_bone_indices = dict() + for vertex_group_index, vertex_group_name in enumerate(vertex_group_names): + try: + vertex_group_bone_indices[vertex_group_index] = bone_names.index(vertex_group_name) + except ValueError: + # The vertex group does not have a matching bone in the list of bones to be exported. + # Check to see if there is an associated bone for this vertex group that exists in the armature. + # If there is, we can traverse the ancestors of that bone to find an alternate bone to use for + # weighting the vertices belonging to this vertex group. + if vertex_group_name in armature_object.data.bones: + bone = armature_object.data.bones[vertex_group_name] + while bone is not None: + try: + bone_index = bone_names.index(bone.name) + vertex_group_bone_indices[vertex_group_index] = bone_index + break + except ValueError: + bone = bone.parent + for vertex_group_index, vertex_group in enumerate(mesh_object.vertex_groups): + if vertex_group_index not in vertex_group_bone_indices: + # Vertex group has no associated bone, skip it. + continue + bone_index = vertex_group_bone_indices[vertex_group_index] + for vertex_index in range(len(mesh_data.vertices)): try: - vertex_group_bone_indices[vertex_group_index] = bone_names.index(vertex_group_name) - except ValueError: - # The vertex group does not have a matching bone in the list of bones to be exported. - # Check to see if there is an associated bone for this vertex group that exists in the armature. - # If there is, we can traverse the ancestors of that bone to find an alternate bone to use for - # weighting the vertices belonging to this vertex group. 
- if vertex_group_name in armature_object.data.bones: - bone = armature_object.data.bones[vertex_group_name] - while bone is not None: - try: - bone_index = bone_names.index(bone.name) - vertex_group_bone_indices[vertex_group_index] = bone_index - break - except ValueError: - bone = bone.parent - for vertex_group_index, vertex_group in enumerate(mesh_object.vertex_groups): - if vertex_group_index not in vertex_group_bone_indices: - # Vertex group has no associated bone, skip it. + weight = vertex_group.weight(vertex_index) + except RuntimeError: continue - bone_index = vertex_group_bone_indices[vertex_group_index] - for vertex_index in range(len(mesh_data.vertices)): - try: - weight = vertex_group.weight(vertex_index) - except RuntimeError: - continue - if weight == 0.0: - continue - w = Psk.Weight() - w.bone_index = bone_index - w.point_index = vertex_offset + vertex_index - w.weight = weight - psk.weights.append(w) + if weight == 0.0: + continue + w = Psk.Weight() + w.bone_index = bone_index + w.point_index = vertex_offset + vertex_index + w.weight = weight + psk.weights.append(w) - if not options.use_raw_mesh_data: - bpy.data.objects.remove(mesh_object) - bpy.data.meshes.remove(mesh_data) - del mesh_data + if not options.use_raw_mesh_data: + bpy.data.objects.remove(mesh_object) + bpy.data.meshes.remove(mesh_data) + del mesh_data - return psk + return psk diff --git a/io_scene_psk_psa/psk/exporter.py b/io_scene_psk_psa/psk/exporter.py index 40cebcb..5d08d75 100644 --- a/io_scene_psk_psa/psk/exporter.py +++ b/io_scene_psk_psa/psk/exporter.py @@ -4,7 +4,7 @@ from bpy.props import BoolProperty, StringProperty, CollectionProperty, IntPrope from bpy.types import Operator, PropertyGroup from bpy_extras.io_utils import ExportHelper -from .builder import PskBuilder, PskBuilderOptions +from .builder import build_psk, PskBuildOptions, get_psk_input_objects from .data import * from ..helpers import populate_bone_group_list from ..types import BoneGroupListItem @@ -15,55 
+15,50 @@ MAX_BONE_COUNT = 256 MAX_MATERIAL_COUNT = 256 -class PskExporter(object): +def _write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None): + section = Section() + section.name = name + if data_type is not None and data is not None: + section.data_size = sizeof(data_type) + section.data_count = len(data) + fp.write(section) + if data is not None: + for datum in data: + fp.write(datum) - def __init__(self, psk: Psk): - self.psk: Psk = psk - @staticmethod - def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None): - section = Section() - section.name = name - if data_type is not None and data is not None: - section.data_size = sizeof(data_type) - section.data_count = len(data) - fp.write(section) - if data is not None: - for datum in data: - fp.write(datum) +def export_psk(psk: Psk, path: str): + if len(psk.wedges) > MAX_WEDGE_COUNT: + raise RuntimeError(f'Number of wedges ({len(psk.wedges)}) exceeds limit of {MAX_WEDGE_COUNT}') + if len(psk.bones) > MAX_BONE_COUNT: + raise RuntimeError(f'Number of bones ({len(psk.bones)}) exceeds limit of {MAX_BONE_COUNT}') + if len(psk.points) > MAX_POINT_COUNT: + raise RuntimeError(f'Numbers of vertices ({len(psk.points)}) exceeds limit of {MAX_POINT_COUNT}') + if len(psk.materials) > MAX_MATERIAL_COUNT: + raise RuntimeError(f'Number of materials ({len(psk.materials)}) exceeds limit of {MAX_MATERIAL_COUNT}') - def export(self, path: str): - if len(self.psk.wedges) > MAX_WEDGE_COUNT: - raise RuntimeError(f'Number of wedges ({len(self.psk.wedges)}) exceeds limit of {MAX_WEDGE_COUNT}') - if len(self.psk.bones) > MAX_BONE_COUNT: - raise RuntimeError(f'Number of bones ({len(self.psk.bones)}) exceeds limit of {MAX_BONE_COUNT}') - if len(self.psk.points) > MAX_POINT_COUNT: - raise RuntimeError(f'Numbers of vertices ({len(self.psk.points)}) exceeds limit of {MAX_POINT_COUNT}') - if len(self.psk.materials) > MAX_MATERIAL_COUNT: - raise RuntimeError(f'Number of materials 
({len(self.psk.materials)}) exceeds limit of {MAX_MATERIAL_COUNT}') + with open(path, 'wb') as fp: + _write_section(fp, b'ACTRHEAD') + _write_section(fp, b'PNTS0000', Vector3, psk.points) - with open(path, 'wb') as fp: - self.write_section(fp, b'ACTRHEAD') - self.write_section(fp, b'PNTS0000', Vector3, self.psk.points) + wedges = [] + for index, w in enumerate(psk.wedges): + wedge = Psk.Wedge16() + wedge.material_index = w.material_index + wedge.u = w.u + wedge.v = w.v + wedge.point_index = w.point_index + wedges.append(wedge) - wedges = [] - for index, w in enumerate(self.psk.wedges): - wedge = Psk.Wedge16() - wedge.material_index = w.material_index - wedge.u = w.u - wedge.v = w.v - wedge.point_index = w.point_index - wedges.append(wedge) - - self.write_section(fp, b'VTXW0000', Psk.Wedge16, wedges) - self.write_section(fp, b'FACE0000', Psk.Face, self.psk.faces) - self.write_section(fp, b'MATT0000', Psk.Material, self.psk.materials) - self.write_section(fp, b'REFSKELT', Psk.Bone, self.psk.bones) - self.write_section(fp, b'RAWWEIGHTS', Psk.Weight, self.psk.weights) + _write_section(fp, b'VTXW0000', Psk.Wedge16, wedges) + _write_section(fp, b'FACE0000', Psk.Face, psk.faces) + _write_section(fp, b'MATT0000', Psk.Material, psk.materials) + _write_section(fp, b'REFSKELT', Psk.Bone, psk.bones) + _write_section(fp, b'RAWWEIGHTS', Psk.Weight, psk.weights) def is_bone_filter_mode_item_available(context, identifier): - input_objects = PskBuilder.get_input_objects(context) + input_objects = get_psk_input_objects(context) armature_object = input_objects.armature_object if identifier == 'BONE_GROUPS': if not armature_object or not armature_object.pose or not armature_object.pose.bone_groups: @@ -88,7 +83,7 @@ class PskExportOperator(Operator, ExportHelper): def invoke(self, context, event): try: - input_objects = PskBuilder.get_input_objects(context) + input_objects = get_psk_input_objects(context) except RuntimeError as e: self.report({'ERROR_INVALID_CONTEXT'}, str(e)) return 
{'CANCELLED'} @@ -105,7 +100,7 @@ class PskExportOperator(Operator, ExportHelper): @classmethod def poll(cls, context): try: - PskBuilder.get_input_objects(context) + get_psk_input_objects(context) except RuntimeError as e: cls.poll_message_set(str(e)) return False @@ -136,15 +131,13 @@ class PskExportOperator(Operator, ExportHelper): def execute(self, context): pg = context.scene.psk_export - builder = PskBuilder() - options = PskBuilderOptions() + options = PskBuildOptions() options.bone_filter_mode = pg.bone_filter_mode options.bone_group_indices = [x.index for x in pg.bone_group_list if x.is_selected] options.use_raw_mesh_data = pg.use_raw_mesh_data try: - psk = builder.build(context, options) - exporter = PskExporter(psk) - exporter.export(self.filepath) + psk = build_psk(context, options) + export_psk(psk, self.filepath) except RuntimeError as e: self.report({'ERROR_INVALID_CONTEXT'}, str(e)) return {'CANCELLED'} diff --git a/io_scene_psk_psa/psk/importer.py b/io_scene_psk_psa/psk/importer.py index e2758df..17b3c45 100644 --- a/io_scene_psk_psa/psk/importer.py +++ b/io_scene_psk_psa/psk/importer.py @@ -12,7 +12,7 @@ from bpy_extras.io_utils import ImportHelper from mathutils import Quaternion, Vector, Matrix from .data import Psk -from .reader import PskReader +from .reader import read_psk from ..helpers import rgb_to_srgb @@ -26,209 +26,205 @@ class PskImportOptions(object): self.bone_length = 1.0 -class PskImporter(object): - def __init__(self): +def import_psk(psk: Psk, context, options: PskImportOptions): + # ARMATURE + armature_data = bpy.data.armatures.new(options.name) + armature_object = bpy.data.objects.new(options.name, armature_data) + armature_object.show_in_front = True + + context.scene.collection.objects.link(armature_object) + + try: + bpy.ops.object.mode_set(mode='OBJECT') + except: pass - def import_psk(self, psk: Psk, context, options: PskImportOptions): - # ARMATURE - armature_data = bpy.data.armatures.new(options.name) - armature_object = 
bpy.data.objects.new(options.name, armature_data) - armature_object.show_in_front = True + armature_object.select_set(state=True) + bpy.context.view_layer.objects.active = armature_object - context.scene.collection.objects.link(armature_object) + bpy.ops.object.mode_set(mode='EDIT') + # Intermediate bone type for the purpose of construction. + class ImportBone(object): + def __init__(self, index: int, psk_bone: Psk.Bone): + self.index: int = index + self.psk_bone: Psk.Bone = psk_bone + self.parent: Optional[ImportBone] = None + self.local_rotation: Quaternion = Quaternion() + self.local_translation: Vector = Vector() + self.world_rotation_matrix: Matrix = Matrix() + self.world_matrix: Matrix = Matrix() + self.vertex_group = None + self.orig_quat: Quaternion = Quaternion() + self.orig_loc: Vector = Vector() + self.post_quat: Quaternion = Quaternion() + + import_bones = [] + + for bone_index, psk_bone in enumerate(psk.bones): + import_bone = ImportBone(bone_index, psk_bone) + psk_bone.parent_index = max(0, psk_bone.parent_index) + import_bone.local_rotation = Quaternion(tuple(psk_bone.rotation)) + import_bone.local_translation = Vector(tuple(psk_bone.location)) + if psk_bone.parent_index == 0 and bone_index == 0: + import_bone.world_rotation_matrix = import_bone.local_rotation.to_matrix() + import_bone.world_matrix = Matrix.Translation(import_bone.local_translation) + import_bones.append(import_bone) + + for bone_index, bone in enumerate(import_bones): + if bone.psk_bone.parent_index == 0 and bone_index == 0: + continue + parent = import_bones[bone.psk_bone.parent_index] + bone.parent = parent + bone.world_matrix = parent.world_rotation_matrix.to_4x4() + translation = bone.local_translation.copy() + translation.rotate(parent.world_rotation_matrix) + bone.world_matrix.translation = parent.world_matrix.translation + translation + bone.world_rotation_matrix = bone.local_rotation.conjugated().to_matrix() + bone.world_rotation_matrix.rotate(parent.world_rotation_matrix) + 
+ for import_bone in import_bones: + bone_name = import_bone.psk_bone.name.decode('utf-8') + edit_bone = armature_data.edit_bones.new(bone_name) + + if import_bone.parent is not None: + edit_bone.parent = armature_data.edit_bones[import_bone.psk_bone.parent_index] + else: + import_bone.local_rotation.conjugate() + + edit_bone.tail = Vector((0.0, options.bone_length, 0.0)) + edit_bone_matrix = import_bone.local_rotation.conjugated() + edit_bone_matrix.rotate(import_bone.world_matrix) + edit_bone_matrix = edit_bone_matrix.to_matrix().to_4x4() + edit_bone_matrix.translation = import_bone.world_matrix.translation + edit_bone.matrix = edit_bone_matrix + + # Store bind pose information in the bone's custom properties. + # This information is used when importing animations from PSA files. + edit_bone['orig_quat'] = import_bone.local_rotation + edit_bone['orig_loc'] = import_bone.local_translation + edit_bone['post_quat'] = import_bone.local_rotation.conjugated() + + # MESH + mesh_data = bpy.data.meshes.new(options.name) + mesh_object = bpy.data.objects.new(options.name, mesh_data) + + # MATERIALS + for material in psk.materials: + # TODO: re-use of materials should be an option + bpy_material = bpy.data.materials.new(material.name.decode('utf-8')) + mesh_data.materials.append(bpy_material) + + bm = bmesh.new() + + # VERTICES + for point in psk.points: + bm.verts.new(tuple(point)) + + bm.verts.ensure_lookup_table() + + degenerate_face_indices = set() + for face_index, face in enumerate(psk.faces): + point_indices = [bm.verts[psk.wedges[i].point_index] for i in reversed(face.wedge_indices)] try: - bpy.ops.object.mode_set(mode='OBJECT') - except: - pass + bm_face = bm.faces.new(point_indices) + bm_face.material_index = face.material_index + except ValueError: + degenerate_face_indices.add(face_index) - armature_object.select_set(state=True) - bpy.context.view_layer.objects.active = armature_object + if len(degenerate_face_indices) > 0: + print(f'WARNING: Discarded 
{len(degenerate_face_indices)} degenerate face(s).') - bpy.ops.object.mode_set(mode='EDIT') + bm.to_mesh(mesh_data) - # Intermediate bone type for the purpose of construction. - class ImportBone(object): - def __init__(self, index: int, psk_bone: Psk.Bone): - self.index: int = index - self.psk_bone: Psk.Bone = psk_bone - self.parent: Optional[ImportBone] = None - self.local_rotation: Quaternion = Quaternion() - self.local_translation: Vector = Vector() - self.world_rotation_matrix: Matrix = Matrix() - self.world_matrix: Matrix = Matrix() - self.vertex_group = None - self.orig_quat: Quaternion = Quaternion() - self.orig_loc: Vector = Vector() - self.post_quat: Quaternion = Quaternion() + # TEXTURE COORDINATES + data_index = 0 + uv_layer = mesh_data.uv_layers.new(name='VTXW0000') + for face_index, face in enumerate(psk.faces): + if face_index in degenerate_face_indices: + continue + face_wedges = [psk.wedges[i] for i in reversed(face.wedge_indices)] + for wedge in face_wedges: + uv_layer.data[data_index].uv = wedge.u, 1.0 - wedge.v + data_index += 1 - import_bones = [] + # EXTRA UVS + if psk.has_extra_uvs and options.should_import_extra_uvs: + extra_uv_channel_count = int(len(psk.extra_uvs) / len(psk.wedges)) + wedge_index_offset = 0 + for extra_uv_index in range(extra_uv_channel_count): + data_index = 0 + uv_layer = mesh_data.uv_layers.new(name=f'EXTRAUV{extra_uv_index}') + for face_index, face in enumerate(psk.faces): + if face_index in degenerate_face_indices: + continue + for wedge_index in reversed(face.wedge_indices): + u, v = psk.extra_uvs[wedge_index_offset + wedge_index] + uv_layer.data[data_index].uv = u, 1.0 - v + data_index += 1 + wedge_index_offset += len(psk.wedges) - for bone_index, psk_bone in enumerate(psk.bones): - import_bone = ImportBone(bone_index, psk_bone) - psk_bone.parent_index = max(0, psk_bone.parent_index) - import_bone.local_rotation = Quaternion(tuple(psk_bone.rotation)) - import_bone.local_translation = Vector(tuple(psk_bone.location)) 
- if psk_bone.parent_index == 0 and bone_index == 0: - import_bone.world_rotation_matrix = import_bone.local_rotation.to_matrix() - import_bone.world_matrix = Matrix.Translation(import_bone.local_translation) - import_bones.append(import_bone) + # VERTEX COLORS + if psk.has_vertex_colors and options.should_import_vertex_colors: + size = (len(psk.points), 4) + vertex_colors = np.full(size, inf) + vertex_color_data = mesh_data.vertex_colors.new(name='VERTEXCOLOR') + ambiguous_vertex_color_point_indices = [] - for bone_index, bone in enumerate(import_bones): - if bone.psk_bone.parent_index == 0 and bone_index == 0: - continue - parent = import_bones[bone.psk_bone.parent_index] - bone.parent = parent - bone.world_matrix = parent.world_rotation_matrix.to_4x4() - translation = bone.local_translation.copy() - translation.rotate(parent.world_rotation_matrix) - bone.world_matrix.translation = parent.world_matrix.translation + translation - bone.world_rotation_matrix = bone.local_rotation.conjugated().to_matrix() - bone.world_rotation_matrix.rotate(parent.world_rotation_matrix) - - for import_bone in import_bones: - bone_name = import_bone.psk_bone.name.decode('utf-8') - edit_bone = armature_data.edit_bones.new(bone_name) - - if import_bone.parent is not None: - edit_bone.parent = armature_data.edit_bones[import_bone.psk_bone.parent_index] + for wedge_index, wedge in enumerate(psk.wedges): + point_index = wedge.point_index + psk_vertex_color = psk.vertex_colors[wedge_index].normalized() + if vertex_colors[point_index, 0] != inf and tuple(vertex_colors[point_index]) != psk_vertex_color: + ambiguous_vertex_color_point_indices.append(point_index) else: - import_bone.local_rotation.conjugate() + vertex_colors[point_index] = psk_vertex_color - edit_bone.tail = Vector((0.0, options.bone_length, 0.0)) - edit_bone_matrix = import_bone.local_rotation.conjugated() - edit_bone_matrix.rotate(import_bone.world_matrix) - edit_bone_matrix = edit_bone_matrix.to_matrix().to_4x4() - 
edit_bone_matrix.translation = import_bone.world_matrix.translation - edit_bone.matrix = edit_bone_matrix + if options.vertex_color_space == 'SRGBA': + for i in range(vertex_colors.shape[0]): + vertex_colors[i, :3] = tuple(map(lambda x: rgb_to_srgb(x), vertex_colors[i, :3])) - # Store bind pose information in the bone's custom properties. - # This information is used when importing animations from PSA files. - edit_bone['orig_quat'] = import_bone.local_rotation - edit_bone['orig_loc'] = import_bone.local_translation - edit_bone['post_quat'] = import_bone.local_rotation.conjugated() + for loop_index, loop in enumerate(mesh_data.loops): + vertex_color = vertex_colors[loop.vertex_index] + if vertex_color is not None: + vertex_color_data.data[loop_index].color = vertex_color + else: + vertex_color_data.data[loop_index].color = 1.0, 1.0, 1.0, 1.0 - # MESH - mesh_data = bpy.data.meshes.new(options.name) - mesh_object = bpy.data.objects.new(options.name, mesh_data) + if len(ambiguous_vertex_color_point_indices) > 0: + print(f'WARNING: {len(ambiguous_vertex_color_point_indices)} vertex(es) with ambiguous vertex colors.') - # MATERIALS - for material in psk.materials: - # TODO: re-use of materials should be an option - bpy_material = bpy.data.materials.new(material.name.decode('utf-8')) - mesh_data.materials.append(bpy_material) + # VERTEX NORMALS + if psk.has_vertex_normals and options.should_import_vertex_normals: + mesh_data.polygons.foreach_set("use_smooth", [True] * len(mesh_data.polygons)) + normals = [] + for vertex_normal in psk.vertex_normals: + normals.append(tuple(vertex_normal)) + mesh_data.normals_split_custom_set_from_vertices(normals) + mesh_data.use_auto_smooth = True - bm = bmesh.new() + bm.normal_update() + bm.free() - # VERTICES - for point in psk.points: - bm.verts.new(tuple(point)) + # Get a list of all bones that have weights associated with them. 
+ vertex_group_bone_indices = set(map(lambda weight: weight.bone_index, psk.weights)) + for import_bone in map(lambda x: import_bones[x], sorted(list(vertex_group_bone_indices))): + import_bone.vertex_group = mesh_object.vertex_groups.new( + name=import_bone.psk_bone.name.decode('windows-1252')) - bm.verts.ensure_lookup_table() + for weight in psk.weights: + import_bones[weight.bone_index].vertex_group.add((weight.point_index,), weight.weight, 'ADD') - degenerate_face_indices = set() - for face_index, face in enumerate(psk.faces): - point_indices = [bm.verts[psk.wedges[i].point_index] for i in reversed(face.wedge_indices)] - try: - bm_face = bm.faces.new(point_indices) - bm_face.material_index = face.material_index - except ValueError: - degenerate_face_indices.add(face_index) + # Add armature modifier to our mesh object. + armature_modifier = mesh_object.modifiers.new(name='Armature', type='ARMATURE') + armature_modifier.object = armature_object + mesh_object.parent = armature_object - if len(degenerate_face_indices) > 0: - print(f'WARNING: Discarded {len(degenerate_face_indices)} degenerate face(s).') + context.scene.collection.objects.link(mesh_object) - bm.to_mesh(mesh_data) - - # TEXTURE COORDINATES - data_index = 0 - uv_layer = mesh_data.uv_layers.new(name='VTXW0000') - for face_index, face in enumerate(psk.faces): - if face_index in degenerate_face_indices: - continue - face_wedges = [psk.wedges[i] for i in reversed(face.wedge_indices)] - for wedge in face_wedges: - uv_layer.data[data_index].uv = wedge.u, 1.0 - wedge.v - data_index += 1 - - # EXTRA UVS - if psk.has_extra_uvs and options.should_import_extra_uvs: - extra_uv_channel_count = int(len(psk.extra_uvs) / len(psk.wedges)) - wedge_index_offset = 0 - for extra_uv_index in range(extra_uv_channel_count): - data_index = 0 - uv_layer = mesh_data.uv_layers.new(name=f'EXTRAUV{extra_uv_index}') - for face_index, face in enumerate(psk.faces): - if face_index in degenerate_face_indices: - continue - for 
wedge_index in reversed(face.wedge_indices): - u, v = psk.extra_uvs[wedge_index_offset + wedge_index] - uv_layer.data[data_index].uv = u, 1.0 - v - data_index += 1 - wedge_index_offset += len(psk.wedges) - - # VERTEX COLORS - if psk.has_vertex_colors and options.should_import_vertex_colors: - size = (len(psk.points), 4) - vertex_colors = np.full(size, inf) - vertex_color_data = mesh_data.vertex_colors.new(name='VERTEXCOLOR') - ambiguous_vertex_color_point_indices = [] - - for wedge_index, wedge in enumerate(psk.wedges): - point_index = wedge.point_index - psk_vertex_color = psk.vertex_colors[wedge_index].normalized() - if vertex_colors[point_index, 0] != inf and tuple(vertex_colors[point_index]) != psk_vertex_color: - ambiguous_vertex_color_point_indices.append(point_index) - else: - vertex_colors[point_index] = psk_vertex_color - - if options.vertex_color_space == 'SRGBA': - for i in range(vertex_colors.shape[0]): - vertex_colors[i, :3] = tuple(map(lambda x: rgb_to_srgb(x), vertex_colors[i, :3])) - - for loop_index, loop in enumerate(mesh_data.loops): - vertex_color = vertex_colors[loop.vertex_index] - if vertex_color is not None: - vertex_color_data.data[loop_index].color = vertex_color - else: - vertex_color_data.data[loop_index].color = 1.0, 1.0, 1.0, 1.0 - - if len(ambiguous_vertex_color_point_indices) > 0: - print(f'WARNING: {len(ambiguous_vertex_color_point_indices)} vertex(es) with ambiguous vertex colors.') - - # VERTEX NORMALS - if psk.has_vertex_normals and options.should_import_vertex_normals: - mesh_data.polygons.foreach_set("use_smooth", [True] * len(mesh_data.polygons)) - normals = [] - for vertex_normal in psk.vertex_normals: - normals.append(tuple(vertex_normal)) - mesh_data.normals_split_custom_set_from_vertices(normals) - mesh_data.use_auto_smooth = True - - bm.normal_update() - bm.free() - - # Get a list of all bones that have weights associated with them. 
- vertex_group_bone_indices = set(map(lambda weight: weight.bone_index, psk.weights)) - for import_bone in map(lambda x: import_bones[x], sorted(list(vertex_group_bone_indices))): - import_bone.vertex_group = mesh_object.vertex_groups.new( - name=import_bone.psk_bone.name.decode('windows-1252')) - - for weight in psk.weights: - import_bones[weight.bone_index].vertex_group.add((weight.point_index,), weight.weight, 'ADD') - - # Add armature modifier to our mesh object. - armature_modifier = mesh_object.modifiers.new(name='Armature', type='ARMATURE') - armature_modifier.object = armature_object - mesh_object.parent = armature_object - - context.scene.collection.objects.link(mesh_object) - - try: - bpy.ops.object.mode_set(mode='OBJECT') - except: - pass + try: + bpy.ops.object.mode_set(mode='OBJECT') + except: + pass class PskImportPropertyGroup(PropertyGroup): @@ -286,8 +282,9 @@ class PskImportOperator(Operator, ImportHelper): def execute(self, context): pg = context.scene.psk_import - reader = PskReader() - psk = reader.read(self.filepath) + + psk = read_psk(self.filepath) + options = PskImportOptions() options.name = os.path.splitext(os.path.basename(self.filepath))[0] options.should_import_extra_uvs = pg.should_import_extra_uvs @@ -295,7 +292,9 @@ class PskImportOperator(Operator, ImportHelper): options.should_import_vertex_normals = pg.should_import_vertex_normals options.vertex_color_space = pg.vertex_color_space options.bone_length = pg.bone_length - PskImporter().import_psk(psk, context, options) + + import_psk(psk, context, options) + return {'FINISHED'} def draw(self, context): diff --git a/io_scene_psk_psa/psk/reader.py b/io_scene_psk_psa/psk/reader.py index 7fde8b6..c68a1af 100644 --- a/io_scene_psk_psa/psk/reader.py +++ b/io_scene_psk_psa/psk/reader.py @@ -3,53 +3,48 @@ import ctypes from .data import * -class PskReader(object): +def _read_types(fp, data_class: ctypes.Structure, section: Section, data): + buffer_length = section.data_size * 
section.data_count + buffer = fp.read(buffer_length) + offset = 0 + for _ in range(section.data_count): + data.append(data_class.from_buffer_copy(buffer, offset)) + offset += section.data_size - def __init__(self): - pass - @staticmethod - def read_types(fp, data_class: ctypes.Structure, section: Section, data): - buffer_length = section.data_size * section.data_count - buffer = fp.read(buffer_length) - offset = 0 - for _ in range(section.data_count): - data.append(data_class.from_buffer_copy(buffer, offset)) - offset += section.data_size - - def read(self, path) -> Psk: - psk = Psk() - with open(path, 'rb') as fp: - while fp.read(1): - fp.seek(-1, 1) - section = Section.from_buffer_copy(fp.read(ctypes.sizeof(Section))) - if section.name == b'ACTRHEAD': - pass - elif section.name == b'PNTS0000': - PskReader.read_types(fp, Vector3, section, psk.points) - elif section.name == b'VTXW0000': - if section.data_size == ctypes.sizeof(Psk.Wedge16): - PskReader.read_types(fp, Psk.Wedge16, section, psk.wedges) - elif section.data_size == ctypes.sizeof(Psk.Wedge32): - PskReader.read_types(fp, Psk.Wedge32, section, psk.wedges) - else: - raise RuntimeError('Unrecognized wedge format') - elif section.name == b'FACE0000': - PskReader.read_types(fp, Psk.Face, section, psk.faces) - elif section.name == b'MATT0000': - PskReader.read_types(fp, Psk.Material, section, psk.materials) - elif section.name == b'REFSKELT': - PskReader.read_types(fp, Psk.Bone, section, psk.bones) - elif section.name == b'RAWWEIGHTS': - PskReader.read_types(fp, Psk.Weight, section, psk.weights) - elif section.name == b'FACE3200': - PskReader.read_types(fp, Psk.Face32, section, psk.faces) - elif section.name == b'VERTEXCOLOR': - PskReader.read_types(fp, Color, section, psk.vertex_colors) - elif section.name.startswith(b'EXTRAUVS'): - PskReader.read_types(fp, Vector2, section, psk.extra_uvs) - elif section.name == b'VTXNORMS': - PskReader.read_types(fp, Vector3, section, psk.vertex_normals) +def read_psk(path) 
-> Psk:
+    psk = Psk()
+    with open(path, 'rb') as fp:
+        while fp.read(1):
+            fp.seek(-1, 1)
+            section = Section.from_buffer_copy(fp.read(ctypes.sizeof(Section)))
+            if section.name == b'ACTRHEAD':
+                pass
+            elif section.name == b'PNTS0000':
+                _read_types(fp, Vector3, section, psk.points)
+            elif section.name == b'VTXW0000':
+                if section.data_size == ctypes.sizeof(Psk.Wedge16):
+                    _read_types(fp, Psk.Wedge16, section, psk.wedges)
+                elif section.data_size == ctypes.sizeof(Psk.Wedge32):
+                    _read_types(fp, Psk.Wedge32, section, psk.wedges)
                 else:
-                    raise RuntimeError(f'Unrecognized section "{section.name} at position {15:fp.tell()}"')
-    return psk
+                    raise RuntimeError('Unrecognized wedge format')
+            elif section.name == b'FACE0000':
+                _read_types(fp, Psk.Face, section, psk.faces)
+            elif section.name == b'MATT0000':
+                _read_types(fp, Psk.Material, section, psk.materials)
+            elif section.name == b'REFSKELT':
+                _read_types(fp, Psk.Bone, section, psk.bones)
+            elif section.name == b'RAWWEIGHTS':
+                _read_types(fp, Psk.Weight, section, psk.weights)
+            elif section.name == b'FACE3200':
+                _read_types(fp, Psk.Face32, section, psk.faces)
+            elif section.name == b'VERTEXCOLOR':
+                _read_types(fp, Color, section, psk.vertex_colors)
+            elif section.name.startswith(b'EXTRAUVS'):
+                _read_types(fp, Vector2, section, psk.extra_uvs)
+            elif section.name == b'VTXNORMS':
+                _read_types(fp, Vector3, section, psk.vertex_normals)
+            else:
+                raise RuntimeError(f'Unrecognized section "{section.name}" at position {fp.tell()}')
+    return psk