From 563172ae2360aae0b54ea467c7043b161a90a385 Mon Sep 17 00:00:00 2001 From: Colin Basnett Date: Tue, 7 Nov 2023 18:38:24 -0800 Subject: [PATCH] Initial commit for handling of SCALEKEYS block --- io_scene_psk_psa/psa/data.py | 20 ++++++++ io_scene_psk_psa/psa/import_/operators.py | 3 ++ io_scene_psk_psa/psa/import_/properties.py | 2 + io_scene_psk_psa/psa/importer.py | 31 +++++++++++-- io_scene_psk_psa/psa/reader.py | 53 +++++++++++++++++++++- 5 files changed, 104 insertions(+), 5 deletions(-) diff --git a/io_scene_psk_psa/psa/data.py b/io_scene_psk_psa/psa/data.py index 8d5d8e5..89bc159 100644 --- a/io_scene_psk_psa/psa/data.py +++ b/io_scene_psk_psa/psa/data.py @@ -58,7 +58,27 @@ class Psa: def __repr__(self) -> str: return repr((self.location, self.rotation, self.time)) + class ScaleKey(Structure): + _fields_ = [ + ('scale', Vector3), + ('time', c_float) + ] + + @property + def data(self): + yield self.scale.x + yield self.scale.y + yield self.scale.z + + def __repr__(self) -> str: + return repr((self.scale, self.time)) + def __init__(self): self.bones: List[Psa.Bone] = [] self.sequences: typing.OrderedDict[str, Psa.Sequence] = OrderedDict() self.keys: List[Psa.Key] = [] + self.scale_keys: List[Psa.ScaleKey] = [] + + @property + def has_scale_keys(self): + return len(self.scale_keys) > 0 diff --git a/io_scene_psk_psa/psa/import_/operators.py b/io_scene_psk_psa/psa/import_/operators.py index 08af962..7aff586 100644 --- a/io_scene_psk_psa/psa/import_/operators.py +++ b/io_scene_psk_psa/psa/import_/operators.py @@ -164,6 +164,7 @@ class PSA_OT_import(Operator, ImportHelper): options.should_overwrite = pg.should_overwrite options.should_write_metadata = pg.should_write_metadata options.should_write_keyframes = pg.should_write_keyframes + options.should_write_scale_keys = pg.should_write_scale_keys options.should_convert_to_samples = pg.should_convert_to_samples options.bone_mapping_mode = pg.bone_mapping_mode options.fps_source = pg.fps_source @@ -245,6 +246,8 @@ 
class PSA_OT_import(Operator, ImportHelper): col.prop(pg, 'fps_source') if pg.fps_source == 'CUSTOM': col.prop(pg, 'fps_custom') + # Scale Keys + col.prop(pg, 'should_write_scale_keys') col = layout.column(heading='Options') col.use_property_split = True diff --git a/io_scene_psk_psa/psa/import_/properties.py b/io_scene_psk_psa/psa/import_/properties.py index b7e14f5..fcf86d1 100644 --- a/io_scene_psk_psa/psa/import_/properties.py +++ b/io_scene_psk_psa/psa/import_/properties.py @@ -41,6 +41,8 @@ class PSA_PG_import(PropertyGroup): description='If an action with a matching name already exists, the existing action ' 'will have it\'s data overwritten instead of a new action being created') should_write_keyframes: BoolProperty(default=True, name='Keyframes', options=empty_set) + should_write_scale_keys: BoolProperty(default=True, name='Scale Keys', options=empty_set, description= + 'Import scale keys, if available') should_write_metadata: BoolProperty(default=True, name='Metadata', options=empty_set, description='Additional data will be written to the custom properties of the ' 'Action (e.g., frame rate)') diff --git a/io_scene_psk_psa/psa/importer.py b/io_scene_psk_psa/psa/importer.py index c837e11..30d8154 100644 --- a/io_scene_psk_psa/psa/importer.py +++ b/io_scene_psk_psa/psa/importer.py @@ -1,5 +1,5 @@ import typing -from typing import List, Optional +from typing import List, Optional, Iterable import bpy import numpy @@ -18,6 +18,7 @@ class PsaImportOptions(object): self.should_overwrite = False self.should_write_keyframes = True self.should_write_metadata = True + self.should_write_scale_keys = True self.action_name_prefix = '' self.should_convert_to_samples = False self.bone_mapping_mode = 'CASE_INSENSITIVE' @@ -35,9 +36,10 @@ class ImportBone(object): self.original_rotation: Quaternion = Quaternion() self.post_rotation: Quaternion = Quaternion() self.fcurves: List[FCurve] = [] + self.scale_fcurves: List[FCurve] = [] -def _calculate_fcurve_data(import_bone: 
ImportBone, key_data: typing.Iterable[float]): +def _calculate_fcurve_data(import_bone: ImportBone, key_data: Iterable[float]): # Convert world-space transforms to local-space transforms. key_rotation = Quaternion(key_data[0:4]) key_location = Vector(key_data[4:]) @@ -168,7 +170,6 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object, action = bpy.data.actions.new(name=action_name) # Calculate the target FPS. - target_fps = sequence.fps if options.fps_source == 'CUSTOM': target_fps = options.fps_custom elif options.fps_source == 'SCENE': @@ -200,6 +201,14 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object, action.fcurves.new(location_data_path, index=1, action_group=pose_bone.name), # Ly action.fcurves.new(location_data_path, index=2, action_group=pose_bone.name), # Lz ] + + if options.should_write_scale_keys: + scale_data_path = pose_bone.path_from_id('scale') + import_bone.scale_fcurves += [ + action.fcurves.new(scale_data_path, index=0, action_group=pose_bone.name), # Sx + action.fcurves.new(scale_data_path, index=1, action_group=pose_bone.name), # Sy + action.fcurves.new(scale_data_path, index=2, action_group=pose_bone.name), # Sz + ] # Read the sequence data matrix from the PSA. sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name) @@ -227,6 +236,22 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object, for fcurve_keyframe in fcurve.keyframe_points: fcurve_keyframe.interpolation = 'LINEAR' + if options.should_write_scale_keys: + sequence_scale_data_matrix = psa_reader.read_sequence_scale_key_data_matrix(sequence_name) + # Write the scale keys out. + fcurve_data = numpy.zeros(2 * sequence.frame_count, dtype=float) + # Populate the keyframe time data.
+ fcurve_data[0::2] = [x * keyframe_time_dilation for x in range(sequence.frame_count)] + for bone_index, import_bone in enumerate(import_bones): + if import_bone is None: + continue + for fcurve_index, fcurve in enumerate(import_bone.scale_fcurves): + fcurve_data[1::2] = sequence_scale_data_matrix[:, bone_index, fcurve_index] + fcurve.keyframe_points.add(sequence.frame_count) + fcurve.keyframe_points.foreach_set('co', fcurve_data) + for fcurve_keyframe in fcurve.keyframe_points: + fcurve_keyframe.interpolation = 'LINEAR' + if options.should_convert_to_samples: # Bake the curve to samples. for fcurve in action.fcurves: diff --git a/io_scene_psk_psa/psa/reader.py b/io_scene_psk_psa/psa/reader.py index 6b74c2f..b7aaffc 100644 --- a/io_scene_psk_psa/psa/reader.py +++ b/io_scene_psk_psa/psa/reader.py @@ -1,4 +1,5 @@ import ctypes +from typing import Optional import numpy as np @@ -32,6 +33,7 @@ class PsaReader(object): def __init__(self, path): self.keys_data_offset: int = 0 + self.scale_keys_data_offset: Optional[int] = None self.fp = open(path, 'rb') self.psa: Psa = self._read(self.fp) @@ -65,9 +67,9 @@ class PsaReader(object): Reads and returns the key data for a sequence. @param sequence_name: The name of the sequence. - @return: A list of Psa.Keys. + @return: A list of keys for the sequence. """ - # Set the file reader to the beginning of the keys data + # Set the file reader to the beginning of the key data. sequence = self.psa.sequences[sequence_name] data_size = sizeof(Psa.Key) bone_count = len(self.psa.bones) @@ -83,6 +85,49 @@ class PsaReader(object): offset += data_size return keys + def read_sequence_scale_key_data_matrix(self, sequence_name: str) -> np.ndarray: + """ + Reads and returns the scale key data matrix for the given sequence. + @param sequence_name: The name of the sequence. + @return: An FxBx3 matrix where F is the number of frames, B is the number of bones. 
+ """ + sequence = self.psa.sequences[sequence_name] + scale_keys = self.read_sequence_scale_keys(sequence_name) + bone_count = len(self.psa.bones) + matrix_size = sequence.frame_count, bone_count, 3 + matrix = np.ones(matrix_size) + keys_iter = iter(scale_keys) + for frame_index in range(sequence.frame_count): + for bone_index in range(bone_count): + matrix[frame_index, bone_index, :] = list(next(keys_iter).data) + return matrix + + def read_sequence_scale_keys(self, sequence_name: str) -> List[Psa.ScaleKey]: + """ + Reads and returns the scale key data for a sequence. + + Throws a RuntimeError exception if the sequence does not contain scale keys (use Psa.has_scale_keys to check). + @param sequence_name: The name of the sequence. + @return: A list of scale keys for the sequence. + """ + if not self.psa.has_scale_keys: + raise RuntimeError('The PSA file does not contain scale keys.') + # Set the file reader to the beginning of the key data. + sequence = self.psa.sequences[sequence_name] + data_size = sizeof(Psa.ScaleKey) + bone_count = len(self.psa.bones) + buffer_length = data_size * bone_count * sequence.frame_count + sequence_scale_keys_offset = self.scale_keys_data_offset + (sequence.frame_start_index * bone_count * data_size) + self.fp.seek(sequence_scale_keys_offset, 0) + buffer = self.fp.read(buffer_length) + offset = 0 + scale_keys = [] + for _ in range(sequence.frame_count * bone_count): + scale_key = Psa.ScaleKey.from_buffer_copy(buffer, offset) + scale_keys.append(scale_key) + offset += data_size + return scale_keys + @staticmethod def _read_types(fp, data_class, section: Section, data): buffer_length = section.data_size * section.data_count @@ -112,6 +157,10 @@ class PsaReader(object): # Skip keys on this pass. We will keep this file open and read from it as needed. self.keys_data_offset = fp.tell() fp.seek(section.data_size * section.data_count, 1) + elif section.name == b'SCALEKEYS': + # Skip scale keys on this pass.
We will keep this file open and read from it as needed. + self.scale_keys_data_offset = fp.tell() + fp.seek(section.data_size * section.data_count, 1) else: fp.seek(section.data_size * section.data_count, 1) print(f'Unrecognized section in PSA: "{section.name}"')