Implement #142: Add support for SCALEKEYS

This commit is contained in:
Colin Basnett
2026-02-16 17:52:08 -08:00
parent d66d4499e5
commit 4a9815edc2
9 changed files with 81 additions and 13 deletions

View File

@@ -20,9 +20,10 @@ RUN BLENDER_EXECUTABLE=$(blender-downloader $BLENDER_VERSION --extract --remove-
RUN pip install pytest-cov
# Source the environment variables and install Python dependencies
# TODO: would be nice to have these installed in the bash script below.
RUN . /etc/environment && \
$BLENDER_PYTHON -m ensurepip && \
$BLENDER_PYTHON -m pip install pytest pytest-cov psk-psa-py
$BLENDER_PYTHON -m pip install pytest pytest-cov psk-psa-py==0.0.4
# Persist BLENDER_EXECUTABLE as an environment variable
RUN echo $(cat /blender_executable_path) > /tmp/blender_executable_path_env && \

View File

@@ -1,6 +1,6 @@
schema_version = "1.0.0"
id = "io_scene_psk_psa"
version = "9.0.2"
version = "9.1.0"
name = "Unreal PSK/PSA (.psk/.psa)"
tagline = "Import and export PSK and PSA files used in Unreal Engine"
maintainer = "Colin Basnett <cmbasnett@gmail.com>"
@@ -14,7 +14,7 @@ license = [
"SPDX:GPL-3.0-or-later",
]
wheels = [
'./wheels/psk_psa_py-0.0.1-py3-none-any.whl'
'./wheels/psk_psa_py-0.0.4-py3-none-any.whl'
]
[build]

View File

@@ -110,7 +110,7 @@ def load_psa_file(context, filepath: str):
try:
# Read the file and populate the action list.
p = os.path.abspath(filepath)
psa_reader = PsaReader(p)
psa_reader = PsaReader.from_path(p)
for sequence in psa_reader.sequences.values():
item = pg.sequence_list.add()
item.action_name = sequence.name.decode('windows-1252')
@@ -142,7 +142,7 @@ class PSA_OT_import_drag_and_drop(Operator, PsaImportMixin):
for file in self.files:
psa_path = str(os.path.join(self.directory, file.name))
psa_reader = PsaReader(psa_path)
psa_reader = PsaReader.from_path(psa_path)
sequence_names = list(psa_reader.sequences.keys())
options = psa_import_options_from_property_group(self, sequence_names)
@@ -188,6 +188,7 @@ def psa_import_options_from_property_group(pg: PsaImportMixin, sequence_names: I
options.should_overwrite = pg.should_overwrite
options.should_write_metadata = pg.should_write_metadata
options.should_write_keyframes = pg.should_write_keyframes
options.should_write_scale_keys = pg.should_write_scale_keys
options.should_convert_to_samples = pg.should_convert_to_samples
options.bone_mapping = BoneMapping(
is_case_sensitive=pg.bone_mapping_is_case_sensitive,
@@ -215,7 +216,7 @@ def _import_psa(context,
except Exception as e:
warnings.append(f'Failed to read PSA config file: {e}')
psa_reader = PsaReader(filepath)
psa_reader = PsaReader.from_path(filepath)
result = import_psa(context, psa_reader, armature_object, options)
result.warnings.extend(warnings)
@@ -242,7 +243,7 @@ class PSA_OT_import_all(Operator, PsaImportMixin):
def execute(self, context):
sequence_names = []
with PsaReader(self.filepath) as psa_reader:
with PsaReader.from_path(self.filepath) as psa_reader:
sequence_names.extend(psa_reader.sequences.keys())
options = PsaImportOptions(
@@ -376,6 +377,7 @@ class PSA_OT_import(Operator, ImportHelper, PsaImportMixin):
col.use_property_decorate = False
col.prop(self, 'should_write_keyframes')
col.prop(self, 'should_write_metadata')
col.prop(self, 'should_write_scale_keys')
if self.should_write_keyframes:
col = col.column(heading='Keyframes')
@@ -426,6 +428,7 @@ def draw_psa_import_options_no_panels(layout, pg: PsaImportMixin):
col.use_property_decorate = False
col.prop(pg, 'should_write_keyframes')
col.prop(pg, 'should_write_metadata')
col.prop(pg, 'should_write_scale_keys')
if pg.should_write_keyframes:
col = col.column(heading='Keyframes')

View File

@@ -66,6 +66,7 @@ class PsaImportMixin:
should_write_metadata: BoolProperty(default=True, name='Metadata', options=set(),
description='Additional data will be written to the custom properties of the '
'Action (e.g., frame rate)')
should_write_scale_keys: BoolProperty(default=True, name='Scale Keys', options=set())
sequence_filter_name: StringProperty(default='', options={'TEXTEDIT_UPDATE'})
sequence_filter_is_selected: BoolProperty(default=False, options=set(), name='Only Show Selected',
description='Only show selected sequences')

View File

@@ -25,6 +25,7 @@ class PsaImportMixin:
should_overwrite: bool
should_write_keyframes: bool
should_write_metadata: bool
should_write_scale_keys: bool
sequence_filter_name: str
sequence_filter_is_selected: bool
sequence_use_filter_invert: bool

View File

@@ -40,6 +40,7 @@ class PsaImportOptions(object):
should_use_fake_user: bool = False,
should_write_keyframes: bool = True,
should_write_metadata: bool = True,
should_write_scale_keys: bool = True,
translation_scale: float = 1.0
):
self.action_name_prefix = action_name_prefix
@@ -55,6 +56,7 @@ class PsaImportOptions(object):
self.should_use_fake_user = should_use_fake_user
self.should_write_keyframes = should_write_keyframes
self.should_write_metadata = should_write_metadata
self.should_write_scale_keys = should_write_scale_keys
self.translation_scale = translation_scale
@@ -73,7 +75,7 @@ class ImportBone(object):
def _calculate_fcurve_data(import_bone: ImportBone, key_data: Sequence[float]):
# Convert world-space transforms to local-space transforms.
key_rotation = Quaternion(key_data[0:4])
key_location = Vector(key_data[4:])
key_location = Vector(key_data[4:7])
q = import_bone.post_rotation.copy()
q.rotate(import_bone.original_rotation)
rotation = q
@@ -85,7 +87,8 @@ def _calculate_fcurve_data(import_bone: ImportBone, key_data: Sequence[float]):
rotation.rotate(q.conjugated())
location = key_location - import_bone.original_location
location.rotate(import_bone.post_rotation.conjugated())
return rotation.w, rotation.x, rotation.y, rotation.z, location.x, location.y, location.z
scale = Vector(key_data[7:10])
return rotation.w, rotation.x, rotation.y, rotation.z, location.x, location.y, location.z, scale.x, scale.y, scale.z
class PsaImportResult:
@@ -169,6 +172,34 @@ def _resample_sequence_data_matrix(sequence_data_matrix: np.ndarray, frame_step:
return resampled_sequence_data_matrix
def _read_sequence_data_matrix(psa_reader: PsaReader, sequence_name: str) -> np.ndarray:
    """
    Build the full pose-key matrix for a single sequence.

    The last axis holds, in order: Qw, Qx, Qy, Qz, Lx, Ly, Lz, Sx, Sy, Sz

    @param sequence_name: The name of the sequence.
    @return: An FxBx10 matrix where F is the number of frames, B is the number of bones.
    """
    sequence = psa_reader.sequences[sequence_name]
    keys = psa_reader.read_sequence_keys(sequence_name)
    bone_count = len(psa_reader.bones)
    frame_count = sequence.frame_count

    # Initialize with ones so that the scale slots default to unit scale for
    # sequences that carry no scale keys.
    matrix = np.ones((frame_count, bone_count, 10))

    # Rotation and location: keys are stored frame-major, one key per bone per frame.
    key_iter = iter(keys)
    for frame_index in range(frame_count):
        for bone_index in range(bone_count):
            matrix[frame_index, bone_index, :7] = list(next(key_iter).data)

    # Scale keys are optional; only overwrite the defaults when they exist.
    scale_keys = psa_reader.read_sequence_scale_keys(sequence_name)
    if len(scale_keys) > 0:
        scale_iter = iter(scale_keys)
        for frame_index in range(frame_count):
            for bone_index in range(bone_count):
                matrix[frame_index, bone_index, 7:] = list(next(scale_iter).data)

    return matrix
def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object, options: PsaImportOptions) -> PsaImportResult:
assert context.window_manager
@@ -311,8 +342,10 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
pose_bone = import_bone.pose_bone
rotation_data_path = pose_bone.path_from_id('rotation_quaternion')
location_data_path = pose_bone.path_from_id('location')
scale_data_path = pose_bone.path_from_id('scale')
add_rotation_fcurves = (bone_track_flags & REMOVE_TRACK_ROTATION) == 0
add_location_fcurves = (bone_track_flags & REMOVE_TRACK_LOCATION) == 0
add_scale_fcurves = psa_reader.has_scale_keys and options.should_write_scale_keys
import_bone.fcurves = [
channelbag.fcurves.ensure(rotation_data_path, index=0, group_name=pose_bone.name) if add_rotation_fcurves else None, # Qw
channelbag.fcurves.ensure(rotation_data_path, index=1, group_name=pose_bone.name) if add_rotation_fcurves else None, # Qx
@@ -321,14 +354,17 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
channelbag.fcurves.ensure(location_data_path, index=0, group_name=pose_bone.name) if add_location_fcurves else None, # Lx
channelbag.fcurves.ensure(location_data_path, index=1, group_name=pose_bone.name) if add_location_fcurves else None, # Ly
channelbag.fcurves.ensure(location_data_path, index=2, group_name=pose_bone.name) if add_location_fcurves else None, # Lz
channelbag.fcurves.ensure(scale_data_path, index=0, group_name=pose_bone.name) if add_scale_fcurves else None, # Sx
channelbag.fcurves.ensure(scale_data_path, index=1, group_name=pose_bone.name) if add_scale_fcurves else None, # Sy
channelbag.fcurves.ensure(scale_data_path, index=2, group_name=pose_bone.name) if add_scale_fcurves else None, # Sz
]
# Read the sequence data matrix from the PSA.
sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name)
sequence_data_matrix = _read_sequence_data_matrix(psa_reader, sequence_name)
if options.translation_scale != 1.0:
# Scale the translation data.
sequence_data_matrix[:, :, 4:] *= options.translation_scale
sequence_data_matrix[:, :, 4:7] *= options.translation_scale
# Convert the sequence's data from world-space to local-space.
for bone_index, import_bone in enumerate(import_bones):
@@ -366,7 +402,7 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
if options.should_convert_to_samples:
# Bake the curve to samples.
for fcurve in action.fcurves:
for fcurve in channelbag.fcurves:
fcurve.convert_to_samples(start=0, end=sequence.frame_count)
# Write meta-data.

View File

@@ -37,3 +37,29 @@ def test_psa_import_all():
EXPECTED_ACTION_COUNT = 135
assert len(bpy.data.actions) == EXPECTED_ACTION_COUNT, \
f"Expected {EXPECTED_ACTION_COUNT} actions, but found {len(bpy.data.actions)}."
def test_psa_import_convert_to_samples():
    """Import the PSK, then import its PSA with curves baked to samples."""
    # The PSA importer needs a target armature, so bring in the PSK mesh first.
    psk_result = bpy.ops.psk.import_file(
        filepath=SHREK_PSK_FILEPATH,
        components='ALL',
    )
    assert psk_result == {'FINISHED'}, "PSK import failed."

    armature = bpy.data.objects.get('Shrek', None)
    assert armature is not None, "Armature object not found in the scene."
    assert armature.type == 'ARMATURE', "Object is not of type ARMATURE."

    # Make the armature active and selected so the PSA operator targets it.
    bpy.context.view_layer.objects.active = armature
    armature.select_set(True)

    # Import every sequence from the PSA and bake the f-curves to samples.
    psa_result = bpy.ops.psa.import_all(
        filepath=SHREK_PSA_FILEPATH,
        should_convert_to_samples=True
    )
    assert psa_result == {'FINISHED'}, "PSA import failed."

    # TODO: More thorough tests on the imported data for the animations.
    EXPECTED_ACTION_COUNT = 135
    assert len(bpy.data.actions) == EXPECTED_ACTION_COUNT, \
        f"Expected {EXPECTED_ACTION_COUNT} actions, but found {len(bpy.data.actions)}."

View File

@@ -1,3 +1,3 @@
pytest
pytest-cov
psk-psa-py
psk-psa-py == 0.0.4