Compare commits

...

4 Commits

Author  SHA1  Message  Date
Colin Basnett  9d3ef50907  Merge branch 'blender-4.1' into scale_keys  2024-03-14 19:22:16 -07:00
               # Conflicts:
               #   io_scene_psk_psa/psa/import_/operators.py
               #   io_scene_psk_psa/psa/importer.py
Colin Basnett  bfdf1eb736  PSK files imported with custom normals will now have Auto Smooth enabled by default (#67)  2024-01-22 11:26:09 -08:00
Colin Basnett  f2b5858635  Merge branch 'master' into scale_keys  2024-01-20 15:41:54 -08:00
Colin Basnett  563172ae23  Initial commit for handling of SCALEKEYS block  2023-11-07 18:38:24 -08:00
6 changed files with 104 additions and 4 deletions

View File

@@ -58,7 +58,27 @@ class Psa:
        def __repr__(self) -> str:
            return repr((self.location, self.rotation, self.time))

    class ScaleKey(Structure):
        _fields_ = [
            ('scale', Vector3),
            ('time', c_float)
        ]

        @property
        def data(self):
            yield self.scale.x
            yield self.scale.y
            yield self.scale.z

        def __repr__(self) -> str:
            return repr((self.scale, self.time))

    def __init__(self):
        self.bones: List[Psa.Bone] = []
        self.sequences: typing.OrderedDict[str, Psa.Sequence] = OrderedDict()
        self.keys: List[Psa.Key] = []
        self.scale_keys: List[Psa.ScaleKey] = []

    @property
    def has_scale_keys(self):
        return len(self.scale_keys) > 0
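
For reference, the SCALEKEYS record described by the new ScaleKey structure is three scale components followed by a time value. The snippet below is a standalone sketch of that layout; it mirrors the structures rather than importing them from the add-on, and it assumes Vector3 is three c_float fields on a little-endian machine, which makes each record 16 bytes:

# Standalone sketch: illustrative mirrors of the add-on's Vector3 and ScaleKey,
# used only to show how one SCALEKEYS record parses from raw bytes.
from ctypes import Structure, c_float, sizeof
import struct

class Vector3(Structure):
    _fields_ = [('x', c_float), ('y', c_float), ('z', c_float)]

class ScaleKey(Structure):
    _fields_ = [('scale', Vector3), ('time', c_float)]

# One fabricated record: uniform scale of 1.0 held for one frame at 30 fps.
buffer = struct.pack('<4f', 1.0, 1.0, 1.0, 1.0 / 30.0)
assert sizeof(ScaleKey) == len(buffer) == 16

key = ScaleKey.from_buffer_copy(buffer)
print(key.scale.x, key.scale.y, key.scale.z, key.time)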

View File

@@ -153,6 +153,7 @@ class PSA_OT_import(Operator, ImportHelper):
        options.should_overwrite = pg.should_overwrite
        options.should_write_metadata = pg.should_write_metadata
        options.should_write_keyframes = pg.should_write_keyframes
        options.should_write_scale_keys = pg.should_write_scale_keys
        options.should_convert_to_samples = pg.should_convert_to_samples
        options.bone_mapping_mode = pg.bone_mapping_mode
        options.fps_source = pg.fps_source
@@ -233,6 +234,7 @@ class PSA_OT_import(Operator, ImportHelper):
        col.use_property_decorate = False
        col.prop(pg, 'should_write_keyframes')
        col.prop(pg, 'should_write_metadata')
        col.prop(pg, 'should_write_scale_keys')
        if pg.should_write_keyframes:
            col = col.column(heading='Keyframes')

View File

@@ -47,6 +47,8 @@ class PSA_PG_import(PropertyGroup):
                                   description='If an action with a matching name already exists, the existing action '
                                               'will have its data overwritten instead of a new action being created')
    should_write_keyframes: BoolProperty(default=True, name='Keyframes', options=empty_set)
    should_write_scale_keys: BoolProperty(default=True, name='Scale Keys', options=empty_set,
                                          description='Import scale keys, if available')
    should_write_metadata: BoolProperty(default=True, name='Metadata', options=empty_set,
                                        description='Additional data will be written to the custom properties of the '
                                                    'Action (e.g., frame rate)')

View File

@@ -1,5 +1,5 @@
import typing
from typing import List, Optional, Iterable
import bpy
import numpy as np
@@ -19,6 +19,7 @@ class PsaImportOptions(object):
        self.should_overwrite = False
        self.should_write_keyframes = True
        self.should_write_metadata = True
        self.should_write_scale_keys = True
        self.action_name_prefix = ''
        self.should_convert_to_samples = False
        self.bone_mapping_mode = 'CASE_INSENSITIVE'
@@ -38,9 +39,10 @@ class ImportBone(object):
        self.original_rotation: Quaternion = Quaternion()
        self.post_rotation: Quaternion = Quaternion()
        self.fcurves: List[FCurve] = []
        self.scale_fcurves: List[FCurve] = []


def _calculate_fcurve_data(import_bone: ImportBone, key_data: Iterable[float]):
    # Convert world-space transforms to local-space transforms.
    key_rotation = Quaternion(key_data[0:4])
    key_location = Vector(key_data[4:])
@@ -256,6 +258,14 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
            action.fcurves.new(location_data_path, index=1, action_group=pose_bone.name) if add_location_fcurves else None,  # Ly
            action.fcurves.new(location_data_path, index=2, action_group=pose_bone.name) if add_location_fcurves else None,  # Lz
        ]

        if options.should_write_scale_keys:
            scale_data_path = pose_bone.path_from_id('scale')
            import_bone.scale_fcurves += [
                action.fcurves.new(scale_data_path, index=0, action_group=pose_bone.name),  # Sx
                action.fcurves.new(scale_data_path, index=1, action_group=pose_bone.name),  # Sy
                action.fcurves.new(scale_data_path, index=2, action_group=pose_bone.name),  # Sz
            ]
        # Read the sequence data matrix from the PSA.
        sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name)
@@ -293,6 +303,22 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
                    for fcurve_keyframe in fcurve.keyframe_points:
                        fcurve_keyframe.interpolation = 'LINEAR'

        if options.should_write_scale_keys:
            sequence_scale_data_matrix = psa_reader.read_sequence_scale_key_data_matrix(sequence_name)
            # Write the scale keys out.
            fcurve_data = np.zeros(2 * sequence.frame_count, dtype=float)
            # Populate the keyframe time data.
            fcurve_data[0::2] = [x * keyframe_time_dilation for x in range(sequence.frame_count)]
            for bone_index, import_bone in enumerate(import_bones):
                if import_bone is None:
                    continue
                for fcurve_index, fcurve in enumerate(import_bone.scale_fcurves):
                    fcurve_data[1::2] = sequence_scale_data_matrix[:, bone_index, fcurve_index]
                    fcurve.keyframe_points.add(sequence.frame_count)
                    fcurve.keyframe_points.foreach_set('co', fcurve_data)
                    for fcurve_keyframe in fcurve.keyframe_points:
                        fcurve_keyframe.interpolation = 'LINEAR'
        if options.should_convert_to_samples:
            # Bake the curve to samples.
            for fcurve in action.fcurves:
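
The scale-key hunk above fills a flat buffer in which even indices hold frame times and odd indices hold channel values, then hands it to keyframe_points.foreach_set('co', ...) in one call. A minimal NumPy-only sketch of that interleaving, with fabricated per-frame values and no Blender dependency:

# Sketch of the interleaved keyframe buffer: foreach_set('co', data) expects
# a flat [t0, v0, t1, v1, ...] sequence, one (time, value) pair per keyframe.
import numpy as np

frame_count = 4
keyframe_time_dilation = 1.0                      # illustrative value
scale_x = np.array([1.0, 1.1, 1.2, 1.3])          # hypothetical per-frame Sx channel

fcurve_data = np.zeros(2 * frame_count, dtype=float)
fcurve_data[0::2] = [x * keyframe_time_dilation for x in range(frame_count)]  # frame times
fcurve_data[1::2] = scale_x                                                   # channel values

# fcurve_data is now [0.0, 1.0, 1.0, 1.1, 2.0, 1.2, 3.0, 1.3], ready to be passed to
# fcurve.keyframe_points.foreach_set('co', fcurve_data) inside Blender.
print(fcurve_data)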

View File

@@ -1,4 +1,5 @@
import ctypes
from typing import Optional
import numpy as np
@@ -31,6 +32,7 @@ class PsaReader(object):
    def __init__(self, path):
        self.keys_data_offset: int = 0
        self.scale_keys_data_offset: Optional[int] = None
        self.fp = open(path, 'rb')
        self.psa: Psa = self._read(self.fp)
@@ -64,9 +66,9 @@ class PsaReader(object):
        Reads and returns the key data for a sequence.

        @param sequence_name: The name of the sequence.
        @return: A list of keys for the sequence.
        """
        # Set the file reader to the beginning of the key data.
        sequence = self.psa.sequences[sequence_name]
        data_size = sizeof(Psa.Key)
        bone_count = len(self.psa.bones)
@@ -82,6 +84,49 @@ class PsaReader(object):
            offset += data_size
        return keys

    def read_sequence_scale_key_data_matrix(self, sequence_name: str) -> np.ndarray:
        """
        Reads and returns the scale key data matrix for the given sequence.
        @param sequence_name: The name of the sequence.
        @return: An FxBx3 matrix where F is the number of frames and B is the number of bones.
        """
        sequence = self.psa.sequences[sequence_name]
        scale_keys = self.read_sequence_scale_keys(sequence_name)
        bone_count = len(self.psa.bones)
        matrix_size = sequence.frame_count, bone_count, 3
        matrix = np.ones(matrix_size)
        keys_iter = iter(scale_keys)
        for frame_index in range(sequence.frame_count):
            for bone_index in range(bone_count):
                matrix[frame_index, bone_index, :] = tuple(next(keys_iter).data)
        return matrix

    def read_sequence_scale_keys(self, sequence_name: str) -> List[Psa.ScaleKey]:
        """
        Reads and returns the scale key data for a sequence.
        Raises a RuntimeError if the PSA does not contain scale keys (use Psa.has_scale_keys to check).
        @param sequence_name: The name of the sequence.
        @return: A list of scale keys for the sequence.
        """
        if not self.psa.has_scale_keys:
            raise RuntimeError('The PSA file does not contain scale keys.')
        # Set the file reader to the beginning of the scale key data.
        sequence = self.psa.sequences[sequence_name]
        data_size = sizeof(Psa.ScaleKey)
        bone_count = len(self.psa.bones)
        buffer_length = data_size * bone_count * sequence.frame_count
        sequence_scale_keys_offset = self.scale_keys_data_offset + (sequence.frame_start_index * bone_count * data_size)
        self.fp.seek(sequence_scale_keys_offset, 0)
        buffer = self.fp.read(buffer_length)
        offset = 0
        scale_keys = []
        for _ in range(sequence.frame_count * bone_count):
            scale_key = Psa.ScaleKey.from_buffer_copy(buffer, offset)
            scale_keys.append(scale_key)
            offset += data_size
        return scale_keys

    @staticmethod
    def _read_types(fp, data_class, section: Section, data):
        buffer_length = section.data_size * section.data_count
@@ -111,6 +156,10 @@ class PsaReader(object):
                # Skip keys on this pass. We will keep this file open and read from it as needed.
                self.keys_data_offset = fp.tell()
                fp.seek(section.data_size * section.data_count, 1)
            elif section.name == b'SCALEKEYS':
                # Skip scale keys on this pass. We will keep this file open and read from it as needed.
                self.scale_keys_data_offset = fp.tell()
                fp.seek(section.data_size * section.data_count, 1)
            else:
                fp.seek(section.data_size * section.data_count, 1)
                print(f'Unrecognized section in PSA: "{section.name}"')
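
Taken together, the reader changes record the SCALEKEYS section offset while scanning the file and expose the data through the two new methods. A hedged usage sketch follows; the module path, file path, and sequence choice are placeholders, and it assumes the add-on package is importable (for example, from Blender's Python console):

# Hypothetical end-to-end read of the scale keys for the first sequence in a PSA.
from io_scene_psk_psa.psa.reader import PsaReader  # module path assumed

reader = PsaReader('/path/to/animation.psa')  # placeholder path

if reader.psa.has_scale_keys:
    sequence_name = next(iter(reader.psa.sequences))  # first sequence, for illustration
    # FxBx3 array: one (sx, sy, sz) triple per frame and bone, defaulting to 1.0.
    scale_matrix = reader.read_sequence_scale_key_data_matrix(sequence_name)
    print(sequence_name, scale_matrix.shape)
else:
    print('This PSA file has no SCALEKEYS section.')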

View File

@@ -231,6 +231,7 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
        for vertex_normal in psk.vertex_normals:
            normals.append(tuple(vertex_normal))
        mesh_data.normals_split_custom_set_from_vertices(normals)
        mesh_data.use_auto_smooth = True
    else:
        mesh_data.shade_smooth()
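
A side note on the Auto Smooth change: newer Blender releases dropped the use_auto_smooth mesh flag, so a build targeting both older and newer versions might guard the assignment. A minimal hedged sketch (the exact removal version is an assumption, suggested by the 'blender-4.1' branch in the commits above):

# Only set the flag where the attribute still exists (removed in newer Blender versions).
if hasattr(mesh_data, 'use_auto_smooth'):
    mesh_data.use_auto_smooth = True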