Compare commits

..

17 Commits

Author SHA1 Message Date
Colin Basnett  9d3ef50907  Merge branch 'blender-4.1' into scale_keys  2024-03-14 19:22:16 -07:00
               # Conflicts:
               #	io_scene_psk_psa/psa/import_/operators.py
               #	io_scene_psk_psa/psa/importer.py
Colin Basnett  fb02742381  Reorganizing & renaming some things for clarity and correctness  2024-03-14 19:08:32 -07:00
Colin Basnett  d4d46bea66  PSA import dialog now uses new Blender 4.1 UI panels  2024-03-14 19:06:29 -07:00
Colin Basnett  a93450eab9  Added PSA file handler  2024-03-14 19:06:03 -07:00
Colin Basnett  c65fdaa6a4  Fixing PEP warnings  2024-03-14 19:04:12 -07:00
Colin Basnett  6b8088225a  Fix for root bone being incorrectly oriented if it wasn't at the identity rotation in the bind pose  2024-03-14 18:55:28 -07:00
Colin Basnett  e27b078866  Now handling PSKX files in the PSK file handler  2024-03-14 18:53:53 -07:00
Colin Basnett  b67c734687  Merge branch 'master' into blender-4.1  2024-03-11 18:46:24 -07:00
Colin Basnett  226e403925  Fix for syntax error  2024-03-11 18:46:18 -07:00
Colin Basnett  5d3c7cc570  Fixed PSA import resampling logic  2024-02-29 16:03:47 -08:00
Colin Basnett  11bf205fe2  Added PSA resampling on import + some fixes for 4.1  2024-02-13 14:03:04 -08:00
Colin Basnett  f7bbe911ea  Removed use_auto_smooth...again  2024-02-13 00:19:12 -08:00
Colin Basnett  8c49c8f34e  Merge branch 'master' into blender-4.1  2024-02-12 18:02:59 -08:00
Colin Basnett  bfdf1eb736  PSK files imported with custom normals will now have Auto Smooth enabled by default (#67)  2024-01-22 11:26:09 -08:00
Colin Basnett  f2b5858635  Merge branch 'master' into scale_keys  2024-01-20 15:41:54 -08:00
Colin Basnett  e9ba117fa9  Added file handler for PSK/PSKX files  2024-01-20 14:48:18 -08:00
Colin Basnett  563172ae23  Initial commit for handling of SCALEKEYS block  2023-11-07 18:38:24 -08:00
13 changed files with 252 additions and 154 deletions

View File

@@ -10,7 +10,7 @@ This Blender addon allows you to import and export meshes and animations to and
 | Blender Version | Addon Version | Long Term Support |
 |--------------------------------------------------------------|--------------------------------------------------------------------------------|-------------------|
 | 4.0+ | [latest](https://github.com/DarklightGames/io_scene_psk_psa/releases/latest) | TBD |
-| [3.4 - 3.6](https://www.blender.org/download/lts/3-6/) | [5.0.6](https://github.com/DarklightGames/io_scene_psk_psa/releases/tag/5.0.6) | ✅️ June 2025 |
+| [3.4 - 3.6](https://www.blender.org/download/lts/3-6/) | [5.0.5](https://github.com/DarklightGames/io_scene_psk_psa/releases/tag/5.0.5) | ✅️ June 2025 |
 | [2.93 - 3.3](https://www.blender.org/download/releases/3-3/) | [4.3.0](https://github.com/DarklightGames/io_scene_psk_psa/releases/tag/4.3.0) | ✅️ September 2024 |
 
 Bug fixes will be issued for legacy addon versions that are under [Blender's LTS maintenance period](https://www.blender.org/download/lts/). Once the LTS period has ended, legacy addon versions will no longer be supported by the maintainers of this repository, although we will accept pull requests for bug fixes.

View File

@@ -3,7 +3,7 @@ from bpy.app.handlers import persistent
 bl_info = {
     'name': 'PSK/PSA Importer/Exporter',
     'author': 'Colin Basnett, Yurii Ti',
-    'version': (6, 2, 1),
+    'version': (6, 2, 0),
     'blender': (4, 0, 0),
     'description': 'PSK/PSA Import/Export (.psk/.psa)',
     'warning': '',

View File

@@ -14,7 +14,7 @@ class PsaConfig:
 def _load_config_file(file_path: str) -> ConfigParser:
-    '''
+    """
     UEViewer exports a dialect of INI files that is not compatible with Python's ConfigParser.
     Specifically, it allows values in this format:
@@ -24,7 +24,7 @@ def _load_config_file(file_path: str) -> ConfigParser:
     This is not allowed in Python's ConfigParser, which requires a '=' character after each key name.
     To work around this, we'll modify the file to add the '=' character after each key name if it is missing.
-    '''
+    """
     with open(file_path, 'r') as f:
         lines = f.read().split('\n')
@@ -41,7 +41,7 @@ def _load_config_file(file_path: str) -> ConfigParser:
 def _get_bone_flags_from_value(value: str) -> int:
     match value:
         case 'all':
-            return (REMOVE_TRACK_LOCATION | REMOVE_TRACK_ROTATION)
+            return REMOVE_TRACK_LOCATION | REMOVE_TRACK_ROTATION
         case 'trans':
             return REMOVE_TRACK_LOCATION
         case 'rot':
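
Below is a small standalone sketch (not part of this diff; the sample input and helper name are assumed) of the workaround described in the docstring above: UEViewer writes bare key names with no '=', which ConfigParser rejects, so an '=' is appended to such lines before parsing.

from configparser import ConfigParser

def load_ueviewer_ini(text: str) -> ConfigParser:
    fixed_lines = []
    for line in text.splitlines():
        stripped = line.strip()
        if stripped and not stripped.startswith(('[', ';', '#')) and '=' not in stripped:
            line = line + '='  # bare key -> "key=" so ConfigParser accepts it
        fixed_lines.append(line)
    config = ConfigParser(allow_no_value=True)
    config.read_string('\n'.join(fixed_lines))
    return config

sample = '[RemoveTracks]\nBoneA.all\nBoneB=trans\n'
print(load_ueviewer_ini(sample).items('RemoveTracks'))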

View File

@@ -58,7 +58,27 @@ class Psa:
         def __repr__(self) -> str:
             return repr((self.location, self.rotation, self.time))
 
+    class ScaleKey(Structure):
+        _fields_ = [
+            ('scale', Vector3),
+            ('time', c_float)
+        ]
+
+        @property
+        def data(self):
+            yield self.scale.x
+            yield self.scale.y
+            yield self.scale.z
+
+        def __repr__(self) -> str:
+            return repr((self.scale, self.time))
+
     def __init__(self):
         self.bones: List[Psa.Bone] = []
         self.sequences: typing.OrderedDict[str, Psa.Sequence] = OrderedDict()
         self.keys: List[Psa.Key] = []
+        self.scale_keys: List[Psa.ScaleKey] = []
+
+    @property
+    def has_scale_keys(self):
+        return len(self.scale_keys) > 0
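
As a standalone illustration (no Blender required; Vector3 is replaced by three raw floats purely for this sketch), a ctypes Structure like the ScaleKey above maps a packed binary record directly, which is what lets the reader slice the SCALEKEYS block by sizeof():

from ctypes import Structure, c_float, sizeof

class ScaleKeyExample(Structure):
    # Mirrors the layout of Psa.ScaleKey above: a three-float scale plus a time value.
    _fields_ = [('x', c_float), ('y', c_float), ('z', c_float), ('time', c_float)]

packed = bytes(ScaleKeyExample(1.0, 1.0, 2.0, 0.0))  # 16 bytes as stored on disk
key = ScaleKeyExample.from_buffer_copy(packed)
print(sizeof(ScaleKeyExample), key.x, key.y, key.z, key.time)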

View File

@@ -91,14 +91,15 @@ def update_actions_and_timeline_markers(context: Context, armature: Armature):
 def get_sequence_fps(context: Context, fps_source: str, fps_custom: float, actions: Iterable[Action]) -> float:
-    if fps_source == 'SCENE':
-        return context.scene.render.fps
-    elif fps_source == 'CUSTOM':
-        return fps_custom
-    elif fps_source == 'ACTION_METADATA':
-        # Get the minimum value of action metadata FPS values.
-        return min([action.psa_export.fps for action in actions])
-    else:
-        raise RuntimeError(f'Invalid FPS source "{fps_source}"')
+    match fps_source:
+        case 'SCENE':
+            return context.scene.render.fps
+        case 'CUSTOM':
+            return fps_custom
+        case 'ACTION_METADATA':
+            # Get the minimum value of action metadata FPS values.
+            return min([action.psa_export.fps for action in actions])
+        case _:
+            raise RuntimeError(f'Invalid FPS source "{fps_source}"')

View File

@@ -32,7 +32,6 @@ class PSA_UL_export_sequences(UIList):
         subrow = row.row(align=True)
         subrow.prop(pg, 'sequence_filter_name', text='')
         subrow.prop(pg, 'sequence_use_filter_invert', text='', icon='ARROW_LEFTRIGHT')
-        # subrow.prop(pg, 'sequence_use_filter_sort_reverse', text='', icon='SORT_ASC')
 
         if pg.sequence_source == 'ACTIONS':
             subrow = row.row(align=True)
@@ -44,7 +43,6 @@ class PSA_UL_export_sequences(UIList):
         pg = getattr(context.scene, 'psa_export')
         actions = getattr(data, prop)
         flt_flags = filter_sequences(pg, actions)
-        # flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(actions, 'name')
         flt_neworder = list(range(len(actions)))
         return flt_flags, flt_neworder
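
For context, a minimal hedged sketch of the UIList filtering contract the method above relies on: filter_items returns one bitmask per item (with bitflag_filter_item set for visible items) plus a reorder list, and returning list(range(len(items))) keeps the declared order, which is what this change switches to. The class and property names below are illustrative.

import bpy

class EXAMPLE_UL_items(bpy.types.UIList):
    def filter_items(self, context, data, prop):
        items = getattr(data, prop)
        flt_flags = [self.bitflag_filter_item] * len(items)  # mark every item as visible
        flt_neworder = list(range(len(items)))                # keep the original order
        return flt_flags, flt_neworder

    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index=0, flt_flag=0):
        layout.label(text=getattr(item, 'name', str(index)))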

View File

@@ -2,7 +2,7 @@ import os
 from pathlib import Path
 
 from bpy.props import StringProperty
-from bpy.types import Operator, Event, Context
+from bpy.types import Operator, Event, Context, FileHandler
 from bpy_extras.io_utils import ImportHelper
 
 from .properties import get_visible_sequences
@@ -89,23 +89,6 @@ class PSA_OT_import_sequences_deselect_all(Operator):
         return {'FINISHED'}
 
-class PSA_OT_import_select_file(Operator):
-    bl_idname = 'psa_import.select_file'
-    bl_label = 'Select'
-    bl_options = {'INTERNAL'}
-    bl_description = 'Select a PSA file from which to import animations'
-    filepath: StringProperty(subtype='FILE_PATH')
-    filter_glob: StringProperty(default='*.psa', options={'HIDDEN'})
-
-    def execute(self, context):
-        getattr(context.scene, 'psa_import').psa_file_path = self.filepath
-        return {'FINISHED'}
-
-    def invoke(self, context, event):
-        context.window_manager.fileselect_add(self)
-        return {'RUNNING_MODAL'}
-
 def load_psa_file(context, filepath: str):
     pg = context.scene.psa_import
     pg.sequence_list.clear()
@@ -170,6 +153,7 @@ class PSA_OT_import(Operator, ImportHelper):
         options.should_overwrite = pg.should_overwrite
         options.should_write_metadata = pg.should_write_metadata
         options.should_write_keyframes = pg.should_write_keyframes
+        options.should_write_scale_keys = pg.should_write_scale_keys
         options.should_convert_to_samples = pg.should_convert_to_samples
         options.bone_mapping_mode = pg.bone_mapping_mode
         options.fps_source = pg.fps_source
@@ -207,18 +191,18 @@ class PSA_OT_import(Operator, ImportHelper):
         layout = self.layout
         pg = getattr(context.scene, 'psa_import')
 
+        sequences_header, sequences_panel = layout.panel('sequences_panel_id', default_closed=False)
+        sequences_header.label(text='Sequences')
+
+        if sequences_panel:
             if pg.psa_error:
-                row = layout.row()
+                row = sequences_panel.row()
                 row.label(text='Select a PSA file', icon='ERROR')
             else:
-                box = layout.box()
-                box.label(text=f'Sequences ({len(pg.sequence_list)})', icon='ARMATURE_DATA')
-
                 # Select buttons.
                 rows = max(3, min(len(pg.sequence_list), 10))
 
-                row = box.row()
+                row = sequences_panel.row()
                 col = row.column()
 
                 row2 = col.row(align=True)
@@ -230,44 +214,60 @@ class PSA_OT_import(Operator, ImportHelper):
                 col = col.row()
                 col.template_list('PSA_UL_import_sequences', '', pg, 'sequence_list', pg, 'sequence_list_index', rows=rows)
 
-        col = layout.column(heading='')
-        col.use_property_split = True
-        col.use_property_decorate = False
-        col.prop(pg, 'should_overwrite')
+            col = sequences_panel.column(heading='')
+            col.use_property_split = True
+            col.use_property_decorate = False
+            col.prop(pg, 'fps_source')
+            if pg.fps_source == 'CUSTOM':
+                col.prop(pg, 'fps_custom')
+            col.prop(pg, 'should_overwrite')
+            col.prop(pg, 'should_use_action_name_prefix')
+            if pg.should_use_action_name_prefix:
+                col.prop(pg, 'action_name_prefix')
 
-        col = layout.column(heading='Write')
-        col.use_property_split = True
-        col.use_property_decorate = False
-        col.prop(pg, 'should_write_keyframes')
-        col.prop(pg, 'should_write_metadata')
+        data_header, data_panel = layout.panel('data_panel_id', default_closed=False)
+        data_header.label(text='Data')
+        if data_panel:
+            col = data_panel.column(heading='Write')
+            col.use_property_split = True
+            col.use_property_decorate = False
+            col.prop(pg, 'should_write_keyframes')
+            col.prop(pg, 'should_write_metadata')
+            col.prop(pg, 'should_write_scale_keys')
 
-        col = layout.column()
-        col.use_property_split = True
-        col.use_property_decorate = False
-        col.prop(pg, 'bone_mapping_mode')
-
-        if pg.should_write_keyframes:
-            col = layout.column(heading='Keyframes')
-            col.use_property_split = True
-            col.use_property_decorate = False
-            col.prop(pg, 'should_convert_to_samples')
-            col.separator()
-            # FPS
-            col.prop(pg, 'fps_source')
-            if pg.fps_source == 'CUSTOM':
-                col.prop(pg, 'fps_custom')
+            if pg.should_write_keyframes:
+                col = col.column(heading='Keyframes')
+                col.use_property_split = True
+                col.use_property_decorate = False
+                col.prop(pg, 'should_convert_to_samples')
 
-        col = layout.column(heading='Options')
-        col.use_property_split = True
-        col.use_property_decorate = False
-        col.prop(pg, 'should_use_fake_user')
-        col.prop(pg, 'should_stash')
-        col.prop(pg, 'should_use_config_file')
-        col.prop(pg, 'should_use_action_name_prefix')
-        if pg.should_use_action_name_prefix:
-            col.prop(pg, 'action_name_prefix')
+        advanced_header, advanced_panel = layout.panel('advanced_panel_id', default_closed=True)
+        advanced_header.label(text='Advanced')
+        if advanced_panel:
+            col = advanced_panel.column()
+            col.use_property_split = True
+            col.use_property_decorate = False
+            col.prop(pg, 'bone_mapping_mode')
+
+            col = advanced_panel.column(heading='Options')
+            col.use_property_split = True
+            col.use_property_decorate = False
+            col.prop(pg, 'should_use_fake_user')
+            col.prop(pg, 'should_stash')
+            col.prop(pg, 'should_use_config_file')
+
+
+class PSA_FH_import(FileHandler):
+    bl_idname = 'PSA_FH_import'
+    bl_label = 'File handler for Unreal PSA import'
+    bl_import_operator = 'psa_import.import'
+    bl_file_extensions = '.psa'
+
+    @classmethod
+    def poll_drop(cls, context: Context):
+        return context.area and context.area.type == 'VIEW_3D'
 
 classes = (
@@ -275,5 +275,5 @@ classes = (
     PSA_OT_import_sequences_deselect_all,
     PSA_OT_import_sequences_from_text,
     PSA_OT_import,
-    PSA_OT_import_select_file,
+    PSA_FH_import,
 )
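
For reference, a minimal sketch (assumed names, not part of the diff) of how a FileHandler such as PSA_FH_import above is registered so that dropping a .psa file onto the 3D Viewport invokes the import operator in Blender 4.1+; the referenced operator must expose a filepath property, which PSA_OT_import gets from ImportHelper.

import bpy
from bpy.types import Context, FileHandler

class EXAMPLE_FH_import(FileHandler):
    bl_idname = 'EXAMPLE_FH_import'
    bl_label = 'Example drag-and-drop handler'
    bl_import_operator = 'psa_import.import'  # invoked with the dropped file's path
    bl_file_extensions = '.psa'

    @classmethod
    def poll_drop(cls, context: Context):
        # Only accept drops over the 3D Viewport, mirroring the class above.
        return context.area is not None and context.area.type == 'VIEW_3D'

def register():
    bpy.utils.register_class(EXAMPLE_FH_import)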

View File

@@ -47,6 +47,8 @@ class PSA_PG_import(PropertyGroup):
                                    description='If an action with a matching name already exists, the existing action '
                                                'will have it\'s data overwritten instead of a new action being created')
     should_write_keyframes: BoolProperty(default=True, name='Keyframes', options=empty_set)
+    should_write_scale_keys: BoolProperty(default=True, name='Scale Keys', options=empty_set, description=
+                                          'Import scale keys, if available')
     should_write_metadata: BoolProperty(default=True, name='Metadata', options=empty_set,
                                         description='Additional data will be written to the custom properties of the '
                                                     'Action (e.g., frame rate)')
@@ -71,18 +73,17 @@ class PSA_PG_import(PropertyGroup):
         ('EXACT', 'Exact', 'Bone names must match exactly.', 'EXACT', 0),
         ('CASE_INSENSITIVE', 'Case Insensitive', 'Bones names must match, ignoring case (e.g., the bone PSA bone '
                              '\'root\' can be mapped to the armature bone \'Root\')', 'CASE_INSENSITIVE', 1),
-        ),
-        default='CASE_INSENSITIVE'
+        )
     )
     fps_source: EnumProperty(name='FPS Source', items=(
         ('SEQUENCE', 'Sequence', 'The sequence frame rate matches the original frame rate', 'ACTION', 0),
-        ('SCENE', 'Scene', 'The sequence frame rate dilates to match that of the scene', 'SCENE_DATA', 1),
-        ('CUSTOM', 'Custom', 'The sequence frame rate dilates to match a custom frame rate', 2),
+        ('SCENE', 'Scene', 'The sequence is resampled to the frame rate of the scene', 'SCENE_DATA', 1),
+        ('CUSTOM', 'Custom', 'The sequence is resampled to a custom frame rate', 2),
     ))
     fps_custom: FloatProperty(
         default=30.0,
         name='Custom FPS',
-        description='The frame rate to which the imported actions will be converted',
+        description='The frame rate to which the imported sequences will be resampled to',
         options=empty_set,
         min=1.0,
         soft_min=1.0,

View File

@@ -1,5 +1,5 @@
 import typing
-from typing import List, Optional
+from typing import List, Optional, Iterable
 
 import bpy
 import numpy as np
@@ -19,6 +19,7 @@ class PsaImportOptions(object):
         self.should_overwrite = False
         self.should_write_keyframes = True
         self.should_write_metadata = True
+        self.should_write_scale_keys = True
         self.action_name_prefix = ''
         self.should_convert_to_samples = False
         self.bone_mapping_mode = 'CASE_INSENSITIVE'
@@ -38,24 +39,25 @@ class ImportBone(object):
         self.original_rotation: Quaternion = Quaternion()
         self.post_rotation: Quaternion = Quaternion()
         self.fcurves: List[FCurve] = []
+        self.scale_fcurves: List[FCurve] = []
 
-def _calculate_fcurve_data(import_bone: ImportBone, key_data: typing.Iterable[float]):
+def _calculate_fcurve_data(import_bone: ImportBone, key_data: Iterable[float]):
     # Convert world-space transforms to local-space transforms.
     key_rotation = Quaternion(key_data[0:4])
     key_location = Vector(key_data[4:])
     q = import_bone.post_rotation.copy()
     q.rotate(import_bone.original_rotation)
-    quat = q
+    rotation = q
     q = import_bone.post_rotation.copy()
     if import_bone.parent is None:
         q.rotate(key_rotation.conjugated())
     else:
         q.rotate(key_rotation)
-    quat.rotate(q.conjugated())
-    loc = key_location - import_bone.original_location
-    loc.rotate(import_bone.post_rotation.conjugated())
-    return quat.w, quat.x, quat.y, quat.z, loc.x, loc.y, loc.z
+    rotation.rotate(q.conjugated())
+    location = key_location - import_bone.original_location
+    location.rotate(import_bone.post_rotation.conjugated())
+    return rotation.w, rotation.x, rotation.y, rotation.z, location.x, location.y, location.z
 
 
 class PsaImportResult:
@@ -79,49 +81,48 @@ def _get_armature_bone_index_for_psa_bone(psa_bone_name: str, armature_bone_name
             return armature_bone_index
     return None
 
-def _resample_sequence_data_matrix(sequence_data_matrix: np.ndarray, time_step: float = 1.0) -> np.ndarray:
-    '''
-    Resamples the sequence data matrix to the target frame count.
-    @param sequence_data_matrix: FxBx7 matrix where F is the number of frames, B is the number of bones, and X is the
-    number of data elements per bone.
-    @param target_frame_count: The number of frames to resample to.
-    @return: The resampled sequence data matrix, or sequence_data_matrix if no resampling is necessary.
-    '''
-    def get_sample_times(source_frame_count: int, time_step: float) -> typing.Iterable[float]:
-        # TODO: for correctness, we should also emit the target frame time as well (because the last frame can be a
-        # fractional frame).
-        time = 0.0
-        while time < source_frame_count - 1:
-            yield time
-            time += time_step
-        yield source_frame_count - 1
-
-    if time_step == 1.0:
+def _get_sample_frame_times(source_frame_count: int, frame_step: float) -> typing.Iterable[float]:
+    # TODO: for correctness, we should also emit the target frame time as well (because the last frame can be a
+    # fractional frame).
+    time = 0.0
+    while time < source_frame_count - 1:
+        yield time
+        time += frame_step
+    yield source_frame_count - 1
+
+
+def _resample_sequence_data_matrix(sequence_data_matrix: np.ndarray, frame_step: float = 1.0) -> np.ndarray:
+    """
+    Resamples the sequence data matrix to the target frame count.
+    @param sequence_data_matrix: FxBx7 matrix where F is the number of frames, B is the number of bones, and X is the
+    number of data elements per bone.
+    @param frame_step: The step between frames in the resampled sequence.
+    @return: The resampled sequence data matrix, or sequence_data_matrix if no resampling is necessary.
+    """
+    if frame_step == 1.0:
         # No resampling is necessary.
         return sequence_data_matrix
 
     source_frame_count, bone_count = sequence_data_matrix.shape[:2]
-    sample_times = list(get_sample_times(source_frame_count, time_step))
-    target_frame_count = len(sample_times)
+    sample_frame_times = list(_get_sample_frame_times(source_frame_count, frame_step))
+    target_frame_count = len(sample_frame_times)
     resampled_sequence_data_matrix = np.zeros((target_frame_count, bone_count, 7), dtype=float)
 
-    for sample_index, sample_time in enumerate(sample_times):
-        frame_index = int(sample_time)
-        if sample_time % 1.0 == 0.0:
+    for sample_frame_index, sample_frame_time in enumerate(sample_frame_times):
+        frame_index = int(sample_frame_time)
+        if sample_frame_time % 1.0 == 0.0:
             # Sample time has no fractional part, so just copy the frame.
-            resampled_sequence_data_matrix[sample_index, :, :] = sequence_data_matrix[frame_index, :, :]
+            resampled_sequence_data_matrix[sample_frame_index, :, :] = sequence_data_matrix[frame_index, :, :]
         else:
             # Sample time has a fractional part, so interpolate between two frames.
             next_frame_index = frame_index + 1
             for bone_index in range(bone_count):
                 source_frame_1_data = sequence_data_matrix[frame_index, bone_index, :]
                 source_frame_2_data = sequence_data_matrix[next_frame_index, bone_index, :]
-                factor = sample_time - frame_index
+                factor = sample_frame_time - frame_index
                 q = Quaternion((source_frame_1_data[:4])).slerp(Quaternion((source_frame_2_data[:4])), factor)
                 q.normalize()
                 l = Vector(source_frame_1_data[4:]).lerp(Vector(source_frame_2_data[4:]), factor)
-                resampled_sequence_data_matrix[sample_index, bone_index, :] = q.w, q.x, q.y, q.z, l.x, l.y, l.z
+                resampled_sequence_data_matrix[sample_frame_index, bone_index, :] = q.w, q.x, q.y, q.z, l.x, l.y, l.z
 
     return resampled_sequence_data_matrix
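
As a standalone illustration of the resampling above (NumPy only, no Blender required; the 30 and 24 fps figures are made up), frame_step is the ratio of the sequence frame rate to the target frame rate, and fractional sample times fall between source frames and are later interpolated (the importer slerps rotations and lerps locations).

import numpy as np

def sample_frame_times(source_frame_count: int, frame_step: float):
    time = 0.0
    while time < source_frame_count - 1:
        yield time
        time += frame_step
    yield float(source_frame_count - 1)

sequence_fps, scene_fps = 30.0, 24.0
frame_step = sequence_fps / scene_fps   # same expression as frame_step=sequence.fps / target_fps below
times = np.array(list(sample_frame_times(10, frame_step)))
print(np.round(times, 3))               # fractional entries are interpolated between neighbouring frames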
@@ -144,7 +145,7 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
         if armature_bone_index is not None:
             # Ensure that no other PSA bone has been mapped to this armature bone yet.
             if armature_bone_index not in armature_to_psa_bone_indices:
-                psa_to_armature_bone_indices[psa_bone_index] = armature_bone_index
+                psa_to_armature_bone_indices[psa_bone_index] = armature_bone_names.index(psa_bone_name)
                 armature_to_psa_bone_indices[armature_bone_index] = psa_bone_index
             else:
                 # This armature bone has already been mapped to a PSA bone.
@@ -173,7 +174,7 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
     # Create intermediate bone data for import operations.
     import_bones = []
-    psa_bone_names_to_import_bones = dict()
+    import_bones_dict = dict()
 
     for (psa_bone_index, psa_bone), psa_bone_name in zip(enumerate(psa_reader.bones), psa_bone_names):
         if psa_bone_index not in psa_to_armature_bone_indices:
@@ -183,22 +184,17 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
         import_bone = ImportBone(psa_bone)
         import_bone.armature_bone = armature_data.bones[psa_bone_name]
         import_bone.pose_bone = armature_object.pose.bones[psa_bone_name]
-        psa_bone_names_to_import_bones[psa_bone_name] = import_bone
+        import_bones_dict[psa_bone_name] = import_bone
         import_bones.append(import_bone)
 
-    bones_with_missing_parents = []
-
     for import_bone in filter(lambda x: x is not None, import_bones):
         armature_bone = import_bone.armature_bone
-        has_parent = armature_bone.parent is not None
-        if has_parent:
-            if armature_bone.parent.name in psa_bone_names:
-                import_bone.parent = psa_bone_names_to_import_bones[armature_bone.parent.name]
-            else:
-                # Add a warning if the parent bone is not in the PSA.
-                bones_with_missing_parents.append(armature_bone)
+
+        if armature_bone.parent is not None and armature_bone.parent.name in psa_bone_names:
+            import_bone.parent = import_bones_dict[armature_bone.parent.name]
 
         # Calculate the original location & rotation of each bone (in world-space maybe?)
-        if has_parent:
+        if import_bone.parent is not None:
             import_bone.original_location = armature_bone.matrix_local.translation - armature_bone.parent.matrix_local.translation
             import_bone.original_location.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
             import_bone.original_rotation = armature_bone.matrix_local.to_quaternion()
@@ -210,12 +206,6 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
             import_bone.post_rotation = import_bone.original_rotation.conjugated()
 
-    # Warn about bones with missing parents.
-    if len(bones_with_missing_parents) > 0:
-        count = len(bones_with_missing_parents)
-        message = f'{count} bone(s) have parents that are not present in the PSA:\n' + str([x.name for x in bones_with_missing_parents])
-        result.warnings.append(message)
-
     context.window_manager.progress_begin(0, len(sequences))
 
     # Create and populate the data for new sequences.
@@ -269,6 +259,14 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
                 action.fcurves.new(location_data_path, index=2, action_group=pose_bone.name) if add_location_fcurves else None,  # Lz
             ]
 
+            if options.should_write_scale_keys:
+                scale_data_path = pose_bone.path_from_id('scale')
+                import_bone.fcurves += [
+                    action.fcurves.new(scale_data_path, index=0, action_group=pose_bone.name),  # Sx
+                    action.fcurves.new(scale_data_path, index=1, action_group=pose_bone.name),  # Sy
+                    action.fcurves.new(scale_data_path, index=2, action_group=pose_bone.name),  # Sz
+                ]
+
         # Read the sequence data matrix from the PSA.
         sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name)
@@ -285,7 +283,7 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
         # Resample the sequence data to the target FPS.
         # If the target frame count is the same as the source frame count, this will be a no-op.
         resampled_sequence_data_matrix = _resample_sequence_data_matrix(sequence_data_matrix,
-                                                                        time_step=sequence.fps / target_fps)
+                                                                        frame_step=sequence.fps / target_fps)
 
         # Write the keyframes out.
         # Note that the f-curve data consists of alternating time and value data.
@@ -305,6 +303,22 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
                 for fcurve_keyframe in fcurve.keyframe_points:
                     fcurve_keyframe.interpolation = 'LINEAR'
 
+        if options.should_write_scale_keys:
+            sequence_scale_data_matrix = psa_reader.read_sequence_scale_key_data_matrix(sequence_name)
+
+            # Write the scale keys out.
+            fcurve_data = numpy.zeros(2 * sequence.frame_count, dtype=float)
+
+            # Populate the keyframe time data.
+            fcurve_data[0::2] = [x * keyframe_time_dilation for x in range(sequence.frame_count)]
+
+            for bone_index, import_bone in enumerate(import_bones):
+                if import_bone is None:
+                    continue
+                for fcurve_index, fcurve in enumerate(import_bone.scale_fcurves):
+                    fcurve_data[1::2] = sequence_scale_data_matrix[:, bone_index, fcurve_index]
+                    fcurve.keyframe_points.add(sequence.frame_count)
+                    fcurve.keyframe_points.foreach_set('co', fcurve_data)
+                    for fcurve_keyframe in fcurve.keyframe_points:
+                        fcurve_keyframe.interpolation = 'LINEAR'
+
         if options.should_convert_to_samples:
             # Bake the curve to samples.
             for fcurve in action.fcurves:
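
A standalone sketch (assumed values, NumPy only) of the flat keyframe buffer used above: keyframe_points.foreach_set('co', ...) expects alternating frame/value pairs, which is why the times go into fcurve_data[0::2] and the channel values into fcurve_data[1::2].

import numpy as np

frame_count = 4
scale_x = np.array([1.0, 1.1, 1.2, 1.0])   # illustrative per-frame scale values for one channel
keyframe_time_dilation = 1.25              # e.g. a 30 fps sequence written into a 24 fps scene

fcurve_data = np.zeros(2 * frame_count, dtype=float)
fcurve_data[0::2] = [frame * keyframe_time_dilation for frame in range(frame_count)]  # keyframe times
fcurve_data[1::2] = scale_x                                                           # keyframe values
print(fcurve_data.reshape(-1, 2))  # each row is one (frame, value) pair, as foreach_set('co', ...) expects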

View File

@@ -1,4 +1,5 @@
 import ctypes
+from typing import Optional
 
 import numpy as np
@@ -23,14 +24,15 @@ def _try_fix_cue4parse_issue_103(sequences) -> bool:
 class PsaReader(object):
-    '''
+    """
     This class reads the sequences and bone information immediately upon instantiation and holds onto a file handle.
     The keyframe data is not read into memory upon instantiation due to its potentially very large size.
     To read the key data for a particular sequence, call :read_sequence_keys.
-    '''
+    """
 
     def __init__(self, path):
         self.keys_data_offset: int = 0
+        self.scale_keys_data_offset: Optional[int] = None
         self.fp = open(path, 'rb')
         self.psa: Psa = self._read(self.fp)
@@ -43,11 +45,11 @@ class PsaReader(object):
         return self.psa.sequences
 
     def read_sequence_data_matrix(self, sequence_name: str) -> np.ndarray:
-        '''
+        """
         Reads and returns the data matrix for the given sequence.
         @param sequence_name: The name of the sequence.
         @return: An FxBx7 matrix where F is the number of frames, B is the number of bones.
-        '''
+        """
         sequence = self.psa.sequences[sequence_name]
         keys = self.read_sequence_keys(sequence_name)
         bone_count = len(self.bones)
@@ -60,13 +62,13 @@ class PsaReader(object):
         return matrix
 
     def read_sequence_keys(self, sequence_name: str) -> List[Psa.Key]:
-        '''
+        """
         Reads and returns the key data for a sequence.
         @param sequence_name: The name of the sequence.
-        @return: A list of Psa.Keys.
-        '''
-        # Set the file reader to the beginning of the keys data
+        @return: A list of keys for the sequence.
+        """
+        # Set the file reader to the beginning of the key data.
         sequence = self.psa.sequences[sequence_name]
         data_size = sizeof(Psa.Key)
         bone_count = len(self.psa.bones)
@@ -82,6 +84,49 @@ class PsaReader(object):
             offset += data_size
         return keys
 
+    def read_sequence_scale_key_data_matrix(self, sequence_name: str) -> np.ndarray:
+        """
+        Reads and returns the scale key data matrix for the given sequence.
+        @param sequence_name: The name of the sequence.
+        @return: An FxBx3 matrix where F is the number of frames, B is the number of bones.
+        """
+        sequence = self.psa.sequences[sequence_name]
+        scale_keys = self.read_sequence_scale_keys(sequence_name)
+        bone_count = len(self.bones)
+        matrix_size = sequence.frame_count, bone_count, 3
+        matrix = np.ones(matrix_size)
+        keys_iter = iter(scale_keys)
+        for frame_index in range(sequence.frame_count):
+            for bone_index in range(bone_count):
+                matrix[frame_index, bone_index, :] = iter(next(keys_iter).scale)
+        return matrix
+
+    def read_sequence_scale_keys(self, sequence_name: str) -> List[Psa.ScaleKey]:
+        """
+        Reads and returns the scale key data for a sequence.
+        Throws a RuntimeError exception if the sequence does not contain scale keys (use Psa.has_scale_keys to check).
+        @param sequence_name: The name of the sequence.
+        @return: A list of scale keys for the sequence.
+        """
+        if not self.psa.has_scale_keys:
+            raise RuntimeError('The PSA file does not contain scale keys.')
+        # Set the file reader to the beginning of the key data.
+        sequence = self.psa.sequences[sequence_name]
+        data_size = sizeof(Psa.ScaleKey)
+        bone_count = len(self.psa.bones)
+        buffer_length = data_size * bone_count * sequence.frame_count
+        sequence_scale_keys_offset = self.keys_data_offset + (sequence.frame_start_index * bone_count * data_size)
+        self.fp.seek(sequence_scale_keys_offset, 0)
+        buffer = self.fp.read(buffer_length)
+        offset = 0
+        scale_keys = []
+        for _ in range(sequence.frame_count * bone_count):
+            scale_key = Psa.ScaleKey.from_buffer_copy(buffer, offset)
+            scale_keys.append(scale_key)
+            offset += data_size
+        return scale_keys
+
     @staticmethod
     def _read_types(fp, data_class, section: Section, data):
         buffer_length = section.data_size * section.data_count
@@ -111,6 +156,10 @@ class PsaReader(object):
                 # Skip keys on this pass. We will keep this file open and read from it as needed.
                 self.keys_data_offset = fp.tell()
                 fp.seek(section.data_size * section.data_count, 1)
+            elif section.name == b'SCALEKEYS':
+                # Skip scale keys on this pass. We will keep this file open and read from it as needed.
+                self.scale_keys_data_offset = fp.tell()
+                fp.seek(section.data_size * section.data_count, 1)
             else:
                 fp.seek(section.data_size * section.data_count, 1)
                 print(f'Unrecognized section in PSA: "{section.name}"')
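
A hypothetical sketch of the single-pass section scan that PsaReader performs: each section payload is skipped with fp.seek, but the offsets of the bulky key blocks are remembered so they can be read lazily later. The header layout and field order here are simplified assumptions, not the exact PSA format.

import struct

def scan_sections(fp):
    offsets = {}
    header_format = '<20siii'                 # name, type flags, data size, data count (assumed layout)
    header_size = struct.calcsize(header_format)
    while True:
        header = fp.read(header_size)
        if len(header) < header_size:
            break
        name, _flags, data_size, data_count = struct.unpack(header_format, header)
        name = name.rstrip(b'\x00')
        if name in (b'ANIMKEYS', b'SCALEKEYS'):
            offsets[name] = fp.tell()         # remember where the payload begins
        fp.seek(data_size * data_count, 1)    # skip the payload either way
    return offsets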

View File

@@ -3,6 +3,7 @@ from bpy.types import PropertyGroup, Material
 from ...types import PSX_PG_bone_collection_list_item
 
+empty_set = set()
 
 class PSK_PG_material_list_item(PropertyGroup):
     material: PointerProperty(type=Material)
@@ -12,7 +13,7 @@ class PSK_PG_material_list_item(PropertyGroup):
 class PSK_PG_export(PropertyGroup):
     bone_filter_mode: EnumProperty(
         name='Bone Filter',
-        options=set(),
+        options=empty_set,
         description='',
         items=(
             ('ALL', 'All', 'All bones will be exported'),

View File

@@ -2,7 +2,7 @@ import os
 import sys
 
 from bpy.props import StringProperty, BoolProperty, EnumProperty, FloatProperty
-from bpy.types import Operator
+from bpy.types import Operator, FileHandler, Context
 from bpy_extras.io_utils import ImportHelper
 
 from ..importer import PskImportOptions, import_psk
@@ -11,6 +11,17 @@ from ..reader import read_psk
 empty_set = set()
 
 
+class PSK_FH_import(FileHandler):
+    bl_idname = 'PSK_FH_import'
+    bl_label = 'File handler for Unreal PSK/PSKX import'
+    bl_import_operator = 'import_scene.psk'
+    bl_file_extensions = '.psk;.pskx'
+
+    @classmethod
+    def poll_drop(cls, context: Context):
+        return context.area and context.area.type == 'VIEW_3D'
+
+
 class PSK_OT_import(Operator, ImportHelper):
     bl_idname = 'import_scene.psk'
     bl_label = 'Import'
@@ -132,10 +143,11 @@ class PSK_OT_import(Operator, ImportHelper):
         col.use_property_decorate = False
         col.prop(self, 'scale')
 
-        layout.prop(self, 'should_import_mesh')
+        mesh_header, mesh_panel = layout.panel('mesh_panel_id', default_closed=False)
+        mesh_header.prop(self, 'should_import_mesh')
 
-        if self.should_import_mesh:
-            row = layout.row()
+        if mesh_panel and self.should_import_mesh:
+            row = mesh_panel.row()
             col = row.column()
             col.use_property_split = True
             col.use_property_decorate = False
@@ -147,9 +159,11 @@ class PSK_OT_import(Operator, ImportHelper):
             col.prop(self, 'vertex_color_space')
             col.prop(self, 'should_import_shape_keys', text='Shape Keys')
 
-        layout.prop(self, 'should_import_skeleton')
-        if self.should_import_skeleton:
-            row = layout.row()
+        skeleton_header, skeleton_panel = layout.panel('skeleton_panel_id', default_closed=False)
+        skeleton_header.prop(self, 'should_import_skeleton')
+
+        if skeleton_panel and self.should_import_skeleton:
+            row = skeleton_panel.row()
             col = row.column()
             col.use_property_split = True
             col.use_property_decorate = False
@@ -158,4 +172,5 @@ class PSK_OT_import(Operator, ImportHelper):
 classes = (
     PSK_OT_import,
+    PSK_FH_import,
 )
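
A brief hedged sketch of the Blender 4.1 layout.panel pattern used in the draw methods above: panel() returns a header layout and a body layout, the body is None while the panel is collapsed (hence the 'if mesh_panel' guards), and placing the checkbox on the header makes it double as the panel title. The names below are illustrative.

def draw(self, context):
    layout = self.layout

    header, body = layout.panel('example_panel_id', default_closed=False)
    header.prop(self, 'should_import_mesh')    # checkbox shown in the panel header
    if body and self.should_import_mesh:       # body is None when the panel is collapsed
        body.prop(self, 'scale')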

View File

@@ -231,7 +231,6 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
         for vertex_normal in psk.vertex_normals:
             normals.append(tuple(vertex_normal))
         mesh_data.normals_split_custom_set_from_vertices(normals)
-        # TODO: This has been removed in 4.1!
         mesh_data.use_auto_smooth = True
     else:
         mesh_data.shade_smooth()
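
Since use_auto_smooth was removed in Blender 4.1, code that still targets both 3.x/4.0 and 4.1 typically guards the assignment by version; this is a hedged sketch of that pattern, not the addon's actual change.

import bpy

def apply_custom_normals(mesh_data, normals):
    mesh_data.normals_split_custom_set_from_vertices(normals)
    if bpy.app.version < (4, 1, 0):
        # The attribute only exists prior to Blender 4.1.
        mesh_data.use_auto_smooth = True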